diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml index b5b2765e438..d3e842d9f31 100644 --- a/.azure-pipelines/ci.yml +++ b/.azure-pipelines/ci.yml @@ -1,4 +1,4 @@ -trigger: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7'] +trigger: ['main', '3.13', '3.12', '3.11', '3.10', '3.9', '3.8'] jobs: - job: Prebuild diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 6f8fe005621..a4ada1b66bf 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -2,11 +2,11 @@ FROM docker.io/library/fedora:40 ENV CC=clang -ENV WASI_SDK_VERSION=21 +ENV WASI_SDK_VERSION=22 ENV WASI_SDK_PATH=/opt/wasi-sdk ENV WASMTIME_HOME=/opt/wasmtime -ENV WASMTIME_VERSION=18.0.3 +ENV WASMTIME_VERSION=22.0.0 ENV WASMTIME_CPU_ARCH=x86_64 RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \ diff --git a/.gitattributes b/.gitattributes index 5b81d2cb3c9..2f5a030981f 100644 --- a/.gitattributes +++ b/.gitattributes @@ -27,8 +27,6 @@ Lib/test/cjkencodings/* noeol Lib/test/tokenizedata/coding20731.py noeol Lib/test/decimaltestdata/*.decTest noeol Lib/test/test_email/data/*.txt noeol -Lib/test/test_importlib/resources/data01/* noeol -Lib/test/test_importlib/resources/namespacedata01/* noeol Lib/test/xmltestdata/* noeol # Shell scripts should have LF even on Windows because of Cygwin diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 86349355fc6..4462bfa54a1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,8 +1,5 @@ name: Tests -# gh-84728: "paths-ignore" is not used to skip documentation-only PRs, because -# it prevents to mark a job as mandatory. A PR cannot be merged if a job is -# mandatory but not scheduled because of "paths-ignore". on: workflow_dispatch: push: @@ -23,92 +20,68 @@ concurrency: jobs: check_source: - name: 'Check for source changes' + name: Change detection + # To use boolean outputs from this job, parse them as JSON. + # Here's some examples: + # + # if: fromJSON(needs.check_source.outputs.run-docs) + # + # ${{ + # fromJSON(needs.check_source.outputs.run_tests) + # && 'truthy-branch' + # || 'falsy-branch' + # }} + # + uses: ./.github/workflows/reusable-change-detection.yml + + check-docs: + name: Docs + needs: check_source + if: fromJSON(needs.check_source.outputs.run-docs) + uses: ./.github/workflows/reusable-docs.yml + + check_abi: + name: 'Check if the ABI has changed' runs-on: ubuntu-latest - timeout-minutes: 10 - outputs: - run-docs: ${{ steps.docs-changes.outputs.run-docs || false }} - run_tests: ${{ steps.check.outputs.run_tests }} - run_hypothesis: ${{ steps.check.outputs.run_hypothesis }} - run_cifuzz: ${{ steps.check.outputs.run_cifuzz }} - config_hash: ${{ steps.config_hash.outputs.hash }} + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout@v4 - - name: Check for source changes + - uses: actions/setup-python@v5 + - name: Install dependencies + run: | + sudo ./.github/workflows/posix-deps-apt.sh + sudo apt-get install -yq abigail-tools + - name: Build CPython + env: + CFLAGS: -g3 -O0 + run: | + # Build Python with the libpython dynamic library + ./configure --enable-shared + make -j4 + - name: Check for changes in the ABI id: check run: | - if [ -z "$GITHUB_BASE_REF" ]; then - echo "run_tests=true" >> $GITHUB_OUTPUT - else - git fetch origin $GITHUB_BASE_REF --depth=1 - # git diff "origin/$GITHUB_BASE_REF..." 
(3 dots) may be more - # reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots), - # but it requires to download more commits (this job uses - # "git fetch --depth=1"). - # - # git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git - # 2.26, but Git 2.28 is stricter and fails with "no merge base". - # - # git diff "origin/$GITHUB_BASE_REF.." (2 dots) should be enough on - # GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF - # into the PR branch anyway. - # - # https://github.com/python/core-workflow/issues/373 - git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$|\.md$|mypy\.ini$)' && echo "run_tests=true" >> $GITHUB_OUTPUT || true - fi - - # Check if we should run hypothesis tests - GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}} - echo $GIT_BRANCH - if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then - echo "Branch too old for hypothesis tests" - echo "run_hypothesis=false" >> $GITHUB_OUTPUT - else - echo "Run hypothesis tests" - echo "run_hypothesis=true" >> $GITHUB_OUTPUT - fi - - # oss-fuzz maintains a configuration for fuzzing the main branch of - # CPython, so CIFuzz should be run only for code that is likely to be - # merged into the main branch; compatibility with older branches may - # be broken. - FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)' - if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then - # The tests are pretty slow so they are executed only for PRs - # changing relevant files. - echo "Run CIFuzz tests" - echo "run_cifuzz=true" >> $GITHUB_OUTPUT - else - echo "Branch too old for CIFuzz tests; or no C files were changed" - echo "run_cifuzz=false" >> $GITHUB_OUTPUT + if ! make check-abidump; then + echo "Generated ABI file is not up to date." + echo "Please add the release manager of this branch as a reviewer of this PR." + echo "" + echo "The up to date ABI file should be attached to this build as an artifact." 
+ echo "" + echo "To learn more about this check: https://devguide.python.org/getting-started/setup-building/index.html#regenerate-the-abi-dump" + echo "" + exit 1 fi - - name: Compute hash for config cache key - id: config_hash + - name: Generate updated ABI files + if: ${{ failure() && steps.check.conclusion == 'failure' }} run: | - echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> $GITHUB_OUTPUT - - name: Get a list of the changed documentation-related files - if: github.event_name == 'pull_request' - id: changed-docs-files - uses: Ana06/get-changed-files@v2.3.0 + make regen-abidump + - uses: actions/upload-artifact@v4 + name: Publish updated ABI files + if: ${{ failure() && steps.check.conclusion == 'failure' }} with: - filter: | - Doc/** - Misc/** - .github/workflows/reusable-docs.yml - format: csv # works for paths with spaces - - name: Check for docs changes - if: >- - github.event_name == 'pull_request' - && steps.changed-docs-files.outputs.added_modified_renamed != '' - id: docs-changes - run: | - echo "run-docs=true" >> "${GITHUB_OUTPUT}" - - check-docs: - name: Docs - needs: check_source - if: fromJSON(needs.check_source.outputs.run-docs) - uses: ./.github/workflows/reusable-docs.yml + name: abi-data + path: ./Doc/data/*.abi check_generated_files: name: 'Check if generated files are up to date' @@ -179,79 +152,101 @@ jobs: run: make check-c-globals build_windows: - name: 'Windows' - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' - uses: ./.github/workflows/reusable-windows.yml - - build_windows_free_threading: - name: 'Windows (free-threading)' + name: >- + Windows + ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + if: fromJSON(needs.check_source.outputs.run_tests) + strategy: + matrix: + arch: + - Win32 + - x64 + - arm64 + free-threading: + - false + - true uses: ./.github/workflows/reusable-windows.yml with: - free-threading: true + arch: ${{ matrix.arch }} + free-threading: ${{ matrix.free-threading }} - build_macos: - name: 'macOS' + build_windows_msi: + name: >- # ${{ '' } is a hack to nest jobs under the same sidebar category + Windows MSI${{ '' }} needs: check_source - if: needs.check_source.outputs.run_tests == 'true' - uses: ./.github/workflows/reusable-macos.yml + if: fromJSON(needs.check_source.outputs.run-win-msi) + strategy: + matrix: + arch: + - x86 + - x64 + - arm64 + uses: ./.github/workflows/reusable-windows-msi.yml with: - config_hash: ${{ needs.check_source.outputs.config_hash }} - # Cirrus and macos-14 are M1, macos-13 is default GHA Intel. - # Cirrus used for upstream, macos-14 for forks. - os-matrix: '["ghcr.io/cirruslabs/macos-runner:sonoma", "macos-14", "macos-13"]' + arch: ${{ matrix.arch }} - build_macos_free_threading: - name: 'macOS (free-threading)' + build_macos: + name: >- + macOS + ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} needs: check_source if: needs.check_source.outputs.run_tests == 'true' + strategy: + fail-fast: false + matrix: + # Cirrus and macos-14 are M1, macos-13 is default GHA Intel. + # macOS 13 only runs tests against the GIL-enabled CPython. + # Cirrus used for upstream, macos-14 for forks. 
+ os: + - ghcr.io/cirruslabs/macos-runner:sonoma + - macos-14 + - macos-13 + is-fork: # only used for the exclusion trick + - ${{ github.repository_owner != 'python' }} + free-threading: + - false + - true + exclude: + - os: ghcr.io/cirruslabs/macos-runner:sonoma + is-fork: true + - os: macos-14 + is-fork: false + - os: macos-13 + free-threading: true uses: ./.github/workflows/reusable-macos.yml with: config_hash: ${{ needs.check_source.outputs.config_hash }} - free-threading: true - # Cirrus and macos-14 are M1. - # Cirrus used for upstream, macos-14 for forks. - os-matrix: '["ghcr.io/cirruslabs/macos-runner:sonoma", "macos-14"]' + free-threading: ${{ matrix.free-threading }} + os: ${{ matrix.os }} build_ubuntu: - name: 'Ubuntu' - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' - uses: ./.github/workflows/reusable-ubuntu.yml - with: - config_hash: ${{ needs.check_source.outputs.config_hash }} - options: | - ../cpython-ro-srcdir/configure \ - --config-cache \ - --with-pydebug \ - --with-openssl=$OPENSSL_DIR - - build_ubuntu_free_threading: - name: 'Ubuntu (free-threading)' + name: >- + Ubuntu + ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} needs: check_source if: needs.check_source.outputs.run_tests == 'true' + strategy: + matrix: + free-threading: + - false + - true uses: ./.github/workflows/reusable-ubuntu.yml with: config_hash: ${{ needs.check_source.outputs.config_hash }} - options: | - ../cpython-ro-srcdir/configure \ - --config-cache \ - --with-pydebug \ - --with-openssl=$OPENSSL_DIR \ - --disable-gil + free-threading: ${{ matrix.free-threading }} build_ubuntu_ssltests: name: 'Ubuntu SSL tests with OpenSSL' - runs-on: ubuntu-22.04 + runs-on: ${{ matrix.os }} timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' strategy: fail-fast: false matrix: - openssl_ver: [1.1.1w, 3.0.13, 3.1.5, 3.2.1] + os: [ubuntu-22.04] + openssl_ver: [3.0.15, 3.1.7, 3.2.3, 3.3.2] env: OPENSSL_VER: ${{ matrix.openssl_ver }} MULTISSL_DIR: ${{ github.workspace }}/multissl @@ -280,7 +275,7 @@ jobs: uses: actions/cache@v4 with: path: ./multissl/openssl/${{ env.OPENSSL_VER }} - key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }} + key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }} - name: Install OpenSSL if: steps.cache-openssl.outputs.cache-hit != 'true' run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux @@ -315,7 +310,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' && needs.check_source.outputs.run_hypothesis == 'true' env: - OPENSSL_VER: 3.0.13 + OPENSSL_VER: 3.0.15 PYTHONSTRICTEXTENSIONBUILD: 1 steps: - uses: actions/checkout@v4 @@ -393,7 +388,7 @@ jobs: path: ./hypothesis key: hypothesis-database-${{ github.head_ref || github.run_id }} restore-keys: | - - hypothesis-database- + hypothesis-database- - name: "Run tests" working-directory: ${{ env.CPYTHON_BUILDDIR }} run: | @@ -428,7 +423,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: - OPENSSL_VER: 3.0.13 + OPENSSL_VER: 3.0.15 PYTHONSTRICTEXTENSIONBUILD: 1 ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0 steps: @@ -458,7 +453,7 @@ jobs: uses: actions/cache@v4 with: path: ./multissl/openssl/${{ env.OPENSSL_VER }} - key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }} + key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }} - name: Install OpenSSL if: 
steps.cache-openssl.outputs.cache-hit != 'true' run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux @@ -550,13 +545,11 @@ jobs: - check-docs - check_generated_files - build_macos - - build_macos_free_threading - build_ubuntu - - build_ubuntu_free_threading - build_ubuntu_ssltests - build_wasi - build_windows - - build_windows_free_threading + - build_windows_msi - test_hypothesis - build_asan - build_tsan @@ -571,6 +564,7 @@ jobs: with: allowed-failures: >- build_ubuntu_ssltests, + build_windows_msi, cifuzz, test_hypothesis, allowed-skips: >- @@ -586,13 +580,10 @@ jobs: && ' check_generated_files, build_macos, - build_macos_free_threading, build_ubuntu, - build_ubuntu_free_threading, build_ubuntu_ssltests, build_wasi, build_windows, - build_windows_free_threading, build_asan, build_tsan, build_tsan_free_threading, diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml deleted file mode 100644 index 65d32c734e7..00000000000 --- a/.github/workflows/build_msi.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: TestsMSI - -on: - workflow_dispatch: - push: - branches: - - 'main' - - '3.*' - paths: - - 'Tools/msi/**' - - '.github/workflows/build_msi.yml' - pull_request: - branches: - - 'main' - - '3.*' - paths: - - 'Tools/msi/**' - - '.github/workflows/build_msi.yml' - -permissions: - contents: read - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -jobs: - build: - name: Windows Installer - runs-on: windows-latest - timeout-minutes: 60 - strategy: - matrix: - type: [x86, x64, arm64] - env: - IncludeFreethreaded: true - steps: - - uses: actions/checkout@v4 - - name: Build CPython installer - run: .\Tools\msi\build.bat --doc -${{ matrix.type }} diff --git a/.github/workflows/reusable-change-detection.yml b/.github/workflows/reusable-change-detection.yml new file mode 100644 index 00000000000..6f599f75547 --- /dev/null +++ b/.github/workflows/reusable-change-detection.yml @@ -0,0 +1,158 @@ +--- + +name: Change detection + +on: # yamllint disable-line rule:truthy + workflow_call: + outputs: + # Some of the referenced steps set outputs conditionally and there may be + # cases when referencing them evaluates to empty strings. It is nice to + # work with proper booleans so they have to be evaluated through JSON + # conversion in the expressions. However, empty strings used like that + # may trigger all sorts of undefined and hard-to-debug behaviors in + # GitHub Actions CI/CD. To help with this, all of the outputs set here + # that are meant to be used as boolean flags (and not arbitrary strings), + # MUST have fallbacks with default values set. A common pattern would be + # to add ` || false` to all such expressions here, in the output + # definitions. They can then later be safely used through the following + # idiom in job conditionals and other expressions. 
Here's some examples: + # + # if: fromJSON(needs.change-detection.outputs.run-docs) + # + # ${{ + # fromJSON(needs.change-detection.outputs.run-tests) + # && 'truthy-branch' + # || 'falsy-branch' + # }} + # + config_hash: + description: Config hash value for use in cache keys + value: ${{ jobs.compute-changes.outputs.config-hash }} # str + run-docs: + description: Whether to build the docs + value: ${{ jobs.compute-changes.outputs.run-docs || false }} # bool + run_tests: + description: Whether to run the regular tests + value: ${{ jobs.compute-changes.outputs.run-tests || false }} # bool + run-win-msi: + description: Whether to run the MSI installer smoke tests + value: >- # bool + ${{ jobs.compute-changes.outputs.run-win-msi || false }} + run_hypothesis: + description: Whether to run the Hypothesis tests + value: >- # bool + ${{ jobs.compute-changes.outputs.run-hypothesis || false }} + run_cifuzz: + description: Whether to run the CIFuzz job + value: >- # bool + ${{ jobs.compute-changes.outputs.run-cifuzz || false }} + +jobs: + compute-changes: + name: Compute changed files + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + config-hash: ${{ steps.config-hash.outputs.hash }} + run-cifuzz: ${{ steps.check.outputs.run-cifuzz }} + run-docs: ${{ steps.docs-changes.outputs.run-docs }} + run-hypothesis: ${{ steps.check.outputs.run-hypothesis }} + run-tests: ${{ steps.check.outputs.run-tests }} + run-win-msi: ${{ steps.win-msi-changes.outputs.run-win-msi }} + steps: + - run: >- + echo '${{ github.event_name }}' + - uses: actions/checkout@v4 + - name: Check for source changes + id: check + run: | + if [ -z "$GITHUB_BASE_REF" ]; then + echo "run-tests=true" >> $GITHUB_OUTPUT + else + git fetch origin $GITHUB_BASE_REF --depth=1 + # git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more + # reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots), + # but it requires to download more commits (this job uses + # "git fetch --depth=1"). + # + # git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git + # 2.26, but Git 2.28 is stricter and fails with "no merge base". + # + # git diff "origin/$GITHUB_BASE_REF.." (2 dots) should be enough on + # GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF + # into the PR branch anyway. + # + # https://github.com/python/core-workflow/issues/373 + git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$|\.md$|mypy\.ini$)' && echo "run-tests=true" >> $GITHUB_OUTPUT || true + fi + + # Check if we should run hypothesis tests + GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}} + echo $GIT_BRANCH + if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then + echo "Branch too old for hypothesis tests" + echo "run-hypothesis=false" >> $GITHUB_OUTPUT + else + echo "Run hypothesis tests" + echo "run-hypothesis=true" >> $GITHUB_OUTPUT + fi + + # oss-fuzz maintains a configuration for fuzzing the main branch of + # CPython, so CIFuzz should be run only for code that is likely to be + # merged into the main branch; compatibility with older branches may + # be broken. + FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)' + if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then + # The tests are pretty slow so they are executed only for PRs + # changing relevant files. 
+ echo "Run CIFuzz tests" + echo "run-cifuzz=true" >> $GITHUB_OUTPUT + else + echo "Branch too old for CIFuzz tests; or no C files were changed" + echo "run-cifuzz=false" >> $GITHUB_OUTPUT + fi + - name: Compute hash for config cache key + id: config-hash + run: | + echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> $GITHUB_OUTPUT + - name: Get a list of the changed documentation-related files + if: github.event_name == 'pull_request' + id: changed-docs-files + uses: Ana06/get-changed-files@v2.3.0 + with: + filter: | + Doc/** + Misc/** + .github/workflows/reusable-docs.yml + format: csv # works for paths with spaces + - name: Check for docs changes + # We only want to run this on PRs when related files are changed, + # or when user triggers manual workflow run. + if: >- + ( + github.event_name == 'pull_request' + && steps.changed-docs-files.outputs.added_modified_renamed != '' + ) || github.event_name == 'workflow_dispatch' + id: docs-changes + run: | + echo "run-docs=true" >> "${GITHUB_OUTPUT}" + - name: Get a list of the MSI installer-related files + if: github.event_name == 'pull_request' + id: changed-win-msi-files + uses: Ana06/get-changed-files@v2.3.0 + with: + filter: | + Tools/msi/** + .github/workflows/reusable-windows-msi.yml + format: csv # works for paths with spaces + - name: Check for changes in MSI installer-related files + # We only want to run this on PRs when related files are changed, + # or when user triggers manual workflow run. + if: >- + ( + github.event_name == 'pull_request' + && steps.changed-win-msi-files.outputs.added_modified_renamed != '' + ) || github.event_name == 'workflow_dispatch' + id: win-msi-changes + run: | + echo "run-win-msi=true" >> "${GITHUB_OUTPUT}" diff --git a/.github/workflows/reusable-docs.yml b/.github/workflows/reusable-docs.yml index 859f78d043b..4b021b3dc32 100644 --- a/.github/workflows/reusable-docs.yml +++ b/.github/workflows/reusable-docs.yml @@ -11,6 +11,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +env: + FORCE_COLOR: 1 + jobs: build_doc: name: 'Docs' @@ -25,9 +28,15 @@ jobs: - name: 'Check out latest PR branch commit' uses: actions/checkout@v4 with: - ref: ${{ github.event.pull_request.head.sha }} + ref: >- + ${{ + github.event_name == 'pull_request' + && github.event.pull_request.head.sha + || '' + }} # Adapted from https://github.com/actions/checkout/issues/520#issuecomment-1167205721 - name: 'Fetch commits to get branch diff' + if: github.event_name == 'pull_request' run: | # Fetch enough history to find a common ancestor commit (aka merge-base): git fetch origin ${{ env.refspec_pr }} --depth=$(( ${{ github.event.pull_request.commits }} + 1 )) \ diff --git a/.github/workflows/reusable-macos.yml b/.github/workflows/reusable-macos.yml index f825d1a7b3f..eef6be75003 100644 --- a/.github/workflows/reusable-macos.yml +++ b/.github/workflows/reusable-macos.yml @@ -8,13 +8,14 @@ on: required: false type: boolean default: false - os-matrix: - required: false + os: + description: OS to run the job + required: true type: string jobs: build_macos: - name: build and test (${{ matrix.os }}) + name: build and test (${{ inputs.os }}) timeout-minutes: 60 env: HOMEBREW_NO_ANALYTICS: 1 @@ -23,18 +24,7 @@ jobs: HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 PYTHONSTRICTEXTENSIONBUILD: 1 TERM: linux - strategy: - fail-fast: false - matrix: - os: ${{fromJson(inputs.os-matrix)}} - is-fork: - - ${{ github.repository_owner != 'python' }} - exclude: - 
- os: "ghcr.io/cirruslabs/macos-runner:sonoma" - is-fork: true - - os: "macos-14" - is-fork: false - runs-on: ${{ matrix.os }} + runs-on: ${{ inputs.os }} steps: - uses: actions/checkout@v4 - name: Runner image version @@ -43,7 +33,7 @@ jobs: uses: actions/cache@v4 with: path: config.cache - key: ${{ github.job }}-${{ matrix.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }} + key: ${{ github.job }}-${{ inputs.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }} - name: Install Homebrew dependencies run: brew install pkg-config openssl@3.0 xz gdbm tcl-tk - name: Configure CPython diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml index b6d5d8fa1c7..27f4eacd86f 100644 --- a/.github/workflows/reusable-tsan.yml +++ b/.github/workflows/reusable-tsan.yml @@ -36,11 +36,11 @@ jobs: # Install clang-18 wget https://apt.llvm.org/llvm.sh chmod +x llvm.sh - sudo ./llvm.sh 18 - sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 100 - sudo update-alternatives --set clang /usr/bin/clang-18 - sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-18 100 - sudo update-alternatives --set clang++ /usr/bin/clang++-18 + sudo ./llvm.sh 17 # gh-121946: llvm-18 package is temporarily broken + sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-17 100 + sudo update-alternatives --set clang /usr/bin/clang-17 + sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-17 100 + sudo update-alternatives --set clang++ /usr/bin/clang++-17 # Reduce ASLR to avoid TSAN crashing sudo sysctl -w vm.mmap_rnd_bits=28 - name: TSAN Option Setup diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml index fa450ed3376..753d51712f5 100644 --- a/.github/workflows/reusable-ubuntu.yml +++ b/.github/workflows/reusable-ubuntu.yml @@ -4,18 +4,24 @@ on: config_hash: required: true type: string - options: - required: true - type: string + free-threading: + description: Whether to use free-threaded mode + required: false + type: boolean + default: false jobs: build_ubuntu_reusable: name: 'build and test' timeout-minutes: 60 - runs-on: ubuntu-22.04 + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-22.04] env: FORCE_COLOR: 1 - OPENSSL_VER: 3.0.13 + OPENSSL_VER: 3.0.15 PYTHONSTRICTEXTENSIONBUILD: 1 TERM: linux steps: @@ -34,7 +40,7 @@ jobs: uses: actions/cache@v4 with: path: ./multissl/openssl/${{ env.OPENSSL_VER }} - key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }} + key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }} - name: Install OpenSSL if: steps.cache-openssl.outputs.cache-hit != 'true' run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux @@ -63,7 +69,12 @@ jobs: key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }} - name: Configure CPython out-of-tree working-directory: ${{ env.CPYTHON_BUILDDIR }} - run: ${{ inputs.options }} + run: >- + ../cpython-ro-srcdir/configure + --config-cache + --with-pydebug + --with-openssl=$OPENSSL_DIR + ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} - name: Build CPython out-of-tree working-directory: ${{ env.CPYTHON_BUILDDIR }} run: make -j4 diff --git a/.github/workflows/reusable-wasi.yml b/.github/workflows/reusable-wasi.yml index c389fe9e173..ffa143b3457 100644 --- a/.github/workflows/reusable-wasi.yml +++ b/.github/workflows/reusable-wasi.yml @@ -11,8 +11,8 @@ 
jobs: timeout-minutes: 60 runs-on: ubuntu-22.04 env: - WASMTIME_VERSION: 18.0.3 - WASI_SDK_VERSION: 21 + WASMTIME_VERSION: 22.0.0 + WASI_SDK_VERSION: 22 WASI_SDK_PATH: /opt/wasi-sdk CROSS_BUILD_PYTHON: cross-build/build CROSS_BUILD_WASI: cross-build/wasm32-wasi @@ -20,9 +20,9 @@ jobs: - uses: actions/checkout@v4 # No problem resolver registered as one doesn't currently exist for Clang. - name: "Install wasmtime" - uses: jcbhmr/setup-wasmtime@v2 + uses: bytecodealliance/actions/wasmtime/setup@v1 with: - wasmtime-version: ${{ env.WASMTIME_VERSION }} + version: ${{ env.WASMTIME_VERSION }} - name: "Restore WASI SDK" id: cache-wasi-sdk uses: actions/cache@v4 @@ -50,8 +50,10 @@ jobs: uses: actions/cache@v4 with: path: ${{ env.CROSS_BUILD_PYTHON }}/config.cache - # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }} + # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python. + # Include the hash of `Tools/wasm/wasi.py` as it may change the environment variables. + # (Make sure to keep the key in sync with the other config.cache step below.) + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }} - name: "Configure build Python" run: python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug - name: "Make build Python" @@ -60,8 +62,8 @@ jobs: uses: actions/cache@v4 with: path: ${{ env.CROSS_BUILD_WASI }}/config.cache - # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }} + # Should be kept in sync with the other config.cache step above. 
+ key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }} - name: "Configure host" # `--with-pydebug` inferred from configure-build-python run: python3 Tools/wasm/wasi.py configure-host -- --config-cache diff --git a/.github/workflows/reusable-windows-msi.yml b/.github/workflows/reusable-windows-msi.yml new file mode 100644 index 00000000000..fc34ab7c3eb --- /dev/null +++ b/.github/workflows/reusable-windows-msi.yml @@ -0,0 +1,24 @@ +name: TestsMSI + +on: + workflow_call: + inputs: + arch: + description: CPU architecture + required: true + type: string + +permissions: + contents: read + +jobs: + build: + name: installer for ${{ inputs.arch }} + runs-on: windows-latest + timeout-minutes: 60 + env: + IncludeFreethreaded: true + steps: + - uses: actions/checkout@v4 + - name: Build CPython installer + run: .\Tools\msi\build.bat --doc -${{ inputs.arch }} diff --git a/.github/workflows/reusable-windows.yml b/.github/workflows/reusable-windows.yml index c0209e0e1c9..e9c3c8e05a8 100644 --- a/.github/workflows/reusable-windows.yml +++ b/.github/workflows/reusable-windows.yml @@ -1,53 +1,45 @@ on: workflow_call: inputs: + arch: + description: CPU architecture + required: true + type: string free-threading: + description: Whether to compile CPython in free-threading mode required: false type: boolean default: false -jobs: - build_win32: - name: 'build and test (x86)' - runs-on: windows-latest - timeout-minutes: 60 - env: - IncludeUwp: 'true' - steps: - - uses: actions/checkout@v4 - - name: Build CPython - run: .\PCbuild\build.bat -e -d -v -p Win32 ${{ inputs.free-threading && '--disable-gil' || '' }} - - name: Display build info - run: .\python.bat -m test.pythoninfo - - name: Tests - run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }} +env: + IncludeUwp: >- + true - build_win_amd64: - name: 'build and test (x64)' +jobs: + build: + name: >- + build${{ inputs.arch != 'arm64' && ' and test' || '' }} + (${{ inputs.arch }}) runs-on: windows-latest timeout-minutes: 60 - env: - IncludeUwp: 'true' steps: - uses: actions/checkout@v4 - name: Register MSVC problem matcher + if: inputs.arch != 'Win32' run: echo "::add-matcher::.github/problem-matchers/msvc.json" - name: Build CPython - run: .\PCbuild\build.bat -e -d -v -p x64 ${{ inputs.free-threading && '--disable-gil' || '' }} + run: >- + .\PCbuild\build.bat + -e -d -v + -p ${{ inputs.arch }} + ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} - name: Display build info + if: inputs.arch != 'arm64' run: .\python.bat -m test.pythoninfo - name: Tests - run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }} - - build_win_arm64: - name: 'build (arm64)' - runs-on: windows-latest - timeout-minutes: 60 - env: - IncludeUwp: 'true' - steps: - - uses: actions/checkout@v4 - - name: Register MSVC problem matcher - run: echo "::add-matcher::.github/problem-matchers/msvc.json" - - name: Build CPython - run: .\PCbuild\build.bat -e -d -v -p arm64 ${{ inputs.free-threading && '--disable-gil' || '' }} + if: inputs.arch != 'arm64' + run: >- + .\PCbuild\rt.bat + -p ${{ inputs.arch }} + -d -q --fast-ci + ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} diff --git a/.gitignore b/.gitignore index 8872e9d5508..1248d6dbcbe 100644 --- a/.gitignore +++ b/.gitignore @@ -169,5 +169,3 @@ 
 Python/frozen_modules/MANIFEST
 /python
 !/Python/
-# main branch only: ABI files are not checked/maintained.
-Doc/data/python*.abi
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fde9d9149bf..7e0bdd50b51 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,15 +1,23 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.4
+    rev: v0.6.7
     hooks:
       - id: ruff
-        name: Run Ruff on Lib/test/
+        name: Run Ruff (lint) on Doc/
+        args: [--exit-non-zero-on-fix]
+        files: ^Doc/
+      - id: ruff
+        name: Run Ruff (lint) on Lib/test/
         args: [--exit-non-zero-on-fix]
         files: ^Lib/test/
       - id: ruff
-        name: Run Ruff on Argument Clinic
+        name: Run Ruff (lint) on Argument Clinic
         args: [--exit-non-zero-on-fix, --config=Tools/clinic/.ruff.toml]
         files: ^Tools/clinic/|Lib/test/test_clinic.py
+      - id: ruff-format
+        name: Run Ruff (format) on Doc/
+        args: [--check]
+        files: ^Doc/
 
   - repo: https://github.com/psf/black-pre-commit-mirror
     rev: 24.4.2
diff --git a/.readthedocs.yml b/.readthedocs.yml
index d0d0c3b93ed..a57de00544e 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -8,7 +8,7 @@ sphinx:
    configuration: Doc/conf.py
 
 build:
-  os: ubuntu-22.04
+  os: ubuntu-24.04
   tools:
     python: "3"
diff --git a/Android/README.md b/Android/README.md
index f5f463ca116..3daa545cc93 100644
--- a/Android/README.md
+++ b/Android/README.md
@@ -1,19 +1,19 @@
 # Python for Android
 
 These instructions are only needed if you're planning to compile Python for
-Android yourself. Most users should *not* need to do this. If you're looking to
-use Python on Android, one of the following tools will provide a much more
-approachable user experience:
-
-* [Briefcase](https://briefcase.readthedocs.io), from the BeeWare project
-* [Buildozer](https://buildozer.readthedocs.io), from the Kivy project
-* [Chaquopy](https://chaquo.com/chaquopy/)
+Android yourself. Most users should *not* need to do this. Instead, use one of
+the tools listed in `Doc/using/android.rst`, which will provide a much easier
+experience.
 
 ## Prerequisites
 
-Export the `ANDROID_HOME` environment variable to point at your Android SDK. If
-you don't already have the SDK, here's how to install it:
+First, make sure you have all the usual tools and libraries needed to build
+Python for your development machine.
+
+Second, you'll need an Android SDK. If you already have the SDK installed,
+export the `ANDROID_HOME` environment variable to point at its location.
+Otherwise, here's how to install it:
 
 * Download the "Command line tools" from .
 * Create a directory `android-sdk/cmdline-tools`, and unzip the command line
@@ -25,7 +25,7 @@ you don't already have the SDK, here's how to install it:
 The `android.py` script also requires the following commands to be on the
 `PATH`:
 
 * `curl`
-* `java`
+* `java` (or set the `JAVA_HOME` environment variable)
 * `tar`
 * `unzip`
@@ -37,11 +37,6 @@ development tools, which currently means Linux or macOS.
 
 This involves doing a cross-build where you use a "build" Python (for your
 development machine) to help produce a "host" Python for Android.
 
-First, make sure you have all the usual tools and libraries needed to build
-Python for your development machine. The only Android tool you need to install
-is the command line tools package above: the build script will download the
-rest.
-
 The easiest way to do a build is to use the `android.py` script.
 You can either have it perform the entire build process from start to finish
 in one step, or you can do it in discrete steps that mirror running
 `configure` and `make` for
@@ -80,18 +75,62 @@ call. For example, if you want a pydebug build that also caches the results from
 
 ## Testing
 
-To run the Python test suite on Android:
-
-* Install Android Studio, if you don't already have it.
-* Follow the instructions in the previous section to build all supported
-  architectures.
-* Run `./android.py setup-testbed` to download the Gradle wrapper.
-* Open the `testbed` directory in Android Studio.
-* In the *Device Manager* dock, connect a device or start an emulator.
-  Then select it from the drop-down list in the toolbar.
-* Click the "Run" button in the toolbar.
-* The testbed app displays nothing on screen while running. To see its output,
-  open the [Logcat window](https://developer.android.com/studio/debug/logcat).
-
-To run specific tests, or pass any other arguments to the test suite, edit the
-command line in testbed/app/src/main/python/main.py.
+The test suite can be run on Linux, macOS, or Windows:
+
+* On Linux, the emulator needs access to the KVM virtualization interface, and
+  a DISPLAY environment variable pointing at an X server.
+* On Windows, you won't be able to do the build on the same machine, so you'll
+  have to copy the `cross-build/HOST` directory from somewhere else.
+
+The test suite can usually be run on a device with 2 GB of RAM, but this is
+borderline, so you may need to increase it to 4 GB. As of Android
+Studio Koala, 2 GB is the default for all emulators, although the user interface
+may indicate otherwise. Locate the emulator's directory under `~/.android/avd`,
+and find `hw.ramSize` in both config.ini and hardware-qemu.ini. Either set these
+manually to the same value, or use the Android Studio Device Manager, which will
+update both files.
+
+Before running the test suite, follow the instructions in the previous section
+to build the architecture you want to test. Then run the test script in one of
+the following modes:
+
+* In `--connected` mode, it runs on a device or emulator you have already
+  connected to the build machine. List the available devices with
+  `$ANDROID_HOME/platform-tools/adb devices -l`, then pass a device ID to the
+  script like this:
+
+  ```sh
+  ./android.py test --connected emulator-5554
+  ```
+
+* In `--managed` mode, it uses a temporary headless emulator defined in the
+  `managedDevices` section of testbed/app/build.gradle.kts. This mode is slower,
+  but more reproducible.
+
+  We currently define two devices: `minVersion` and `maxVersion`, corresponding
+  to our minimum and maximum supported Android versions. For example:
+
+  ```sh
+  ./android.py test --managed maxVersion
+  ```
+
+By default, the only messages the script will show are Python's own stdout and
+stderr. Add the `-v` option to also show Gradle output, and non-Python logcat
+messages.
+
+Any other arguments on the `android.py test` command line will be passed through
+to `python -m test` – use `--` to separate them from android.py's own options.
+See the [Python Developer's
+Guide](https://devguide.python.org/testing/run-write-tests/) for common options
+– most of them will work on Android, except for those that involve subprocesses,
+such as `-j`.
+
+Every time you run `android.py test`, changes in pure-Python files in the
+repository's `Lib` directory will be picked up immediately.
+Changes in C files,
+and architecture-specific files such as sysconfigdata, will not take effect
+until you re-run `android.py make-host` or `build`.
+
+
+## Using in your own app
+
+See `Doc/using/android.rst`.
diff --git a/Android/android-env.sh b/Android/android-env.sh
index 545d559d93a..93372e3fe1c 100644
--- a/Android/android-env.sh
+++ b/Android/android-env.sh
@@ -28,7 +28,7 @@ ndk_version=26.2.11394342
 
 ndk=$ANDROID_HOME/ndk/$ndk_version
 if ! [ -e $ndk ]; then
-    log "Installing NDK: this may take several minutes"
+    log "Installing NDK - this may take several minutes"
     yes | $ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager "ndk;$ndk_version"
 fi
diff --git a/Android/android.py b/Android/android.py
index 0a1393e61dd..8696d9eaeca 100755
--- a/Android/android.py
+++ b/Android/android.py
@@ -1,31 +1,67 @@
 #!/usr/bin/env python3
 
+import asyncio
 import argparse
+from glob import glob
 import os
 import re
+import shlex
 import shutil
+import signal
 import subprocess
 import sys
 import sysconfig
+from asyncio import wait_for
+from contextlib import asynccontextmanager
 from os.path import basename, relpath
 from pathlib import Path
+from subprocess import CalledProcessError
 from tempfile import TemporaryDirectory
+
 
 SCRIPT_NAME = Path(__file__).name
 CHECKOUT = Path(__file__).resolve().parent.parent
+ANDROID_DIR = CHECKOUT / "Android"
+TESTBED_DIR = ANDROID_DIR / "testbed"
 CROSS_BUILD_DIR = CHECKOUT / "cross-build"
+APP_ID = "org.python.testbed"
+DECODE_ARGS = ("UTF-8", "backslashreplace")
+
+
+try:
+    android_home = Path(os.environ['ANDROID_HOME'])
+except KeyError:
+    sys.exit("The ANDROID_HOME environment variable is required.")
+
+adb = Path(
+    f"{android_home}/platform-tools/adb"
+    + (".exe" if os.name == "nt" else "")
+)
+
+gradlew = Path(
+    f"{TESTBED_DIR}/gradlew"
+    + (".bat" if os.name == "nt" else "")
+)
+
+logcat_started = False
 
-def delete_if_exists(path):
-    if path.exists():
+
+def delete_glob(pattern):
+    # Path.glob doesn't accept non-relative patterns.
+    for path in glob(str(pattern)):
+        path = Path(path)
         print(f"Deleting {path} ...")
-        shutil.rmtree(path)
+        if path.is_dir() and not path.is_symlink():
+            shutil.rmtree(path)
+        else:
+            path.unlink()
 
 
 def subdir(name, *, clean=None):
     path = CROSS_BUILD_DIR / name
     if clean:
-        delete_if_exists(path)
+        delete_glob(path)
     if not path.exists():
         if clean is None:
             sys.exit(
@@ -36,10 +72,14 @@ def subdir(name, *, clean=None):
     return path
 
 
-def run(command, *, host=None, **kwargs):
-    env = os.environ.copy()
+def run(command, *, host=None, env=None, log=True, **kwargs):
+    kwargs.setdefault("check", True)
+    if env is None:
+        env = os.environ.copy()
+    original_env = env.copy()
+
     if host:
-        env_script = CHECKOUT / "Android/android-env.sh"
+        env_script = ANDROID_DIR / "android-env.sh"
         env_output = subprocess.run(
             f"set -eu; "
             f"HOST={host}; "
@@ -60,15 +100,13 @@ def run(command, *, host=None, **kwargs):
                     print(line)
                 env[key] = value
 
-        if env == os.environ:
+        if env == original_env:
             raise ValueError(f"Found no variables in {env_script.name} output:\n"
                              + env_output)
 
-    print(">", " ".join(map(str, command)))
-    try:
-        subprocess.run(command, check=True, env=env, **kwargs)
-    except subprocess.CalledProcessError as e:
-        sys.exit(e)
+    if log:
+        print(">", " ".join(map(str, command)))
+    return subprocess.run(command, env=env, **kwargs)
 
 
 def build_python_path():
@@ -100,7 +138,7 @@ def make_build_python(context):
 
 def unpack_deps(host):
     deps_url = "https://github.com/beeware/cpython-android-source-deps/releases/download"
-    for name_ver in ["bzip2-1.0.8-1", "libffi-3.4.4-2", "openssl-3.0.13-1",
+    for name_ver in ["bzip2-1.0.8-1", "libffi-3.4.4-2", "openssl-3.0.15-0",
                      "sqlite-3.45.1-0", "xz-5.4.6-0"]:
         filename = f"{name_ver}-{host}.tar.gz"
         download(f"{deps_url}/{name_ver}/{filename}")
@@ -150,10 +188,17 @@ def configure_host_python(context):
 
 
 def make_host_python(context):
+    # The CFLAGS and LDFLAGS set in android-env include the prefix dir, so
+    # delete any previously-installed Python libs and include files to prevent
+    # them being used during the build.
     host_dir = subdir(context.host)
+    prefix_dir = host_dir / "prefix"
+    delete_glob(f"{prefix_dir}/include/python*")
+    delete_glob(f"{prefix_dir}/lib/libpython*")
+
     os.chdir(host_dir / "build")
     run(["make", "-j", str(os.cpu_count())], host=context.host)
-    run(["make", "install", f"prefix={host_dir}/prefix"], host=context.host)
+    run(["make", "install", f"prefix={prefix_dir}"], host=context.host)
 
 
 def build_all(context):
@@ -164,34 +209,344 @@ def build_all(context):
 
 
 def clean_all(context):
-    delete_if_exists(CROSS_BUILD_DIR)
+    delete_glob(CROSS_BUILD_DIR)
+
+
+def setup_sdk():
+    sdkmanager = android_home / (
+        "cmdline-tools/latest/bin/sdkmanager"
+        + (".bat" if os.name == "nt" else "")
+    )
+
+    # Gradle will fail if it needs to install an SDK package whose license
+    # hasn't been accepted, so pre-accept all licenses.
+    if not all((android_home / "licenses" / path).exists() for path in [
+        "android-sdk-arm-dbt-license", "android-sdk-license"
+    ]):
+        run([sdkmanager, "--licenses"], text=True, input="y\n" * 100)
+
+    # Gradle may install this automatically, but we can't rely on that because
+    # we need to run adb within the logcat task.
+    if not adb.exists():
+        run([sdkmanager, "platform-tools"])
 
 
 # To avoid distributing compiled artifacts without corresponding source code,
 # the Gradle wrapper is not included in the CPython repository. Instead, we
 # extract it from the Gradle release.
-def setup_testbed(context): +def setup_testbed(): + if all((TESTBED_DIR / path).exists() for path in [ + "gradlew", "gradlew.bat", "gradle/wrapper/gradle-wrapper.jar", + ]): + return + ver_long = "8.7.0" ver_short = ver_long.removesuffix(".0") - testbed_dir = CHECKOUT / "Android/testbed" for filename in ["gradlew", "gradlew.bat"]: out_path = download( f"https://raw.githubusercontent.com/gradle/gradle/v{ver_long}/{filename}", - testbed_dir) + TESTBED_DIR) os.chmod(out_path, 0o755) with TemporaryDirectory(prefix=SCRIPT_NAME) as temp_dir: - os.chdir(temp_dir) bin_zip = download( - f"https://services.gradle.org/distributions/gradle-{ver_short}-bin.zip") + f"https://services.gradle.org/distributions/gradle-{ver_short}-bin.zip", + temp_dir) outer_jar = f"gradle-{ver_short}/lib/plugins/gradle-wrapper-{ver_short}.jar" - run(["unzip", bin_zip, outer_jar]) - run(["unzip", "-o", "-d", f"{testbed_dir}/gradle/wrapper", outer_jar, - "gradle-wrapper.jar"]) + run(["unzip", "-d", temp_dir, bin_zip, outer_jar]) + run(["unzip", "-o", "-d", f"{TESTBED_DIR}/gradle/wrapper", + f"{temp_dir}/{outer_jar}", "gradle-wrapper.jar"]) -def main(): +# run_testbed will build the app automatically, but it's useful to have this as +# a separate command to allow running the app outside of this script. +def build_testbed(context): + setup_sdk() + setup_testbed() + run( + [gradlew, "--console", "plain", "packageDebug", "packageDebugAndroidTest"], + cwd=TESTBED_DIR, + ) + + +# Work around a bug involving sys.exit and TaskGroups +# (https://github.com/python/cpython/issues/101515). +def exit(*args): + raise MySystemExit(*args) + + +class MySystemExit(Exception): + pass + + +# The `test` subcommand runs all subprocesses through this context manager so +# that no matter what happens, they can always be cancelled from another task, +# and they will always be cleaned up on exit. +@asynccontextmanager +async def async_process(*args, **kwargs): + process = await asyncio.create_subprocess_exec(*args, **kwargs) + try: + yield process + finally: + if process.returncode is None: + # Allow a reasonably long time for Gradle to clean itself up, + # because we don't want stale emulators left behind. + timeout = 10 + process.terminate() + try: + await wait_for(process.wait(), timeout) + except TimeoutError: + print( + f"Command {args} did not terminate after {timeout} seconds " + f" - sending SIGKILL" + ) + process.kill() + + # Even after killing the process we must still wait for it, + # otherwise we'll get the warning "Exception ignored in __del__". + await wait_for(process.wait(), timeout=1) + + +async def async_check_output(*args, **kwargs): + async with async_process( + *args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs + ) as process: + stdout, stderr = await process.communicate() + if process.returncode == 0: + return stdout.decode(*DECODE_ARGS) + else: + raise CalledProcessError( + process.returncode, args, + stdout.decode(*DECODE_ARGS), stderr.decode(*DECODE_ARGS) + ) + + +# Return a list of the serial numbers of connected devices. Emulators will have +# serials of the form "emulator-5678". +async def list_devices(): + serials = [] + header_found = False + + lines = (await async_check_output(adb, "devices")).splitlines() + for line in lines: + # Ignore blank lines, and all lines before the header. 
+ line = line.strip() + if line == "List of devices attached": + header_found = True + elif header_found and line: + try: + serial, status = line.split() + except ValueError: + raise ValueError(f"failed to parse {line!r}") + if status == "device": + serials.append(serial) + + if not header_found: + raise ValueError(f"failed to parse {lines}") + return serials + + +async def find_device(context, initial_devices): + if context.managed: + print("Waiting for managed device - this may take several minutes") + while True: + new_devices = set(await list_devices()).difference(initial_devices) + if len(new_devices) == 0: + await asyncio.sleep(1) + elif len(new_devices) == 1: + serial = new_devices.pop() + print(f"Serial: {serial}") + return serial + else: + exit(f"Found more than one new device: {new_devices}") + else: + return context.connected + + +# An older version of this script in #121595 filtered the logs by UID instead. +# But logcat can't filter by UID until API level 31. If we ever switch back to +# filtering by UID, we'll also have to filter by time so we only show messages +# produced after the initial call to `stop_app`. +# +# We're more likely to miss the PID because it's shorter-lived, so there's a +# workaround in PythonSuite.kt to stop it being *too* short-lived. +async def find_pid(serial): + print("Waiting for app to start - this may take several minutes") + shown_error = False + while True: + try: + # `pidof` requires API level 24 or higher. The level 23 emulator + # includes it, but it doesn't work (it returns all processes). + pid = (await async_check_output( + adb, "-s", serial, "shell", "pidof", "-s", APP_ID + )).strip() + except CalledProcessError as e: + # If the app isn't running yet, pidof gives no output. So if there + # is output, there must have been some other error. However, this + # sometimes happens transiently, especially when running a managed + # emulator for the first time, so don't make it fatal. + if (e.stdout or e.stderr) and not shown_error: + print_called_process_error(e) + print("This may be transient, so continuing to wait") + shown_error = True + else: + # Some older devices (e.g. Nexus 4) return zero even when no process + # was found, so check whether we actually got any output. + if pid: + print(f"PID: {pid}") + return pid + + # Loop fairly rapidly to avoid missing a short-lived process. + await asyncio.sleep(0.2) + + +async def logcat_task(context, initial_devices): + # Gradle may need to do some large downloads of libraries and emulator + # images. This will happen during find_device in --managed mode, or find_pid + # in --connected mode. + startup_timeout = 600 + serial = await wait_for(find_device(context, initial_devices), startup_timeout) + pid = await wait_for(find_pid(serial), startup_timeout) + + # `--pid` requires API level 24 or higher. + args = [adb, "-s", serial, "logcat", "--pid", pid, "--format", "tag"] + hidden_output = [] + async with async_process( + *args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + ) as process: + while line := (await process.stdout.readline()).decode(*DECODE_ARGS): + if match := re.fullmatch(r"([A-Z])/(.*)", line, re.DOTALL): + level, message = match.groups() + else: + # If the regex doesn't match, this is probably the second or + # subsequent line of a multi-line message. Python won't produce + # such messages, but other components might. + level, message = None, line + + # Exclude high-volume messages which are rarely useful. 
+ if context.verbose < 2 and "from python test_syslog" in message: + continue + + # Put high-level messages on stderr so they're highlighted in the + # buildbot logs. This will include Python's own stderr. + stream = ( + sys.stderr + if level in ["W", "E", "F"] # WARNING, ERROR, FATAL (aka ASSERT) + else sys.stdout + ) + + # To simplify automated processing of the output, e.g. a buildbot + # posting a failure notice on a GitHub PR, we strip the level and + # tag indicators from Python's stdout and stderr. + for prefix in ["python.stdout: ", "python.stderr: "]: + if message.startswith(prefix): + global logcat_started + logcat_started = True + stream.write(message.removeprefix(prefix)) + break + else: + if context.verbose: + # Non-Python messages add a lot of noise, but they may + # sometimes help explain a failure. + stream.write(line) + else: + hidden_output.append(line) + + # If the device disconnects while logcat is running, which always + # happens in --managed mode, some versions of adb return non-zero. + # Distinguish this from a logcat startup error by checking whether we've + # received a message from Python yet. + status = await wait_for(process.wait(), timeout=1) + if status != 0 and not logcat_started: + raise CalledProcessError(status, args, "".join(hidden_output)) + + +def stop_app(serial): + run([adb, "-s", serial, "shell", "am", "force-stop", APP_ID], log=False) + + +async def gradle_task(context): + env = os.environ.copy() + if context.managed: + task_prefix = context.managed + else: + task_prefix = "connected" + env["ANDROID_SERIAL"] = context.connected + + args = [ + gradlew, "--console", "plain", f"{task_prefix}DebugAndroidTest", + "-Pandroid.testInstrumentationRunnerArguments.pythonArgs=" + + shlex.join(context.args), + ] + hidden_output = [] + try: + async with async_process( + *args, cwd=TESTBED_DIR, env=env, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + ) as process: + while line := (await process.stdout.readline()).decode(*DECODE_ARGS): + # Gradle may take several minutes to install SDK packages, so + # it's worth showing those messages even in non-verbose mode. + if context.verbose or line.startswith('Preparing "Install'): + sys.stdout.write(line) + else: + hidden_output.append(line) + + status = await wait_for(process.wait(), timeout=1) + if status == 0: + exit(0) + else: + raise CalledProcessError(status, args) + finally: + # If logcat never started, then something has gone badly wrong, so the + # user probably wants to see the Gradle output even in non-verbose mode. + if hidden_output and not logcat_started: + sys.stdout.write("".join(hidden_output)) + + # Gradle does not stop the tests when interrupted. + if context.connected: + stop_app(context.connected) + + +async def run_testbed(context): + setup_sdk() + setup_testbed() + + if context.managed: + # In this mode, Gradle will create a device with an unpredictable name. + # So we save a list of the running devices before starting Gradle, and + # find_device then waits for a new device to appear. + initial_devices = await list_devices() + else: + # In case the previous shutdown was unclean, make sure the app isn't + # running, otherwise we might show logs from a previous run. This is + # unnecessary in --managed mode, because Gradle creates a new emulator + # every time. 
+ stop_app(context.connected) + initial_devices = None + + try: + async with asyncio.TaskGroup() as tg: + tg.create_task(logcat_task(context, initial_devices)) + tg.create_task(gradle_task(context)) + except* MySystemExit as e: + raise SystemExit(*e.exceptions[0].args) from None + except* CalledProcessError as e: + # Extract it from the ExceptionGroup so it can be handled by `main`. + raise e.exceptions[0] + + +# Handle SIGTERM the same way as SIGINT. This ensures that if we're terminated +# by the buildbot worker, we'll make an attempt to clean up our subprocesses. +def install_signal_handler(): + def signal_handler(*args): + os.kill(os.getpid(), signal.SIGINT) + + signal.signal(signal.SIGTERM, signal_handler) + + +def parse_args(): parser = argparse.ArgumentParser() subcommands = parser.add_subparsers(dest="subcommand") build = subcommands.add_parser("build", help="Build everything") @@ -206,8 +561,6 @@ def main(): help="Run `make` for Android") subcommands.add_parser( "clean", help="Delete the cross-build directory") - subcommands.add_parser( - "setup-testbed", help="Download the testbed Gradle wrapper") for subcommand in build, configure_build, configure_host: subcommand.add_argument( @@ -222,15 +575,74 @@ def main(): subcommand.add_argument("args", nargs="*", help="Extra arguments to pass to `configure`") - context = parser.parse_args() + subcommands.add_parser( + "build-testbed", help="Build the testbed app") + test = subcommands.add_parser( + "test", help="Run the test suite") + test.add_argument( + "-v", "--verbose", action="count", default=0, + help="Show Gradle output, and non-Python logcat messages. " + "Use twice to include high-volume messages which are rarely useful.") + device_group = test.add_mutually_exclusive_group(required=True) + device_group.add_argument( + "--connected", metavar="SERIAL", help="Run on a connected device. " + "Connect it yourself, then get its serial from `adb devices`.") + device_group.add_argument( + "--managed", metavar="NAME", help="Run on a Gradle-managed device. " + "These are defined in `managedDevices` in testbed/app/build.gradle.kts.") + test.add_argument( + "args", nargs="*", help=f"Arguments for `python -m test`. " + f"Separate them from {SCRIPT_NAME}'s own arguments with `--`.") + + return parser.parse_args() + + +def main(): + install_signal_handler() + + # Under the buildbot, stdout is not a TTY, but we must still flush after + # every line to make sure our output appears in the correct order relative + # to the output of our subprocesses. + for stream in [sys.stdout, sys.stderr]: + stream.reconfigure(line_buffering=True) + + context = parse_args() dispatch = {"configure-build": configure_build_python, "make-build": make_build_python, "configure-host": configure_host_python, "make-host": make_host_python, "build": build_all, "clean": clean_all, - "setup-testbed": setup_testbed} - dispatch[context.subcommand](context) + "build-testbed": build_testbed, + "test": run_testbed} + + try: + result = dispatch[context.subcommand](context) + if asyncio.iscoroutine(result): + asyncio.run(result) + except CalledProcessError as e: + print_called_process_error(e) + sys.exit(1) + + +def print_called_process_error(e): + for stream_name in ["stdout", "stderr"]: + content = getattr(e, stream_name) + stream = getattr(sys, stream_name) + if content: + stream.write(content) + if not content.endswith("\n"): + stream.write("\n") + + # Format the command so it can be copied into a shell. 
shlex uses single + # quotes, so we surround the whole command with double quotes. + args_joined = ( + e.cmd if isinstance(e.cmd, str) + else " ".join(shlex.quote(str(arg)) for arg in e.cmd) + ) + print( + f'Command "{args_joined}" returned exit status {e.returncode}' + ) if __name__ == "__main__": diff --git a/Android/testbed/app/build.gradle.kts b/Android/testbed/app/build.gradle.kts index 7690d3fd86b..7e0bef58ed8 100644 --- a/Android/testbed/app/build.gradle.kts +++ b/Android/testbed/app/build.gradle.kts @@ -1,18 +1,26 @@ import com.android.build.api.variant.* +import kotlin.math.max plugins { id("com.android.application") id("org.jetbrains.kotlin.android") } -val PYTHON_DIR = File(projectDir, "../../..").canonicalPath +val PYTHON_DIR = file("../../..").canonicalPath val PYTHON_CROSS_DIR = "$PYTHON_DIR/cross-build" + val ABIS = mapOf( "arm64-v8a" to "aarch64-linux-android", "x86_64" to "x86_64-linux-android", -) +).filter { file("$PYTHON_CROSS_DIR/${it.value}").exists() } +if (ABIS.isEmpty()) { + throw GradleException( + "No Android ABIs found in $PYTHON_CROSS_DIR: see Android/README.md " + + "for building instructions." + ) +} -val PYTHON_VERSION = File("$PYTHON_DIR/Include/patchlevel.h").useLines { +val PYTHON_VERSION = file("$PYTHON_DIR/Include/patchlevel.h").useLines { for (line in it) { val match = """#define PY_VERSION\s+"(\d+\.\d+)""".toRegex().find(line) if (match != null) { @@ -22,6 +30,16 @@ val PYTHON_VERSION = File("$PYTHON_DIR/Include/patchlevel.h").useLines { throw GradleException("Failed to find Python version") } +android.ndkVersion = file("../../android-env.sh").useLines { + for (line in it) { + val match = """ndk_version=(\S+)""".toRegex().find(line) + if (match != null) { + return@useLines match.groupValues[1] + } + } + throw GradleException("Failed to find NDK version") +} + android { namespace = "org.python.testbed" @@ -38,6 +56,8 @@ android { externalNativeBuild.cmake.arguments( "-DPYTHON_CROSS_DIR=$PYTHON_CROSS_DIR", "-DPYTHON_VERSION=$PYTHON_VERSION") + + testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" } externalNativeBuild.cmake { @@ -55,41 +75,81 @@ android { kotlinOptions { jvmTarget = "1.8" } + + testOptions { + managedDevices { + localDevices { + create("minVersion") { + device = "Small Phone" + + // Managed devices have a minimum API level of 27. + apiLevel = max(27, defaultConfig.minSdk!!) + + // ATD devices are smaller and faster, but have a minimum + // API level of 30. + systemImageSource = if (apiLevel >= 30) "aosp-atd" else "aosp" + } + + create("maxVersion") { + device = "Small Phone" + apiLevel = defaultConfig.targetSdk!! + systemImageSource = "aosp-atd" + } + } + + // If the previous test run succeeded and nothing has changed, + // Gradle thinks there's no need to run it again. Override that. + afterEvaluate { + (localDevices.names + listOf("connected")).forEach { + tasks.named("${it}DebugAndroidTest") { + outputs.upToDateWhen { false } + } + } + } + } + } } dependencies { implementation("androidx.appcompat:appcompat:1.6.1") implementation("com.google.android.material:material:1.11.0") implementation("androidx.constraintlayout:constraintlayout:2.1.4") + androidTestImplementation("androidx.test.ext:junit:1.1.5") + androidTestImplementation("androidx.test:rules:1.5.0") } // Create some custom tasks to copy Python and its standard library from // elsewhere in the repository. androidComponents.onVariants { variant -> + val pyPlusVer = "python$PYTHON_VERSION" generateTask(variant, variant.sources.assets!!) 
{ into("python") { - for (triplet in ABIS.values) { - for (subDir in listOf("include", "lib")) { - into(subDir) { - from("$PYTHON_CROSS_DIR/$triplet/prefix/$subDir") - include("python$PYTHON_VERSION/**") - duplicatesStrategy = DuplicatesStrategy.EXCLUDE - } + into("include/$pyPlusVer") { + for (triplet in ABIS.values) { + from("$PYTHON_CROSS_DIR/$triplet/prefix/include/$pyPlusVer") } + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } - into("lib/python$PYTHON_VERSION") { - // Uncomment this to pick up edits from the source directory - // without having to rerun `make install`. - // from("$PYTHON_DIR/Lib") - // duplicatesStrategy = DuplicatesStrategy.INCLUDE + + into("lib/$pyPlusVer") { + // To aid debugging, the source directory takes priority. + from("$PYTHON_DIR/Lib") + + // The cross-build directory provides ABI-specific files such as + // sysconfigdata. + for (triplet in ABIS.values) { + from("$PYTHON_CROSS_DIR/$triplet/prefix/lib/$pyPlusVer") + } into("site-packages") { from("$projectDir/src/main/python") } + + duplicatesStrategy = DuplicatesStrategy.EXCLUDE + exclude("**/__pycache__") } } - exclude("**/__pycache__") } generateTask(variant, variant.sources.jniLibs!!) { diff --git a/Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt b/Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt new file mode 100644 index 00000000000..0e888ab71d8 --- /dev/null +++ b/Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt @@ -0,0 +1,35 @@ +package org.python.testbed + +import androidx.test.annotation.UiThreadTest +import androidx.test.platform.app.InstrumentationRegistry +import androidx.test.ext.junit.runners.AndroidJUnit4 + +import org.junit.Test +import org.junit.runner.RunWith + +import org.junit.Assert.* + + +@RunWith(AndroidJUnit4::class) +class PythonSuite { + @Test + @UiThreadTest + fun testPython() { + val start = System.currentTimeMillis() + try { + val context = + InstrumentationRegistry.getInstrumentation().targetContext + val args = + InstrumentationRegistry.getArguments().getString("pythonArgs", "") + val status = PythonTestRunner(context).run(args) + assertEquals(0, status) + } finally { + // Make sure the process lives long enough for the test script to + // detect it (see `find_pid` in android.py). + val delay = 2000 - (System.currentTimeMillis() - start) + if (delay > 0) { + Thread.sleep(delay) + } + } + } +} diff --git a/Android/testbed/app/src/main/c/main_activity.c b/Android/testbed/app/src/main/c/main_activity.c index 73aba4164d0..53470904899 100644 --- a/Android/testbed/app/src/main/c/main_activity.c +++ b/Android/testbed/app/src/main/c/main_activity.c @@ -84,7 +84,7 @@ static char *redirect_stream(StreamInfo *si) { return 0; } -JNIEXPORT void JNICALL Java_org_python_testbed_MainActivity_redirectStdioToLogcat( +JNIEXPORT void JNICALL Java_org_python_testbed_PythonTestRunner_redirectStdioToLogcat( JNIEnv *env, jobject obj ) { for (StreamInfo *si = STREAMS; si->file; si++) { @@ -115,7 +115,7 @@ static void throw_status(JNIEnv *env, PyStatus status) { throw_runtime_exception(env, status.err_msg ? 
status.err_msg : ""); } -JNIEXPORT void JNICALL Java_org_python_testbed_MainActivity_runPython( +JNIEXPORT int JNICALL Java_org_python_testbed_PythonTestRunner_runPython( JNIEnv *env, jobject obj, jstring home, jstring runModule ) { PyConfig config; @@ -125,13 +125,13 @@ JNIEXPORT void JNICALL Java_org_python_testbed_MainActivity_runPython( status = set_config_string(env, &config, &config.home, home); if (PyStatus_Exception(status)) { throw_status(env, status); - return; + return 1; } status = set_config_string(env, &config, &config.run_module, runModule); if (PyStatus_Exception(status)) { throw_status(env, status); - return; + return 1; } // Some tests generate SIGPIPE and SIGXFSZ, which should be ignored. @@ -140,8 +140,8 @@ JNIEXPORT void JNICALL Java_org_python_testbed_MainActivity_runPython( status = Py_InitializeFromConfig(&config); if (PyStatus_Exception(status)) { throw_status(env, status); - return; + return 1; } - Py_RunMain(); + return Py_RunMain(); } diff --git a/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt b/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt index 5a590d5d04e..c4bf6cbe83d 100644 --- a/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt +++ b/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt @@ -1,38 +1,56 @@ package org.python.testbed +import android.content.Context import android.os.* import android.system.Os import android.widget.TextView import androidx.appcompat.app.* import java.io.* + +// Launching the tests from an activity is OK for a quick check, but for +// anything more complicated it'll be more convenient to use `android.py test` +// to launch the tests via PythonSuite. class MainActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) setContentView(R.layout.activity_main) + val status = PythonTestRunner(this).run("-W -uall") + findViewById(R.id.tvHello).text = "Exit status $status" + } +} + + +class PythonTestRunner(val context: Context) { + /** @param args Extra arguments for `python -m test`. + * @return The Python exit status: zero if the tests passed, nonzero if + * they failed. */ + fun run(args: String = "") : Int { + Os.setenv("PYTHON_ARGS", args, true) // Python needs this variable to help it find the temporary directory, // but Android only sets it on API level 33 and later. - Os.setenv("TMPDIR", cacheDir.toString(), false) + Os.setenv("TMPDIR", context.cacheDir.toString(), false) val pythonHome = extractAssets() System.loadLibrary("main_activity") redirectStdioToLogcat() - runPython(pythonHome.toString(), "main") - findViewById(R.id.tvHello).text = "Python complete" + + // The main module is in src/main/python/main.py. 
+ return runPython(pythonHome.toString(), "main") } private fun extractAssets() : File { - val pythonHome = File(filesDir, "python") + val pythonHome = File(context.filesDir, "python") if (pythonHome.exists() && !pythonHome.deleteRecursively()) { throw RuntimeException("Failed to delete $pythonHome") } - extractAssetDir("python", filesDir) + extractAssetDir("python", context.filesDir) return pythonHome } private fun extractAssetDir(path: String, targetDir: File) { - val names = assets.list(path) + val names = context.assets.list(path) ?: throw RuntimeException("Failed to list $path") val targetSubdir = File(targetDir, path) if (!targetSubdir.mkdirs()) { @@ -43,7 +61,7 @@ class MainActivity : AppCompatActivity() { val subPath = "$path/$name" val input: InputStream try { - input = assets.open(subPath) + input = context.assets.open(subPath) } catch (e: FileNotFoundException) { extractAssetDir(subPath, targetDir) continue @@ -57,5 +75,5 @@ class MainActivity : AppCompatActivity() { } private external fun redirectStdioToLogcat() - private external fun runPython(home: String, runModule: String) -} \ No newline at end of file + private external fun runPython(home: String, runModule: String) : Int +} diff --git a/Android/testbed/app/src/main/python/main.py b/Android/testbed/app/src/main/python/main.py index a1b6def34ed..d6941b14412 100644 --- a/Android/testbed/app/src/main/python/main.py +++ b/Android/testbed/app/src/main/python/main.py @@ -1,17 +1,32 @@ +import os import runpy +import shlex import signal import sys # Some tests use SIGUSR1, but that's blocked by default in an Android app in -# order to make it available to `sigwait` in the "Signal Catcher" thread. That -# thread's functionality is only relevant to the JVM ("forcing GC (no HPROF) and -# profile save"), so disabling it should not weaken the tests. +# order to make it available to `sigwait` in the Signal Catcher thread. +# (https://cs.android.com/android/platform/superproject/+/android14-qpr3-release:art/runtime/signal_catcher.cc). +# That thread's functionality is only useful for debugging the JVM, so disabling +# it should not weaken the tests. +# +# There's no safe way of stopping the thread completely (#123982), but simply +# unblocking SIGUSR1 is enough to fix most tests. +# +# However, in tests that generate multiple different signals in quick +# succession, it's possible for SIGUSR1 to arrive while the main thread is busy +# running the C-level handler for a different signal. In that case, the SIGUSR1 +# may be sent to the Signal Catcher thread instead, which will generate a log +# message containing the text "reacting to signal". +# +# Such tests may need to be changed in one of the following ways: +# * Use a signal other than SIGUSR1 (e.g. test_stress_delivery_simultaneous in +# test_signal.py). +# * Send the signal to a specific thread rather than the whole process (e.g. +# test_signals in test_threadsignals.py. signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGUSR1]) -# To run specific tests, or pass any other arguments to the test suite, edit -# this command line. -sys.argv[1:] = [ - "--use", "all,-cpu", - "--verbose3", -] +sys.argv[1:] = shlex.split(os.environ["PYTHON_ARGS"]) + +# The test module will call sys.exit to indicate whether the tests passed. 
runpy.run_module("test") diff --git a/Android/testbed/build.gradle.kts b/Android/testbed/build.gradle.kts index 53f4a67287f..2dad1501c24 100644 --- a/Android/testbed/build.gradle.kts +++ b/Android/testbed/build.gradle.kts @@ -1,5 +1,5 @@ // Top-level build file where you can add configuration options common to all sub-projects/modules. plugins { - id("com.android.application") version "8.2.2" apply false + id("com.android.application") version "8.4.2" apply false id("org.jetbrains.kotlin.android") version "1.9.22" apply false -} \ No newline at end of file +} diff --git a/Android/testbed/gradle.properties b/Android/testbed/gradle.properties index 3c5031eb7d6..e9f345c8c26 100644 --- a/Android/testbed/gradle.properties +++ b/Android/testbed/gradle.properties @@ -20,4 +20,9 @@ kotlin.code.style=official # Enables namespacing of each library's R class so that its R class includes only the # resources declared in the library itself and none from the library's dependencies, # thereby reducing the size of the R class for that library -android.nonTransitiveRClass=true \ No newline at end of file +android.nonTransitiveRClass=true + +# By default, the app will be uninstalled after the tests finish (apparently +# after 10 seconds in case of an unclean shutdown). We disable this, because +# when using android.py it can conflict with the installation of the next run. +android.injected.androidTest.leaveApksInstalledAfterRun=true diff --git a/Android/testbed/gradle/wrapper/gradle-wrapper.properties b/Android/testbed/gradle/wrapper/gradle-wrapper.properties index 2dc3339a3ef..57b2f57cc86 100644 --- a/Android/testbed/gradle/wrapper/gradle-wrapper.properties +++ b/Android/testbed/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ #Mon Feb 19 20:29:06 GMT 2024 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/Doc/.ruff.toml b/Doc/.ruff.toml new file mode 100644 index 00000000000..111ce03b91d --- /dev/null +++ b/Doc/.ruff.toml @@ -0,0 +1,42 @@ +target-version = "py312" # Align with the version in oldest_supported_sphinx +fix = true +output-format = "full" +line-length = 79 +extend-exclude = [ + "includes/*", + # Temporary exclusions: + "tools/extensions/pyspecific.py", +] + +[lint] +preview = true +select = [ + "C4", # flake8-comprehensions + "B", # flake8-bugbear + "E", # pycodestyle + "F", # pyflakes + "FA", # flake8-future-annotations + "FLY", # flynt + "FURB", # refurb + "G", # flake8-logging-format + "I", # isort + "LOG", # flake8-logging + "N", # pep8-naming + "PERF", # perflint + "PGH", # pygrep-hooks + "PT", # flake8-pytest-style + "TCH", # flake8-type-checking + "UP", # pyupgrade + "W", # pycodestyle +] +ignore = [ + "E501", # Ignore line length errors (we use auto-formatting) +] + +[format] +preview = true +quote-style = "preserve" +docstring-code-format = true +exclude = [ + "tools/extensions/lexers/*", +] diff --git a/Doc/Makefile b/Doc/Makefile index c7076875483..a090ee5ba92 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -6,6 +6,7 @@ # You can set these variables from the command line. 
PYTHON = python3 VENVDIR = ./venv +UV = uv SPHINXBUILD = PATH=$(VENVDIR)/bin:$$PATH sphinx-build BLURB = PATH=$(VENVDIR)/bin:$$PATH blurb JOBS = auto @@ -143,21 +144,17 @@ pydoc-topics: build .PHONY: gettext gettext: BUILDER = gettext -gettext: SPHINXOPTS += '-d build/doctrees-gettext' +gettext: SPHINXOPTS += -d build/doctrees-gettext gettext: build .PHONY: htmlview htmlview: html $(PYTHON) -c "import os, webbrowser; webbrowser.open('file://' + os.path.realpath('build/html/index.html'))" -.PHONY: ensure-sphinx-autobuild -ensure-sphinx-autobuild: venv - $(call ensure_package,sphinx-autobuild) - .PHONY: htmllive -htmllive: SPHINXBUILD = $(VENVDIR)/bin/sphinx-autobuild +htmllive: SPHINXBUILD = PATH=$(VENVDIR)/bin:$$PATH sphinx-autobuild htmllive: SPHINXOPTS = --re-ignore="/venv/" --open-browser --delay 0 -htmllive: ensure-sphinx-autobuild html +htmllive: _ensure-sphinx-autobuild html .PHONY: clean clean: clean-venv @@ -174,83 +171,125 @@ venv: echo "To recreate it, remove it first with \`make clean-venv'."; \ else \ echo "Creating venv in $(VENVDIR)"; \ - if uv --version > /dev/null; then \ - uv venv $(VENVDIR); \ - VIRTUAL_ENV=$(VENVDIR) uv pip install -r $(REQUIREMENTS); \ + if $(UV) --version >/dev/null 2>&1; then \ + $(UV) venv $(VENVDIR); \ + VIRTUAL_ENV=$(VENVDIR) $(UV) pip install -r $(REQUIREMENTS); \ else \ $(PYTHON) -m venv $(VENVDIR); \ $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \ $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \ - echo "The venv has been created in the $(VENVDIR) directory"; \ fi; \ + echo "The venv has been created in the $(VENVDIR) directory"; \ fi +.PHONY: dist-no-html +dist-no-html: dist-text dist-pdf dist-epub dist-texinfo + .PHONY: dist dist: rm -rf dist mkdir -p dist - + $(MAKE) dist-html + $(MAKE) dist-text + $(MAKE) dist-pdf + $(MAKE) dist-epub + $(MAKE) dist-texinfo + +.PHONY: dist-html +dist-html: # archive the HTML - make html + @echo "Building HTML..." + mkdir -p dist + rm -rf build/html + find dist -name 'python-$(DISTVERSION)-docs-html*' -exec rm -rf {} \; + $(MAKE) html cp -pPR build/html dist/python-$(DISTVERSION)-docs-html tar -C dist -cf dist/python-$(DISTVERSION)-docs-html.tar python-$(DISTVERSION)-docs-html bzip2 -9 -k dist/python-$(DISTVERSION)-docs-html.tar (cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-html.zip python-$(DISTVERSION)-docs-html) rm -r dist/python-$(DISTVERSION)-docs-html rm dist/python-$(DISTVERSION)-docs-html.tar + @echo "Build finished and archived!" +.PHONY: dist-text +dist-text: # archive the text build - make text + @echo "Building text..." + mkdir -p dist + rm -rf build/text + find dist -name 'python-$(DISTVERSION)-docs-text*' -exec rm -rf {} \; + $(MAKE) text cp -pPR build/text dist/python-$(DISTVERSION)-docs-text tar -C dist -cf dist/python-$(DISTVERSION)-docs-text.tar python-$(DISTVERSION)-docs-text bzip2 -9 -k dist/python-$(DISTVERSION)-docs-text.tar (cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-text.zip python-$(DISTVERSION)-docs-text) rm -r dist/python-$(DISTVERSION)-docs-text rm dist/python-$(DISTVERSION)-docs-text.tar + @echo "Build finished and archived!" +.PHONY: dist-pdf +dist-pdf: # archive the A4 latex + @echo "Building LaTeX (A4 paper)..." 
+ mkdir -p dist rm -rf build/latex - make latex PAPER=a4 - -sed -i 's/makeindex/makeindex -q/' build/latex/Makefile - (cd build/latex; make clean && make all-pdf && make FMT=pdf zip bz2) + find dist -name 'python-$(DISTVERSION)-docs-pdf*' -exec rm -rf {} \; + $(MAKE) latex PAPER=a4 + # remove zip & bz2 dependency on all-pdf, + # as otherwise the full latexmk process is run twice. + # ($$ is needed to escape the $; https://www.gnu.org/software/make/manual/make.html#Basics-of-Variable-References) + -sed -i 's/: all-$$(FMT)/:/' build/latex/Makefile + (cd build/latex; $(MAKE) clean && $(MAKE) --jobs=$$((`nproc`+1)) --output-sync LATEXMKOPTS='-quiet' all-pdf && $(MAKE) FMT=pdf zip bz2) cp build/latex/docs-pdf.zip dist/python-$(DISTVERSION)-docs-pdf-a4.zip cp build/latex/docs-pdf.tar.bz2 dist/python-$(DISTVERSION)-docs-pdf-a4.tar.bz2 + @echo "Build finished and archived!" - # archive the letter latex - rm -rf build/latex - make latex PAPER=letter - -sed -i 's/makeindex/makeindex -q/' build/latex/Makefile - (cd build/latex; make clean && make all-pdf && make FMT=pdf zip bz2) - cp build/latex/docs-pdf.zip dist/python-$(DISTVERSION)-docs-pdf-letter.zip - cp build/latex/docs-pdf.tar.bz2 dist/python-$(DISTVERSION)-docs-pdf-letter.tar.bz2 - +.PHONY: dist-epub +dist-epub: # copy the epub build + @echo "Building EPUB..." + mkdir -p dist rm -rf build/epub - make epub + rm -f dist/python-$(DISTVERSION)-docs.epub + $(MAKE) epub cp -pPR build/epub/Python.epub dist/python-$(DISTVERSION)-docs.epub + @echo "Build finished and archived!" +.PHONY: dist-texinfo +dist-texinfo: # archive the texinfo build + @echo "Building Texinfo..." + mkdir -p dist rm -rf build/texinfo - make texinfo - make info --directory=build/texinfo + find dist -name 'python-$(DISTVERSION)-docs-texinfo*' -exec rm -rf {} \; + $(MAKE) texinfo + $(MAKE) info --directory=build/texinfo cp -pPR build/texinfo dist/python-$(DISTVERSION)-docs-texinfo tar -C dist -cf dist/python-$(DISTVERSION)-docs-texinfo.tar python-$(DISTVERSION)-docs-texinfo bzip2 -9 -k dist/python-$(DISTVERSION)-docs-texinfo.tar (cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-texinfo.zip python-$(DISTVERSION)-docs-texinfo) rm -r dist/python-$(DISTVERSION)-docs-texinfo rm dist/python-$(DISTVERSION)-docs-texinfo.tar + @echo "Build finished and archived!" 
-define ensure_package - if uv --version > /dev/null; then \ - $(VENVDIR)/bin/python3 -m $(1) --version > /dev/null || VIRTUAL_ENV=$(VENVDIR) uv pip install $(1); \ +.PHONY: _ensure-package +_ensure-package: venv + if $(UV) --version >/dev/null 2>&1; then \ + VIRTUAL_ENV=$(VENVDIR) $(UV) pip install $(PACKAGE); \ else \ - $(VENVDIR)/bin/python3 -m $(1) --version > /dev/null || $(VENVDIR)/bin/python3 -m pip install $(1); \ + $(VENVDIR)/bin/python3 -m pip install $(PACKAGE); \ fi -endef + +.PHONY: _ensure-pre-commit +_ensure-pre-commit: + $(MAKE) _ensure-package PACKAGE=pre-commit + +.PHONY: _ensure-sphinx-autobuild +_ensure-sphinx-autobuild: + $(MAKE) _ensure-package PACKAGE=sphinx-autobuild .PHONY: check -check: venv - $(call ensure_package,pre_commit) +check: _ensure-pre-commit $(VENVDIR)/bin/python3 -m pre_commit run --all-files .PHONY: serve @@ -266,13 +305,15 @@ serve: # for development releases: always build .PHONY: autobuild-dev +autobuild-dev: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short) autobuild-dev: - make dist SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' + $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION) -# for quick rebuilds (HTML only) +# for HTML-only rebuilds .PHONY: autobuild-dev-html +autobuild-dev-html: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short) autobuild-dev-html: - make html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' + $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION) # for stable releases: only build if not in pre-release stage (alpha, beta) # release candidate downloads are okay, since the stable tree can be in that stage @@ -282,7 +323,7 @@ autobuild-stable: echo "Not building; $(DISTVERSION) is not a release version."; \ exit 1;; \ esac - @make autobuild-dev + @$(MAKE) autobuild-dev .PHONY: autobuild-stable-html autobuild-stable-html: @@ -290,4 +331,4 @@ autobuild-stable-html: echo "Not building; $(DISTVERSION) is not a release version."; \ exit 1;; \ esac - @make autobuild-dev-html + @$(MAKE) autobuild-dev-html diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst index 834aae9372f..3201bdc8269 100644 --- a/Doc/c-api/arg.rst +++ b/Doc/c-api/arg.rst @@ -280,10 +280,10 @@ Numbers length 1, to a C :c:expr:`int`. ``f`` (:class:`float`) [float] - Convert a Python floating point number to a C :c:expr:`float`. + Convert a Python floating-point number to a C :c:expr:`float`. ``d`` (:class:`float`) [double] - Convert a Python floating point number to a C :c:expr:`double`. + Convert a Python floating-point number to a C :c:expr:`double`. ``D`` (:class:`complex`) [Py_complex] Convert a Python complex number to a C :c:type:`Py_complex` structure. @@ -642,10 +642,10 @@ Building values object of length 1. ``d`` (:class:`float`) [double] - Convert a C :c:expr:`double` to a Python floating point number. + Convert a C :c:expr:`double` to a Python floating-point number. ``f`` (:class:`float`) [float] - Convert a C :c:expr:`float` to a Python floating point number. + Convert a C :c:expr:`float` to a Python floating-point number. ``D`` (:class:`complex`) [Py_complex \*] Convert a C :c:type:`Py_complex` structure to a Python complex number. 
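As a quick illustration of the ``f``/``d`` format units touched in the arg.rst hunk above (a minimal sketch under assumed names, not part of this patch), a hypothetical extension function ``square`` could parse one Python floating-point argument into a C ``double`` and build a Python float from the result::

    #include <Python.h>

    /* Hypothetical helper: "d" in PyArg_ParseTuple converts a Python
       floating-point number to a C double; "d" in Py_BuildValue converts
       a C double back into a Python float. */
    static PyObject *
    square(PyObject *self, PyObject *args)
    {
        double x;

        if (!PyArg_ParseTuple(args, "d", &x)) {
            return NULL;   /* exception already set by the converter */
        }
        return Py_BuildValue("d", x * x);
    }

Calling such a function from Python with a non-numeric argument would raise ``TypeError`` from the converter, which is why the sketch simply returns ``NULL`` without setting a new exception.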
diff --git a/Doc/c-api/buffer.rst b/Doc/c-api/buffer.rst index 9500fe465c7..dc43a3d5fcb 100644 --- a/Doc/c-api/buffer.rst +++ b/Doc/c-api/buffer.rst @@ -244,7 +244,6 @@ The following fields are not influenced by *flags* and must always be filled in with the correct values: :c:member:`~Py_buffer.obj`, :c:member:`~Py_buffer.buf`, :c:member:`~Py_buffer.len`, :c:member:`~Py_buffer.itemsize`, :c:member:`~Py_buffer.ndim`. - readonly, format ~~~~~~~~~~~~~~~~ @@ -253,7 +252,8 @@ readonly, format Controls the :c:member:`~Py_buffer.readonly` field. If set, the exporter MUST provide a writable buffer or else report failure. Otherwise, the exporter MAY provide either a read-only or writable buffer, but the choice - MUST be consistent for all consumers. + MUST be consistent for all consumers. For example, :c:expr:`PyBUF_SIMPLE | PyBUF_WRITABLE` + can be used to request a simple writable buffer. .. c:macro:: PyBUF_FORMAT @@ -265,8 +265,9 @@ readonly, format Since :c:macro:`PyBUF_SIMPLE` is defined as 0, :c:macro:`PyBUF_WRITABLE` can be used as a stand-alone flag to request a simple writable buffer. -:c:macro:`PyBUF_FORMAT` can be \|'d to any of the flags except :c:macro:`PyBUF_SIMPLE`. -The latter already implies format ``B`` (unsigned bytes). +:c:macro:`PyBUF_FORMAT` must be \|'d to any of the flags except :c:macro:`PyBUF_SIMPLE`, because +the latter already implies format ``B`` (unsigned bytes). :c:macro:`!PyBUF_FORMAT` cannot be +used on its own. shape, strides, suboffsets diff --git a/Doc/c-api/bytearray.rst b/Doc/c-api/bytearray.rst index 456f7d89bca..9045689a6be 100644 --- a/Doc/c-api/bytearray.rst +++ b/Doc/c-api/bytearray.rst @@ -42,17 +42,22 @@ Direct API functions Return a new bytearray object from any object, *o*, that implements the :ref:`buffer protocol `. + On failure, return ``NULL`` with an exception set. + .. c:function:: PyObject* PyByteArray_FromStringAndSize(const char *string, Py_ssize_t len) - Create a new bytearray object from *string* and its length, *len*. On - failure, ``NULL`` is returned. + Create a new bytearray object from *string* and its length, *len*. + + On failure, return ``NULL`` with an exception set. .. c:function:: PyObject* PyByteArray_Concat(PyObject *a, PyObject *b) Concat bytearrays *a* and *b* and return a new bytearray with the result. + On failure, return ``NULL`` with an exception set. + .. c:function:: Py_ssize_t PyByteArray_Size(PyObject *bytearray) diff --git a/Doc/c-api/cell.rst b/Doc/c-api/cell.rst index f8cd0344fdd..61eb994c370 100644 --- a/Doc/c-api/cell.rst +++ b/Doc/c-api/cell.rst @@ -39,7 +39,8 @@ Cell objects are not likely to be useful elsewhere. .. c:function:: PyObject* PyCell_Get(PyObject *cell) - Return the contents of the cell *cell*. + Return the contents of the cell *cell*, which can be ``NULL``. + If *cell* is not a cell object, returns ``NULL`` with an exception set. .. c:function:: PyObject* PyCell_GET(PyObject *cell) @@ -52,8 +53,10 @@ Cell objects are not likely to be useful elsewhere. Set the contents of the cell object *cell* to *value*. This releases the reference to any current content of the cell. *value* may be ``NULL``. *cell* - must be non-``NULL``; if it is not a cell object, ``-1`` will be returned. On - success, ``0`` will be returned. + must be non-``NULL``. + + On success, return ``0``. + If *cell* is not a cell object, set an exception and return ``-1``. .. 
c:function:: void PyCell_SET(PyObject *cell, PyObject *value) diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst index 968c472219c..6ae6bfe4aa6 100644 --- a/Doc/c-api/code.rst +++ b/Doc/c-api/code.rst @@ -96,8 +96,8 @@ bound into a function. Return the line number of the instruction that occurs on or before ``byte_offset`` and ends after it. If you just need the line number of a frame, use :c:func:`PyFrame_GetLineNumber` instead. - For efficiently iterating over the line numbers in a code object, use `the API described in PEP 626 - `_. + For efficiently iterating over the line numbers in a code object, use :pep:`the API described in PEP 626 + <0626#out-of-process-debuggers-and-profilers>`. .. c:function:: int PyCode_Addr2Location(PyObject *co, int byte_offset, int *start_line, int *start_column, int *end_line, int *end_column) diff --git a/Doc/c-api/complex.rst b/Doc/c-api/complex.rst index 5a047486907..67d0c5f144e 100644 --- a/Doc/c-api/complex.rst +++ b/Doc/c-api/complex.rst @@ -25,12 +25,16 @@ pointers. This is consistent throughout the API. The C structure which corresponds to the value portion of a Python complex number object. Most of the functions for dealing with complex number objects - use structures of this type as input or output values, as appropriate. It is - defined as:: + use structures of this type as input or output values, as appropriate. + + .. c:member:: double real + double imag + + The structure is defined as:: typedef struct { - double real; - double imag; + double real; + double imag; } Py_complex; @@ -106,11 +110,13 @@ Complex Numbers as Python Objects .. c:function:: PyObject* PyComplex_FromCComplex(Py_complex v) Create a new Python complex number object from a C :c:type:`Py_complex` value. + Return ``NULL`` with an exception set on error. .. c:function:: PyObject* PyComplex_FromDoubles(double real, double imag) Return a new :c:type:`PyComplexObject` object from *real* and *imag*. + Return ``NULL`` with an exception set on error. .. c:function:: double PyComplex_RealAsDouble(PyObject *op) @@ -121,7 +127,9 @@ Complex Numbers as Python Objects :meth:`~object.__complex__` method, this method will first be called to convert *op* to a Python complex number object. If :meth:`!__complex__` is not defined then it falls back to call :c:func:`PyFloat_AsDouble` and - returns its result. Upon failure, this method returns ``-1.0``, so one + returns its result. + + Upon failure, this method returns ``-1.0`` with an exception set, so one should call :c:func:`PyErr_Occurred` to check for errors. .. versionchanged:: 3.13 @@ -135,8 +143,10 @@ Complex Numbers as Python Objects :meth:`~object.__complex__` method, this method will first be called to convert *op* to a Python complex number object. If :meth:`!__complex__` is not defined then it falls back to call :c:func:`PyFloat_AsDouble` and - returns ``0.0`` on success. Upon failure, this method returns ``-1.0``, so - one should call :c:func:`PyErr_Occurred` to check for errors. + returns ``0.0`` on success. + + Upon failure, this method returns ``-1.0`` with an exception set, so one + should call :c:func:`PyErr_Occurred` to check for errors. .. versionchanged:: 3.13 Use :meth:`~object.__complex__` if available. @@ -149,8 +159,11 @@ Complex Numbers as Python Objects method, this method will first be called to convert *op* to a Python complex number object. If :meth:`!__complex__` is not defined then it falls back to :meth:`~object.__float__`. If :meth:`!__float__` is not defined then it falls back - to :meth:`~object.__index__`. 
Upon failure, this method returns ``-1.0`` as a real - value. + to :meth:`~object.__index__`. + + Upon failure, this method returns :c:type:`Py_complex` + with :c:member:`~Py_complex.real` set to ``-1.0`` and with an exception set, so one + should call :c:func:`PyErr_Occurred` to check for errors. .. versionchanged:: 3.8 Use :meth:`~object.__index__` if available. diff --git a/Doc/c-api/datetime.rst b/Doc/c-api/datetime.rst index 97522da7734..d2d4d5309c7 100644 --- a/Doc/c-api/datetime.rst +++ b/Doc/c-api/datetime.rst @@ -318,10 +318,10 @@ Macros for the convenience of modules implementing the DB API: .. c:function:: PyObject* PyDateTime_FromTimestamp(PyObject *args) Create and return a new :class:`datetime.datetime` object given an argument - tuple suitable for passing to :meth:`datetime.datetime.fromtimestamp()`. + tuple suitable for passing to :meth:`datetime.datetime.fromtimestamp`. .. c:function:: PyObject* PyDate_FromTimestamp(PyObject *args) Create and return a new :class:`datetime.date` object given an argument - tuple suitable for passing to :meth:`datetime.date.fromtimestamp()`. + tuple suitable for passing to :meth:`datetime.date.fromtimestamp`. diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst index 499bfb47cc4..fc2336d120c 100644 --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -34,7 +34,7 @@ propagated, additional calls into the Python/C API may not behave as intended and may fail in mysterious ways. .. note:: - The error indicator is **not** the result of :func:`sys.exc_info()`. + The error indicator is **not** the result of :func:`sys.exc_info`. The former corresponds to an exception that is not yet caught (and is therefore still propagating), while the latter returns an exception after it is caught (and has therefore stopped propagating). @@ -733,7 +733,7 @@ Exception Classes This creates a class object derived from :exc:`Exception` (accessible in C as :c:data:`PyExc_Exception`). - The :attr:`!__module__` attribute of the new class is set to the first part (up + The :attr:`~type.__module__` attribute of the new class is set to the first part (up to the last dot) of the *name* argument, and the class name is set to the last part (after the last dot). The *base* argument can be used to specify alternate base classes; it can either be only one class or a tuple of classes. The *dict* @@ -1004,6 +1004,7 @@ the variables: single: PyExc_OverflowError (C var) single: PyExc_PermissionError (C var) single: PyExc_ProcessLookupError (C var) + single: PyExc_PythonFinalizationError (C var) single: PyExc_RecursionError (C var) single: PyExc_ReferenceError (C var) single: PyExc_RuntimeError (C var) @@ -1096,6 +1097,8 @@ the variables: +-----------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_ProcessLookupError` | :exc:`ProcessLookupError` | | +-----------------------------------------+---------------------------------+----------+ +| :c:data:`PyExc_PythonFinalizationError` | :exc:`PythonFinalizationError` | | ++-----------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_RecursionError` | :exc:`RecursionError` | | +-----------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_ReferenceError` | :exc:`ReferenceError` | | diff --git a/Doc/c-api/float.rst b/Doc/c-api/float.rst index 4f6ac0d8175..1da37a5bcae 100644 --- a/Doc/c-api/float.rst +++ b/Doc/c-api/float.rst @@ -2,20 +2,20 @@ .. 
_floatobjects: -Floating Point Objects +Floating-Point Objects ====================== -.. index:: pair: object; floating point +.. index:: pair: object; floating-point .. c:type:: PyFloatObject - This subtype of :c:type:`PyObject` represents a Python floating point object. + This subtype of :c:type:`PyObject` represents a Python floating-point object. .. c:var:: PyTypeObject PyFloat_Type - This instance of :c:type:`PyTypeObject` represents the Python floating point + This instance of :c:type:`PyTypeObject` represents the Python floating-point type. This is the same object as :class:`float` in the Python layer. @@ -45,7 +45,7 @@ Floating Point Objects .. c:function:: double PyFloat_AsDouble(PyObject *pyfloat) Return a C :c:expr:`double` representation of the contents of *pyfloat*. If - *pyfloat* is not a Python floating point object but has a :meth:`~object.__float__` + *pyfloat* is not a Python floating-point object but has a :meth:`~object.__float__` method, this method will first be called to convert *pyfloat* into a float. If :meth:`!__float__` is not defined then it falls back to :meth:`~object.__index__`. This method returns ``-1.0`` upon failure, so one should call diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index 1054b38cb92..8108a5015be 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -190,7 +190,7 @@ Importing Modules .. versionadded:: 3.2 .. versionchanged:: 3.3 - Uses :func:`!imp.source_from_cache()` in calculating the source path if + Uses :func:`!imp.source_from_cache` in calculating the source path if only the bytecode path is provided. .. versionchanged:: 3.12 No longer uses the removed :mod:`!imp` module. diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 1fab3f577f2..fd97d1d6ec3 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -1218,7 +1218,7 @@ All of the following functions must be called after :c:func:`Py_Initialize`. .. c:function:: void PyThreadState_DeleteCurrent(void) Destroy the current thread state and release the global interpreter lock. - Like :c:func:`PyThreadState_Delete`, the global interpreter lock need not + Like :c:func:`PyThreadState_Delete`, the global interpreter lock must be held. The thread state must have been reset with a previous call to :c:func:`PyThreadState_Clear`. diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 5195f6cccfe..918c8669e8f 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -321,7 +321,7 @@ PyPreConfig * Set :c:member:`PyConfig.filesystem_encoding` to ``"mbcs"``, * Set :c:member:`PyConfig.filesystem_errors` to ``"replace"``. - Initialized the from :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment + Initialized from the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment variable value. Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for @@ -509,7 +509,7 @@ PyConfig The :c:func:`PyConfig_Read` function only parses :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` is set to ``2`` after arguments are parsed. Since Python arguments are - strippped from :c:member:`PyConfig.argv`, parsing arguments twice would + stripped from :c:member:`PyConfig.argv`, parsing arguments twice would parse the application options as Python options. :ref:`Preinitialize Python ` if needed. @@ -1041,7 +1041,7 @@ PyConfig The :c:func:`PyConfig_Read` function only parses :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` is set to ``2`` after arguments are parsed. 
Since Python arguments are - strippped from :c:member:`PyConfig.argv`, parsing arguments twice would + stripped from :c:member:`PyConfig.argv`, parsing arguments twice would parse the application options as Python options. Default: ``1`` in Python mode, ``0`` in isolated mode. diff --git a/Doc/c-api/list.rst b/Doc/c-api/list.rst index 53eb54d3e10..758415a76e5 100644 --- a/Doc/c-api/list.rst +++ b/Doc/c-api/list.rst @@ -38,9 +38,12 @@ List Objects .. note:: If *len* is greater than zero, the returned list object's items are - set to ``NULL``. Thus you cannot use abstract API functions such as - :c:func:`PySequence_SetItem` or expose the object to Python code before - setting all items to a real object with :c:func:`PyList_SetItem`. + set to ``NULL``. Thus you cannot use abstract API functions such as + :c:func:`PySequence_SetItem` or expose the object to Python code before + setting all items to a real object with :c:func:`PyList_SetItem` or + :c:func:`PyList_SET_ITEM()`. The following APIs are safe APIs before + the list is fully initialized: :c:func:`PyList_SetItem()` and :c:func:`PyList_SET_ITEM()`. + .. c:function:: Py_ssize_t PyList_Size(PyObject *list) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 522c028cfb8..c9e277680ab 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -139,7 +139,6 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionadded:: 3.13 -.. XXX alias PyLong_AS_LONG (for now) .. c:function:: long PyLong_AsLong(PyObject *obj) .. index:: @@ -161,6 +160,16 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.10 This function will no longer use :meth:`~object.__int__`. + .. c:namespace:: NULL + + .. c:function:: long PyLong_AS_LONG(PyObject *obj) + + A :term:`soft deprecated` alias. + Exactly equivalent to the preferred ``PyLong_AsLong``. In particular, + it can fail with :exc:`OverflowError` or another exception. + + .. deprecated:: 3.14 + The function is soft deprecated. .. c:function:: int PyLong_AsInt(PyObject *obj) @@ -405,14 +414,13 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. Passing zero to *n_bytes* will return the size of a buffer that would be large enough to hold the value. This may be larger than technically - necessary, but not unreasonably so. + necessary, but not unreasonably so. If *n_bytes=0*, *buffer* may be + ``NULL``. .. note:: Passing *n_bytes=0* to this function is not an accurate way to determine - the bit length of a value. - - If *n_bytes=0*, *buffer* may be ``NULL``. + the bit length of the value. To get at the entire Python value of an unknown size, the function can be called twice: first to determine the buffer size, then to fill it:: @@ -453,6 +461,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. Currently, ``-1`` corresponds to ``Py_ASNATIVEBYTES_NATIVE_ENDIAN | Py_ASNATIVEBYTES_UNSIGNED_BUFFER``. + .. c:namespace:: NULL + ============================================= ====== Flag Value ============================================= ====== @@ -462,6 +472,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. c:macro:: Py_ASNATIVEBYTES_NATIVE_ENDIAN ``3`` .. c:macro:: Py_ASNATIVEBYTES_UNSIGNED_BUFFER ``4`` .. c:macro:: Py_ASNATIVEBYTES_REJECT_NEGATIVE ``8`` + .. 
c:macro:: Py_ASNATIVEBYTES_ALLOW_INDEX ``16`` ============================================= ====== Specifying ``Py_ASNATIVEBYTES_NATIVE_ENDIAN`` will override any other endian @@ -483,6 +494,13 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. provided there is enough space for at least one sign bit, regardless of whether ``Py_ASNATIVEBYTES_UNSIGNED_BUFFER`` was specified. + If ``Py_ASNATIVEBYTES_ALLOW_INDEX`` is specified and a non-integer value is + passed, its :meth:`~object.__index__` method will be called first. This may + result in Python code executing and other threads being allowed to run, which + could cause changes to other objects or values in use. When *flags* is + ``-1``, this option is not set, and non-integer values will raise + :exc:`TypeError`. + .. note:: With the default *flags* (``-1``, or *UNSIGNED_BUFFER* without @@ -494,6 +512,17 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionadded:: 3.13 +.. c:function:: PyObject* PyLong_GetInfo(void) + + On success, return a read only :term:`named tuple`, that holds + information about Python's internal representation of integers. + See :data:`sys.int_info` for description of individual fields. + + On failure, return ``NULL`` with an exception set. + + .. versionadded:: 3.1 + + .. c:function:: int PyUnstable_Long_IsCompact(const PyLongObject* op) Return 1 if *op* is compact, 0 otherwise. diff --git a/Doc/c-api/marshal.rst b/Doc/c-api/marshal.rst index 489f1580a41..b9085ad3ec3 100644 --- a/Doc/c-api/marshal.rst +++ b/Doc/c-api/marshal.rst @@ -15,7 +15,7 @@ Numeric values are stored with the least significant byte first. The module supports two versions of the data format: version 0 is the historical version, version 1 shares interned strings in the file, and upon -unmarshalling. Version 2 uses a binary format for floating point numbers. +unmarshalling. Version 2 uses a binary format for floating-point numbers. ``Py_MARSHAL_VERSION`` indicates the current file format (currently 2). diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst index 9da09a21607..f7618a025ba 100644 --- a/Doc/c-api/memory.rst +++ b/Doc/c-api/memory.rst @@ -102,30 +102,38 @@ All allocating functions belong to one of three different "domains" (see also strategies and are optimized for different purposes. The specific details on how every domain allocates memory or what internal functions each domain calls is considered an implementation detail, but for debugging purposes a simplified -table can be found at :ref:`here `. There is no hard -requirement to use the memory returned by the allocation functions belonging to -a given domain for only the purposes hinted by that domain (although this is the -recommended practice). For example, one could use the memory returned by -:c:func:`PyMem_RawMalloc` for allocating Python objects or the memory returned -by :c:func:`PyObject_Malloc` for allocating memory for buffers. +table can be found at :ref:`here `. +The APIs used to allocate and free a block of memory must be from the same domain. +For example, :c:func:`PyMem_Free` must be used to free memory allocated using :c:func:`PyMem_Malloc`. The three allocation domains are: * Raw domain: intended for allocating memory for general-purpose memory buffers where the allocation *must* go to the system allocator or where the allocator can operate without the :term:`GIL`. The memory is requested directly - to the system. + from the system. See :ref:`Raw Memory Interface `. 
* "Mem" domain: intended for allocating memory for Python buffers and general-purpose memory buffers where the allocation must be performed with the :term:`GIL` held. The memory is taken from the Python private heap. + See :ref:`Memory Interface `. -* Object domain: intended for allocating memory belonging to Python objects. The - memory is taken from the Python private heap. +* Object domain: intended for allocating memory for Python objects. The + memory is taken from the Python private heap. See :ref:`Object allocators `. -When freeing memory previously allocated by the allocating functions belonging to a -given domain,the matching specific deallocating functions must be used. For example, -:c:func:`PyMem_Free` must be used to free memory allocated using :c:func:`PyMem_Malloc`. +.. note:: + + The :term:`free-threaded ` build requires that only Python objects are allocated using the "object" domain + and that all Python objects are allocated using that domain. This differs from the prior Python versions, + where this was only a best practice and not a hard requirement. + + For example, buffers (non-Python objects) should be allocated using :c:func:`PyMem_Malloc`, + :c:func:`PyMem_RawMalloc`, or :c:func:`malloc`, but not :c:func:`PyObject_Malloc`. + + See :ref:`Memory Allocation APIs `. + + +.. _raw-memoryinterface: Raw Memory Interface ==================== @@ -299,6 +307,8 @@ versions and is therefore deprecated in extension modules. * ``PyMem_DEL(ptr)`` +.. _objectinterface: + Object allocators ================= diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index 63e3bed6727..ec61be284ca 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -43,6 +43,8 @@ Module Objects to ``None``); the caller is responsible for providing a :attr:`__file__` attribute. + Return ``NULL`` with an exception set on error. + .. versionadded:: 3.3 .. versionchanged:: 3.4 @@ -265,6 +267,8 @@ of the following two module creation functions: API version *module_api_version*. If that version does not match the version of the running interpreter, a :exc:`RuntimeWarning` is emitted. + Return ``NULL`` with an exception set on error. + .. note:: Most uses of this function should be using :c:func:`PyModule_Create` @@ -338,7 +342,8 @@ The available slot types are: The *value* pointer of this slot must point to a function of the signature: .. c:function:: PyObject* create_module(PyObject *spec, PyModuleDef *def) - :noindex: + :no-index-entry: + :no-contents-entry: The function receives a :py:class:`~importlib.machinery.ModuleSpec` instance, as defined in :PEP:`451`, and the module definition. @@ -373,7 +378,8 @@ The available slot types are: The signature of the function is: .. c:function:: int exec_module(PyObject* module) - :noindex: + :no-index-entry: + :no-contents-entry: If multiple ``Py_mod_exec`` slots are specified, they are processed in the order they appear in the *m_slots* array. @@ -415,6 +421,8 @@ The available slot types are: Specifies one of the following values: + .. c:namespace:: NULL + .. c:macro:: Py_MOD_GIL_USED The module depends on the presence of the global interpreter lock (GIL), @@ -427,7 +435,7 @@ The available slot types are: This slot is ignored by Python builds not configured with :option:`--disable-gil`. Otherwise, it determines whether or not importing this module will cause the GIL to be automatically enabled. See - :ref:`free-threaded-cpython` for more detail. + :ref:`whatsnew313-free-threaded-cpython` for more detail. 
Multiple ``Py_mod_gil`` slots may not be specified in one module definition. @@ -461,6 +469,8 @@ objects dynamically. Note that both ``PyModule_FromDefAndSpec`` and If that version does not match the version of the running interpreter, a :exc:`RuntimeWarning` is emitted. + Return ``NULL`` with an exception set on error. + .. note:: Most uses of this function should be using :c:func:`PyModule_FromDefAndSpec` @@ -511,7 +521,7 @@ state: On success, return ``0``. On error, raise an exception and return ``-1``. - Return ``NULL`` if *value* is ``NULL``. It must be called with an exception + Return ``-1`` if *value* is ``NULL``. It must be called with an exception raised in this case. Example usage:: @@ -543,6 +553,14 @@ state: Note that ``Py_XDECREF()`` should be used instead of ``Py_DECREF()`` in this case, since *obj* can be ``NULL``. + The number of different *name* strings passed to this function + should be kept small, usually by only using statically allocated strings + as *name*. + For names that aren't known at compile time, prefer calling + :c:func:`PyUnicode_FromString` and :c:func:`PyObject_SetAttr` directly. + For more details, see :c:func:`PyUnicode_InternFromString`, which may be + used internally to create a key object. + .. versionadded:: 3.10 @@ -601,15 +619,23 @@ state: .. c:function:: int PyModule_AddIntConstant(PyObject *module, const char *name, long value) Add an integer constant to *module* as *name*. This convenience function can be - used from the module's initialization function. Return ``-1`` on error, ``0`` on - success. + used from the module's initialization function. + Return ``-1`` with an exception set on error, ``0`` on success. + + This is a convenience function that calls :c:func:`PyLong_FromLong` and + :c:func:`PyModule_AddObjectRef`; see their documentation for details. .. c:function:: int PyModule_AddStringConstant(PyObject *module, const char *name, const char *value) Add a string constant to *module* as *name*. This convenience function can be used from the module's initialization function. The string *value* must be - ``NULL``-terminated. Return ``-1`` on error, ``0`` on success. + ``NULL``-terminated. + Return ``-1`` with an exception set on error, ``0`` on success. + + This is a convenience function that calls + :c:func:`PyUnicode_InternFromString` and :c:func:`PyModule_AddObjectRef`; + see their documentation for details. .. c:macro:: PyModule_AddIntMacro(module, macro) @@ -617,7 +643,7 @@ state: Add an int constant to *module*. The name and the value are taken from *macro*. For example ``PyModule_AddIntMacro(module, AF_INET)`` adds the int constant *AF_INET* with the value of *AF_INET* to *module*. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. c:macro:: PyModule_AddStringMacro(module, macro) @@ -630,7 +656,7 @@ state: The type object is finalized by calling internally :c:func:`PyType_Ready`. The name of the type object is taken from the last component of :c:member:`~PyTypeObject.tp_name` after dot. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.9 @@ -643,7 +669,7 @@ state: import machinery assumes the module does not support running without the GIL. This function is only available in Python builds configured with :option:`--disable-gil`. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. 
versionadded:: 3.13 @@ -682,14 +708,14 @@ since multiple such modules can be created from a single definition. The caller must hold the GIL. - Return 0 on success or -1 on failure. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.3 .. c:function:: int PyState_RemoveModule(PyModuleDef *def) Removes the module object created from *def* from the interpreter state. - Return 0 on success or -1 on failure. + Return ``-1`` with an exception set on error, ``0`` on success. The caller must hold the GIL. diff --git a/Doc/c-api/monitoring.rst b/Doc/c-api/monitoring.rst index b34035b5548..285ddb2889a 100644 --- a/Doc/c-api/monitoring.rst +++ b/Doc/c-api/monitoring.rst @@ -1,6 +1,6 @@ .. highlight:: c -.. _monitoring: +.. _c-api-monitoring: Monitoring C API ================ @@ -133,7 +133,7 @@ Managing the Monitoring State Monitoring states can be managed with the help of monitoring scopes. A scope would typically correspond to a python function. -.. :c:function:: int PyMonitoring_EnterScope(PyMonitoringState *state_array, uint64_t *version, const uint8_t *event_types, Py_ssize_t length) +.. c:function:: int PyMonitoring_EnterScope(PyMonitoringState *state_array, uint64_t *version, const uint8_t *event_types, Py_ssize_t length) Enter a monitored scope. ``event_types`` is an array of the event IDs for events that may be fired from the scope. For example, the ID of a ``PY_START`` @@ -141,24 +141,52 @@ would typically correspond to a python function. to the base-2 logarithm of ``sys.monitoring.events.PY_START``. ``state_array`` is an array with a monitoring state entry for each event in ``event_types``, it is allocated by the user but populated by - ``PyMonitoring_EnterScope`` with information about the activation state of + :c:func:`!PyMonitoring_EnterScope` with information about the activation state of the event. The size of ``event_types`` (and hence also of ``state_array``) is given in ``length``. The ``version`` argument is a pointer to a value which should be allocated by the user together with ``state_array`` and initialized to 0, - and then set only by ``PyMonitoring_EnterScope`` itelf. It allows this + and then set only by :c:func:`!PyMonitoring_EnterScope` itelf. It allows this function to determine whether event states have changed since the previous call, and to return quickly if they have not. The scopes referred to here are lexical scopes: a function, class or method. - ``PyMonitoring_EnterScope`` should be called whenever the lexical scope is + :c:func:`!PyMonitoring_EnterScope` should be called whenever the lexical scope is entered. Scopes can be reentered, reusing the same *state_array* and *version*, in situations like when emulating a recursive Python function. When a code-like's execution is paused, such as when emulating a generator, the scope needs to be exited and re-entered. - -.. :c:function:: int PyMonitoring_ExitScope(void) - - Exit the last scope that was entered with ``PyMonitoring_EnterScope``. + The macros for *event_types* are: + + .. c:namespace:: NULL + + .. The table is here to make the docs searchable, and to allow automatic + links to the identifiers. + + ================================================== ===================================== + Macro Event + ================================================== ===================================== + .. c:macro:: PY_MONITORING_EVENT_BRANCH :monitoring-event:`BRANCH` + .. c:macro:: PY_MONITORING_EVENT_CALL :monitoring-event:`CALL` + .. 
c:macro:: PY_MONITORING_EVENT_C_RAISE :monitoring-event:`C_RAISE` + .. c:macro:: PY_MONITORING_EVENT_C_RETURN :monitoring-event:`C_RETURN` + .. c:macro:: PY_MONITORING_EVENT_EXCEPTION_HANDLED :monitoring-event:`EXCEPTION_HANDLED` + .. c:macro:: PY_MONITORING_EVENT_INSTRUCTION :monitoring-event:`INSTRUCTION` + .. c:macro:: PY_MONITORING_EVENT_JUMP :monitoring-event:`JUMP` + .. c:macro:: PY_MONITORING_EVENT_LINE :monitoring-event:`LINE` + .. c:macro:: PY_MONITORING_EVENT_PY_RESUME :monitoring-event:`PY_RESUME` + .. c:macro:: PY_MONITORING_EVENT_PY_RETURN :monitoring-event:`PY_RETURN` + .. c:macro:: PY_MONITORING_EVENT_PY_START :monitoring-event:`PY_START` + .. c:macro:: PY_MONITORING_EVENT_PY_THROW :monitoring-event:`PY_THROW` + .. c:macro:: PY_MONITORING_EVENT_PY_UNWIND :monitoring-event:`PY_UNWIND` + .. c:macro:: PY_MONITORING_EVENT_PY_YIELD :monitoring-event:`PY_YIELD` + .. c:macro:: PY_MONITORING_EVENT_RAISE :monitoring-event:`RAISE` + .. c:macro:: PY_MONITORING_EVENT_RERAISE :monitoring-event:`RERAISE` + .. c:macro:: PY_MONITORING_EVENT_STOP_ITERATION :monitoring-event:`STOP_ITERATION` + ================================================== ===================================== + +.. c:function:: int PyMonitoring_ExitScope(void) + + Exit the last scope that was entered with :c:func:`!PyMonitoring_EnterScope`. diff --git a/Doc/c-api/number.rst b/Doc/c-api/number.rst index 13d3c5af956..ad8b5935258 100644 --- a/Doc/c-api/number.rst +++ b/Doc/c-api/number.rst @@ -51,8 +51,8 @@ Number Protocol Return a reasonable approximation for the mathematical value of *o1* divided by *o2*, or ``NULL`` on failure. The return value is "approximate" because binary - floating point numbers are approximate; it is not possible to represent all real - numbers in base two. This function can return a floating point value when + floating-point numbers are approximate; it is not possible to represent all real + numbers in base two. This function can return a floating-point value when passed two integers. This is the equivalent of the Python expression ``o1 / o2``. @@ -177,8 +177,8 @@ Number Protocol Return a reasonable approximation for the mathematical value of *o1* divided by *o2*, or ``NULL`` on failure. The return value is "approximate" because binary - floating point numbers are approximate; it is not possible to represent all real - numbers in base two. This function can return a floating point value when + floating-point numbers are approximate; it is not possible to represent all real + numbers in base two. This function can return a floating-point value when passed two integers. The operation is done *in-place* when *o1* supports it. This is the equivalent of the Python statement ``o1 /= o2``. diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 8eeac3fc8a1..630114a4339 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -52,6 +52,7 @@ Object Protocol The reference is borrowed from the interpreter, and is valid until the interpreter finalization. + .. versionadded:: 3.13 @@ -205,6 +206,13 @@ Object Protocol If *v* is ``NULL``, the attribute is deleted, but this feature is deprecated in favour of using :c:func:`PyObject_DelAttrString`. + The number of different attribute names passed to this function + should be kept small, usually by using a statically allocated string + as *attr_name*. + For attribute names that aren't known at compile time, prefer calling + :c:func:`PyUnicode_FromString` and :c:func:`PyObject_SetAttr` directly. 
+ For more details, see :c:func:`PyUnicode_InternFromString`, which may be + used internally to create a key object. .. c:function:: int PyObject_GenericSetAttr(PyObject *o, PyObject *name, PyObject *value) @@ -230,6 +238,14 @@ Object Protocol specified as a :c:expr:`const char*` UTF-8 encoded bytes string, rather than a :c:expr:`PyObject*`. + The number of different attribute names passed to this function + should be kept small, usually by using a statically allocated string + as *attr_name*. + For attribute names that aren't known at compile time, prefer calling + :c:func:`PyUnicode_FromString` and :c:func:`PyObject_DelAttr` directly. + For more details, see :c:func:`PyUnicode_InternFromString`, which may be + used internally to create a key object for lookup. + .. c:function:: PyObject* PyObject_GenericGetDict(PyObject *o, void *context) @@ -351,14 +367,14 @@ Object Protocol The result will be ``1`` when at least one of the checks returns ``1``, otherwise it will be ``0``. - If *cls* has a :meth:`~class.__subclasscheck__` method, it will be called to + If *cls* has a :meth:`~type.__subclasscheck__` method, it will be called to determine the subclass status as described in :pep:`3119`. Otherwise, *derived* is a subclass of *cls* if it is a direct or indirect subclass, - i.e. contained in ``cls.__mro__``. + i.e. contained in :attr:`cls.__mro__ `. Normally only class objects, i.e. instances of :class:`type` or a derived class, are considered classes. However, objects can override this by having - a :attr:`~class.__bases__` attribute (which must be a tuple of base classes). + a :attr:`~type.__bases__` attribute (which must be a tuple of base classes). .. c:function:: int PyObject_IsInstance(PyObject *inst, PyObject *cls) @@ -370,15 +386,15 @@ Object Protocol The result will be ``1`` when at least one of the checks returns ``1``, otherwise it will be ``0``. - If *cls* has a :meth:`~class.__instancecheck__` method, it will be called to + If *cls* has a :meth:`~type.__instancecheck__` method, it will be called to determine the subclass status as described in :pep:`3119`. Otherwise, *inst* is an instance of *cls* if its class is a subclass of *cls*. An instance *inst* can override what is considered its class by having a - :attr:`~instance.__class__` attribute. + :attr:`~object.__class__` attribute. An object *cls* can override if it is considered a class, and what its base - classes are, by having a :attr:`~class.__bases__` attribute (which must be a tuple + classes are, by having a :attr:`~type.__bases__` attribute (which must be a tuple of base classes). diff --git a/Doc/c-api/refcounting.rst b/Doc/c-api/refcounting.rst index bf50107347e..d75dad737bc 100644 --- a/Doc/c-api/refcounting.rst +++ b/Doc/c-api/refcounting.rst @@ -62,7 +62,7 @@ of Python objects. ``NULL``, use :c:func:`Py_XINCREF`. Do not expect this function to actually modify *o* in any way. - For at least `some objects `_, + For at least :pep:`some objects <0683>`, this function has no effect. .. versionchanged:: 3.12 @@ -130,7 +130,7 @@ of Python objects. use :c:func:`Py_XDECREF`. Do not expect this function to actually modify *o* in any way. - For at least `some objects `_, + For at least :pep:`some objects <683>`, this function has no effect. .. warning:: diff --git a/Doc/c-api/slice.rst b/Doc/c-api/slice.rst index 27a1757c745..8adf6a96137 100644 --- a/Doc/c-api/slice.rst +++ b/Doc/c-api/slice.rst @@ -23,7 +23,9 @@ Slice Objects Return a new slice object with the given values. 
The *start*, *stop*, and *step* parameters are used as the values of the slice object attributes of the same names. Any of the values may be ``NULL``, in which case the - ``None`` will be used for the corresponding attribute. Return ``NULL`` if + ``None`` will be used for the corresponding attribute. + + Return ``NULL`` with an exception set if the new object could not be allocated. @@ -52,7 +54,7 @@ Slice Objects of bounds indices are clipped in a manner consistent with the handling of normal slices. - Returns ``0`` on success and ``-1`` on error with exception set. + Return ``0`` on success and ``-1`` on error with an exception set. .. note:: This function is considered not safe for resizable sequences. @@ -95,7 +97,7 @@ Slice Objects ``PY_SSIZE_T_MIN`` to ``PY_SSIZE_T_MIN``, and silently boost the step values less than ``-PY_SSIZE_T_MAX`` to ``-PY_SSIZE_T_MAX``. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.6.1 diff --git a/Doc/c-api/time.rst b/Doc/c-api/time.rst index 5cfdef71b3e..7032cc48aa6 100644 --- a/Doc/c-api/time.rst +++ b/Doc/c-api/time.rst @@ -1,5 +1,7 @@ .. highlight:: c +.. _c-api-time: + PyTime C API ============ diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst index 0d68a360f34..a2c3a75daa6 100644 --- a/Doc/c-api/tuple.rst +++ b/Doc/c-api/tuple.rst @@ -33,12 +33,14 @@ Tuple Objects .. c:function:: PyObject* PyTuple_New(Py_ssize_t len) - Return a new tuple object of size *len*, or ``NULL`` on failure. + Return a new tuple object of size *len*, + or ``NULL`` with an exception set on failure. .. c:function:: PyObject* PyTuple_Pack(Py_ssize_t n, ...) - Return a new tuple object of size *n*, or ``NULL`` on failure. The tuple values + Return a new tuple object of size *n*, + or ``NULL`` with an exception set on failure. The tuple values are initialized to the subsequent *n* C arguments pointing to Python objects. ``PyTuple_Pack(2, a, b)`` is equivalent to ``Py_BuildValue("(OO)", a, b)``. @@ -46,12 +48,12 @@ Tuple Objects .. c:function:: Py_ssize_t PyTuple_Size(PyObject *p) Take a pointer to a tuple object, and return the size of that tuple. + On error, return ``-1`` with an exception set. .. c:function:: Py_ssize_t PyTuple_GET_SIZE(PyObject *p) - Return the size of the tuple *p*, which must be non-``NULL`` and point to a tuple; - no error checking is performed. + Like :c:func:`PyTuple_Size`, but without error checking. .. c:function:: PyObject* PyTuple_GetItem(PyObject *p, Py_ssize_t pos) @@ -74,8 +76,10 @@ Tuple Objects .. c:function:: PyObject* PyTuple_GetSlice(PyObject *p, Py_ssize_t low, Py_ssize_t high) Return the slice of the tuple pointed to by *p* between *low* and *high*, - or ``NULL`` on failure. This is the equivalent of the Python expression - ``p[low:high]``. Indexing from the end of the tuple is not supported. + or ``NULL`` with an exception set on failure. + + This is the equivalent of the Python expression ``p[low:high]``. + Indexing from the end of the tuple is not supported. .. c:function:: int PyTuple_SetItem(PyObject *p, Py_ssize_t pos, PyObject *o) @@ -135,6 +139,8 @@ type. Create a new struct sequence type from the data in *desc*, described below. Instances of the resulting type can be created with :c:func:`PyStructSequence_New`. + Return ``NULL`` with an exception set on failure. + .. c:function:: void PyStructSequence_InitType(PyTypeObject *type, PyStructSequence_Desc *desc) @@ -143,8 +149,8 @@ type. ..
c:function:: int PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc) - The same as ``PyStructSequence_InitType``, but returns ``0`` on success and ``-1`` on - failure. + Like :c:func:`PyStructSequence_InitType`, but returns ``0`` on success + and ``-1`` with an exception set on failure. .. versionadded:: 3.4 @@ -201,6 +207,8 @@ type. Creates an instance of *type*, which must have been created with :c:func:`PyStructSequence_NewType`. + Return ``NULL`` with an exception set on failure. + .. c:function:: PyObject* PyStructSequence_GetItem(PyObject *p, Py_ssize_t pos) diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index 0cae5c09505..2a18733138d 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -53,7 +53,8 @@ Type Objects .. c:function:: PyObject* PyType_GetDict(PyTypeObject* type) Return the type object's internal namespace, which is otherwise only - exposed via a read-only proxy (``cls.__dict__``). This is a + exposed via a read-only proxy (:attr:`cls.__dict__ `). + This is a replacement for accessing :c:member:`~PyTypeObject.tp_dict` directly. The returned dictionary must be treated as read-only. @@ -140,7 +141,7 @@ Type Objects Return true if *a* is a subtype of *b*. This function only checks for actual subtypes, which means that - :meth:`~class.__subclasscheck__` is not called on *b*. Call + :meth:`~type.__subclasscheck__` is not called on *b*. Call :c:func:`PyObject_IsSubclass` to do the same check that :func:`issubclass` would do. @@ -174,29 +175,30 @@ Type Objects .. c:function:: PyObject* PyType_GetName(PyTypeObject *type) - Return the type's name. Equivalent to getting the type's ``__name__`` attribute. + Return the type's name. Equivalent to getting the type's + :attr:`~type.__name__` attribute. .. versionadded:: 3.11 .. c:function:: PyObject* PyType_GetQualName(PyTypeObject *type) Return the type's qualified name. Equivalent to getting the - type's ``__qualname__`` attribute. + type's :attr:`~type.__qualname__` attribute. .. versionadded:: 3.11 .. c:function:: PyObject* PyType_GetFullyQualifiedName(PyTypeObject *type) Return the type's fully qualified name. Equivalent to - ``f"{type.__module__}.{type.__qualname__}"``, or ``type.__qualname__`` if - ``type.__module__`` is not a string or is equal to ``"builtins"``. + ``f"{type.__module__}.{type.__qualname__}"``, or :attr:`type.__qualname__` + if :attr:`type.__module__` is not a string or is equal to ``"builtins"``. .. versionadded:: 3.13 .. c:function:: PyObject* PyType_GetModuleName(PyTypeObject *type) - Return the type's module name. Equivalent to getting the ``type.__module__`` - attribute. + Return the type's module name. Equivalent to getting the + :attr:`type.__module__` attribute. .. versionadded:: 3.13 diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index c9ef076c78c..be3effb0efe 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -567,12 +567,12 @@ and :c:data:`PyType_Type` effectively act as defaults.) For :ref:`statically allocated type objects `, the *tp_name* field should contain a dot. - Everything before the last dot is made accessible as the :attr:`__module__` + Everything before the last dot is made accessible as the :attr:`~type.__module__` attribute, and everything after the last dot is made accessible as the - :attr:`~definition.__name__` attribute. + :attr:`~type.__name__` attribute. 
If no dot is present, the entire :c:member:`~PyTypeObject.tp_name` field is made accessible as the - :attr:`~definition.__name__` attribute, and the :attr:`__module__` attribute is undefined + :attr:`~type.__name__` attribute, and the :attr:`~type.__module__` attribute is undefined (unless explicitly set in the dictionary, as explained above). This means your type will be impossible to pickle. Additionally, it will not be listed in module documentations created with pydoc. @@ -1131,7 +1131,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) .. c:macro:: Py_TPFLAGS_MANAGED_DICT - This bit indicates that instances of the class have a ``__dict__`` + This bit indicates that instances of the class have a :attr:`~object.__dict__` attribute, and that the space for the dictionary is managed by the VM. If this flag is set, :c:macro:`Py_TPFLAGS_HAVE_GC` should also be set. @@ -1335,8 +1335,8 @@ and :c:data:`PyType_Type` effectively act as defaults.) .. c:member:: const char* PyTypeObject.tp_doc An optional pointer to a NUL-terminated C string giving the docstring for this - type object. This is exposed as the :attr:`__doc__` attribute on the type and - instances of the type. + type object. This is exposed as the :attr:`~type.__doc__` attribute on the + type and instances of the type. **Inheritance:** @@ -1592,7 +1592,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) weak references to the type object itself. It is an error to set both the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit and - :c:member:`~PyTypeObject.tp_weaklist`. + :c:member:`~PyTypeObject.tp_weaklistoffset`. **Inheritance:** @@ -1604,7 +1604,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) **Default:** If the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit is set in the - :c:member:`~PyTypeObject.tp_dict` field, then + :c:member:`~PyTypeObject.tp_flags` field, then :c:member:`~PyTypeObject.tp_weaklistoffset` will be set to a negative value, to indicate that it is unsafe to use this field. @@ -2036,7 +2036,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) A collection of subclasses. Internal use only. May be an invalid pointer. To get a list of subclasses, call the Python method - :py:meth:`~class.__subclasses__`. + :py:meth:`~type.__subclasses__`. .. versionchanged:: 3.12 diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index 7320d035bab..d0a1b9ca126 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -317,7 +317,7 @@ These APIs can be used to work with surrogates: .. c:function:: Py_UCS4 Py_UNICODE_JOIN_SURROGATES(Py_UCS4 high, Py_UCS4 low) - Join two surrogate characters and return a single :c:type:`Py_UCS4` value. + Join two surrogate code points and return a single :c:type:`Py_UCS4` value. *high* and *low* are respectively the leading and trailing surrogates in a surrogate pair. *high* must be in the range [0xD800; 0xDBFF] and *low* must be in the range [0xDC00; 0xDFFF]. @@ -338,6 +338,8 @@ APIs: This is the recommended way to allocate a new Unicode object. Objects created using this function are not resizable. + On error, set an exception and return ``NULL``. + .. versionadded:: 3.3 @@ -614,6 +616,8 @@ APIs: Return the length of the Unicode object, in code points. + On error, set an exception and return ``-1``. + .. versionadded:: 3.3 @@ -657,6 +661,8 @@ APIs: not out of bounds, and that the object can be modified safely (i.e. that it its reference count is one). + Return ``0`` on success, ``-1`` on error with an exception set. + ..
versionadded:: 3.3 @@ -666,6 +672,8 @@ APIs: Unicode object and the index is not out of bounds, in contrast to :c:func:`PyUnicode_READ_CHAR`, which performs no error checking. + Return character on success, ``-1`` on error with an exception set. + .. versionadded:: 3.3 @@ -674,6 +682,7 @@ APIs: Return a substring of *unicode*, from character index *start* (included) to character index *end* (excluded). Negative indices are not supported. + On error, set an exception and return ``NULL``. .. versionadded:: 3.3 @@ -990,6 +999,9 @@ These are the UTF-8 codec APIs: object. Error handling is "strict". Return ``NULL`` if an exception was raised by the codec. + The function fails if the string contains surrogate code points + (``U+D800`` - ``U+DFFF``). + .. c:function:: const char* PyUnicode_AsUTF8AndSize(PyObject *unicode, Py_ssize_t *size) @@ -1002,6 +1014,9 @@ These are the UTF-8 codec APIs: On error, set an exception, set *size* to ``-1`` (if it's not NULL) and return ``NULL``. + The function fails if the string contains surrogate code points + (``U+D800`` - ``U+DFFF``). + This caches the UTF-8 representation of the string in the Unicode object, and subsequent calls will return a pointer to the same buffer. The caller is not responsible for deallocating the buffer. The buffer is deallocated and @@ -1429,8 +1444,9 @@ They all return ``NULL`` or ``-1`` if an exception occurs. Compare a Unicode object with a char buffer which is interpreted as being UTF-8 or ASCII encoded and return true (``1``) if they are equal, or false (``0``) otherwise. - If the Unicode object contains surrogate characters or - the C string is not valid UTF-8, false (``0``) is returned. + If the Unicode object contains surrogate code points + (``U+D800`` - ``U+DFFF``) or the C string is not valid UTF-8, + false (``0``) is returned. This function does not raise exceptions. @@ -1490,15 +1506,41 @@ They all return ``NULL`` or ``-1`` if an exception occurs. existing interned string that is the same as :c:expr:`*p_unicode`, it sets :c:expr:`*p_unicode` to it (releasing the reference to the old string object and creating a new :term:`strong reference` to the interned string object), otherwise it leaves - :c:expr:`*p_unicode` alone and interns it (creating a new :term:`strong reference`). + :c:expr:`*p_unicode` alone and interns it. + (Clarification: even though there is a lot of talk about references, think - of this function as reference-neutral; you own the object after the call - if and only if you owned it before the call.) + of this function as reference-neutral. You must own the object you pass in; + after the call you no longer own the passed-in reference, but you newly own + the result.) + + This function never raises an exception. + On error, it leaves its argument unchanged without interning it. + + Instances of subclasses of :py:class:`str` may not be interned, that is, + :c:expr:`PyUnicode_CheckExact(*p_unicode)` must be true. If it is not, + then -- as with any other error -- the argument is left unchanged. + + Note that interned strings are not “immortal”. + You must keep a reference to the result to benefit from interning. .. c:function:: PyObject* PyUnicode_InternFromString(const char *str) A combination of :c:func:`PyUnicode_FromString` and - :c:func:`PyUnicode_InternInPlace`, returning either a new Unicode string - object that has been interned, or a new ("owned") reference to an earlier - interned string object with the same value. + :c:func:`PyUnicode_InternInPlace`, meant for statically allocated strings. 
+ + Return a new ("owned") reference to either a new Unicode string object + that has been interned, or an earlier interned string object with the + same value. + + Python may keep a reference to the result, or make it :term:`immortal`, + preventing it from being garbage-collected promptly. + For interning an unbounded number of different strings, such as ones coming + from user input, prefer calling :c:func:`PyUnicode_FromString` and + :c:func:`PyUnicode_InternInPlace` directly. + + .. impl-detail:: + + Strings interned this way are made :term:`immortal`. + + diff --git a/Doc/conf.py b/Doc/conf.py index 8a14646801e..5f22340ac43 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -6,9 +6,11 @@ # The contents of this file are pickled, so don't put values in the namespace # that aren't pickleable (module imports are okay, they're removed automatically). +import importlib import os import sys import time + sys.path.append(os.path.abspath('tools/extensions')) sys.path.append(os.path.abspath('includes')) @@ -18,11 +20,10 @@ # --------------------- extensions = [ - 'asdl_highlight', + 'audit_events', 'c_annotations', - 'escape4chm', 'glossary_search', - 'peg_highlight', + 'lexers', 'pyspecific', 'sphinx.ext.coverage', 'sphinx.ext.doctest', @@ -31,13 +32,13 @@ # Skip if downstream redistributors haven't installed them try: - import notfound.extension + import notfound.extension # noqa: F401 except ImportError: pass else: extensions.append('notfound.extension') try: - import sphinxext.opengraph + import sphinxext.opengraph # noqa: F401 except ImportError: pass else: @@ -64,8 +65,8 @@ # We look for the Include/patchlevel.h file in the current Python source tree # and replace the values accordingly. -import patchlevel -version, release = patchlevel.get_version_info() +# See Doc/tools/extensions/patchlevel.py +version, release = importlib.import_module('patchlevel').get_version_info() rst_epilog = f""" .. |python_version_literal| replace:: ``Python {version}`` @@ -83,7 +84,7 @@ highlight_language = 'python3' # Minimum version of sphinx required -needs_sphinx = '4.2' +needs_sphinx = '6.2.1' # Create table of contents entries for domain objects (e.g. functions, classes, # attributes, etc.). Default is True. @@ -131,6 +132,8 @@ ('c:func', 'vsnprintf'), # Standard C types ('c:type', 'FILE'), + ('c:type', 'int8_t'), + ('c:type', 'int16_t'), ('c:type', 'int32_t'), ('c:type', 'int64_t'), ('c:type', 'intmax_t'), @@ -140,6 +143,9 @@ ('c:type', 'size_t'), ('c:type', 'ssize_t'), ('c:type', 'time_t'), + ('c:type', 'uint8_t'), + ('c:type', 'uint16_t'), + ('c:type', 'uint32_t'), ('c:type', 'uint64_t'), ('c:type', 'uintmax_t'), ('c:type', 'uintptr_t'), @@ -242,6 +248,7 @@ ('c:data', 'PyExc_OverflowError'), ('c:data', 'PyExc_PermissionError'), ('c:data', 'PyExc_ProcessLookupError'), + ('c:data', 'PyExc_PythonFinalizationError'), ('c:data', 'PyExc_RecursionError'), ('c:data', 'PyExc_ReferenceError'), ('c:data', 'PyExc_RuntimeError'), @@ -272,6 +279,9 @@ ('c:data', 'PyExc_UnicodeWarning'), ('c:data', 'PyExc_UserWarning'), ('c:data', 'PyExc_Warning'), + # Undocumented public C macros + ('c:macro', 'Py_BUILD_ASSERT'), + ('c:macro', 'Py_BUILD_ASSERT_EXPR'), # Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot # be resolved, as the method is currently undocumented. For context, see # https://github.com/python/cpython/pull/103289. 
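Aside, not part of the patch: the object.rst and unicode.rst hunks above recommend the ``...AttrString`` convenience functions only for attribute names that are fixed at compile time, and :c:func:`PyUnicode_FromString` plus :c:func:`PyObject_SetAttr` for names that only arrive at runtime. A minimal C sketch of that guidance, with hypothetical helper names, could look like::

    #include <Python.h>

    /* Name known at compile time: a statically allocated string is fine here;
       the key may be interned internally by CPython. */
    static int
    mark_ready(PyObject *obj)
    {
        return PyObject_SetAttrString(obj, "ready", Py_True);
    }

    /* Name supplied at runtime: build the key explicitly and do not intern it,
       so an unbounded set of names is not kept alive by the interpreter. */
    static int
    set_dynamic_attr(PyObject *obj, const char *name, PyObject *value)
    {
        PyObject *key = PyUnicode_FromString(name);
        if (key == NULL) {
            return -1;
        }
        int rc = PyObject_SetAttr(obj, key, value);
        Py_DECREF(key);
        return rc;
    }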
@@ -296,7 +306,8 @@ # Disable Docutils smartquotes for several translations smartquotes_excludes = { - 'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], 'builders': ['man', 'text'], + 'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], + 'builders': ['man', 'text'], } # Avoid a warning with Sphinx >= 4.0 @@ -305,6 +316,7 @@ # Allow translation of index directives gettext_additional_targets = [ 'index', + 'literal-block', ] # Options for HTML output @@ -317,11 +329,13 @@ 'collapsiblesidebar': True, 'issues_url': '/bugs.html', 'license_url': '/license.html', - 'root_include_title': False # We use the version switcher instead. + 'root_include_title': False, # We use the version switcher instead. } if os.getenv("READTHEDOCS"): - html_theme_options["hosted_on"] = 'Read the Docs' + html_theme_options["hosted_on"] = ( + 'Read the Docs' + ) # Override stylesheet fingerprinting for Windows CHM htmlhelp to fix GH-91207 # https://github.com/python/cpython/issues/91207 @@ -335,16 +349,21 @@ # Deployment preview information # (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html) -repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL") +is_deployment_preview = os.getenv("READTHEDOCS_VERSION_TYPE") == "external" +repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL", "") +repository_url = repository_url.removesuffix(".git") html_context = { - "is_deployment_preview": os.getenv("READTHEDOCS_VERSION_TYPE") == "external", - "repository_url": repository_url.removesuffix(".git") if repository_url else None, + "is_deployment_preview": is_deployment_preview, + "repository_url": repository_url or None, "pr_id": os.getenv("READTHEDOCS_VERSION"), "enable_analytics": os.getenv("PYTHON_DOCS_ENABLE_ANALYTICS"), } # This 'Last updated on:' timestamp is inserted at the bottom of every page. -html_last_updated_fmt = time.strftime('%b %d, %Y (%H:%M UTC)', time.gmtime()) +html_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) +html_last_updated_fmt = time.strftime( + '%b %d, %Y (%H:%M UTC)', time.gmtime(html_time) +) # Path to find HTML templates. templates_path = ['tools/templates'] @@ -394,8 +413,8 @@ \let\endVerbatim=\endOriginalVerbatim \setcounter{tocdepth}{2} ''', - # The paper size ('letter' or 'a4'). - 'papersize': 'a4', + # The paper size ('letterpaper' or 'a4paper'). + 'papersize': 'a4paper', # The font size ('10pt', '11pt' or '12pt'). 'pointsize': '10pt', } @@ -404,30 +423,70 @@ # (source start file, target name, title, author, document class [howto/manual]). _stdauthor = 'Guido van Rossum and the Python development team' latex_documents = [ - ('c-api/index', 'c-api.tex', - 'The Python/C API', _stdauthor, 'manual'), - ('extending/index', 'extending.tex', - 'Extending and Embedding Python', _stdauthor, 'manual'), - ('installing/index', 'installing.tex', - 'Installing Python Modules', _stdauthor, 'manual'), - ('library/index', 'library.tex', - 'The Python Library Reference', _stdauthor, 'manual'), - ('reference/index', 'reference.tex', - 'The Python Language Reference', _stdauthor, 'manual'), - ('tutorial/index', 'tutorial.tex', - 'Python Tutorial', _stdauthor, 'manual'), - ('using/index', 'using.tex', - 'Python Setup and Usage', _stdauthor, 'manual'), - ('faq/index', 'faq.tex', - 'Python Frequently Asked Questions', _stdauthor, 'manual'), - ('whatsnew/' + version, 'whatsnew.tex', - 'What\'s New in Python', 'A. M. 
Kuchling', 'howto'), + ('c-api/index', 'c-api.tex', 'The Python/C API', _stdauthor, 'manual'), + ( + 'extending/index', + 'extending.tex', + 'Extending and Embedding Python', + _stdauthor, + 'manual', + ), + ( + 'installing/index', + 'installing.tex', + 'Installing Python Modules', + _stdauthor, + 'manual', + ), + ( + 'library/index', + 'library.tex', + 'The Python Library Reference', + _stdauthor, + 'manual', + ), + ( + 'reference/index', + 'reference.tex', + 'The Python Language Reference', + _stdauthor, + 'manual', + ), + ( + 'tutorial/index', + 'tutorial.tex', + 'Python Tutorial', + _stdauthor, + 'manual', + ), + ( + 'using/index', + 'using.tex', + 'Python Setup and Usage', + _stdauthor, + 'manual', + ), + ( + 'faq/index', + 'faq.tex', + 'Python Frequently Asked Questions', + _stdauthor, + 'manual', + ), + ( + 'whatsnew/' + version, + 'whatsnew.tex', + 'What\'s New in Python', + 'A. M. Kuchling', + 'howto', + ), ] # Collect all HOWTOs individually -latex_documents.extend(('howto/' + fn[:-4], 'howto-' + fn[:-4] + '.tex', - '', _stdauthor, 'howto') - for fn in os.listdir('howto') - if fn.endswith('.rst') and fn != 'index.rst') +latex_documents.extend( + ('howto/' + fn[:-4], 'howto-' + fn[:-4] + '.tex', '', _stdauthor, 'howto') + for fn in os.listdir('howto') + if fn.endswith('.rst') and fn != 'index.rst' +) # Documents to append as an appendix to all manuals. latex_appendices = ['glossary', 'about', 'license', 'copyright'] @@ -455,8 +514,7 @@ 'test($|_)', ] -coverage_ignore_classes = [ -] +coverage_ignore_classes = [] # Glob patterns for C source files for C API coverage, relative to this directory. coverage_c_path = [ @@ -473,7 +531,7 @@ # The coverage checker will ignore all C items whose names match these regexes # (using re.match) -- the keys must be the same as in coverage_c_regexes. coverage_ignore_c_items = { -# 'cfunction': [...] + # 'cfunction': [...] 
} @@ -495,9 +553,15 @@ r'https://msdn.microsoft.com/.*': 'https://learn.microsoft.com/.*', r'https://docs.microsoft.com/.*': 'https://learn.microsoft.com/.*', r'https://go.microsoft.com/fwlink/\?LinkID=\d+': 'https://learn.microsoft.com/.*', + # Debian's man page redirects to its current stable version + r'https://manpages.debian.org/\w+\(\d(\w+)?\)': r'https://manpages.debian.org/\w+/[\w/\-\.]*\.\d(\w+)?\.en\.html', # Language redirects r'https://toml.io': 'https://toml.io/en/', r'https://www.redhat.com': 'https://www.redhat.com/en', + # pypi.org project name normalization (upper to lowercase, underscore to hyphen) + r'https://pypi.org/project/[A-Za-z\d_\-\.]+/': r'https://pypi.org/project/[a-z\d\-\.]+/', + # Discourse title name expansion (text changes when title is edited) + r'https://discuss\.python\.org/t/\d+': r'https://discuss\.python\.org/t/.*/\d+', # Other redirects r'https://www.boost.org/libs/.+': r'https://www.boost.org/doc/libs/\d_\d+_\d/.+', r'https://support.microsoft.com/en-us/help/\d+': 'https://support.microsoft.com/en-us/topic/.+', @@ -538,14 +602,16 @@ } extlinks_detect_hardcoded_links = True -# Options for extensions -# ---------------------- +# Options for c_annotations +# ------------------------- # Relative filename of the data files refcount_file = 'data/refcounts.dat' stable_abi_file = 'data/stable_abi.dat' -# sphinxext-opengraph config +# Options for sphinxext-opengraph +# ------------------------------- + ogp_site_url = 'https://docs.python.org/3/' ogp_site_name = 'Python documentation' ogp_image = '_static/og-image.png' diff --git a/Doc/constraints.txt b/Doc/constraints.txt index 147de1271eb..26ac1862dba 100644 --- a/Doc/constraints.txt +++ b/Doc/constraints.txt @@ -7,18 +7,20 @@ # Direct dependencies of Sphinx babel<3 colorama<0.5 -imagesize<1.5 -Jinja2<3.2 -packaging<24 -Pygments>=2.16.1,<3 +imagesize<2 +Jinja2<4 +packaging<25 +Pygments<3 requests<3 snowballstemmer<3 -sphinxcontrib-applehelp<1.1 -sphinxcontrib-devhelp<1.1 -sphinxcontrib-htmlhelp<2.1 -sphinxcontrib-jsmath<1.1 -sphinxcontrib-qthelp<1.1 -sphinxcontrib-serializinghtml<1.2 +# keep lower-bounds until Sphinx 8.1 is released +# https://github.com/sphinx-doc/sphinx/pull/12756 +sphinxcontrib-applehelp>=1.0.7,<3 +sphinxcontrib-devhelp>=1.0.6,<3 +sphinxcontrib-htmlhelp>=2.0.6,<3 +sphinxcontrib-jsmath>=1.0.1,<2 +sphinxcontrib-qthelp>=1.0.6,<3 +sphinxcontrib-serializinghtml>=1.1.9,<3 # Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above) -MarkupSafe<2.2 +MarkupSafe<3 diff --git a/Doc/contents.rst b/Doc/contents.rst index 24ceacb0076..b57f4b09a5d 100644 --- a/Doc/contents.rst +++ b/Doc/contents.rst @@ -14,6 +14,7 @@ installing/index.rst howto/index.rst faq/index.rst + deprecations/index.rst glossary.rst about.rst diff --git a/Doc/data/python3.13.abi b/Doc/data/python3.13.abi new file mode 100644 index 00000000000..55112e1e43c --- /dev/null +++ b/Doc/data/python3.13.abi @@ -0,0 +1,29385 @@
[The 29,385 added lines of Doc/data/python3.13.abi are omitted: the file is generated XML ABI data whose markup did not survive extraction, leaving only bare "+" markers.]
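Aside, not part of the patch: the monitoring.rst hunks near the top of this section document :c:func:`PyMonitoring_EnterScope` and :c:func:`PyMonitoring_ExitScope`. Going only by the signatures shown there, a scope that can fire ``PY_START`` events might be entered and exited roughly as follows (a sketch under those assumptions, with made-up names, not code taken from CPython)::

    #include <Python.h>

    /* One event ID per scope, so both arrays have length 1. */
    static PyMonitoringState scope_state[1];
    static uint64_t scope_version = 0;   /* must start at 0; set only by EnterScope */
    static const uint8_t scope_events[1] = { PY_MONITORING_EVENT_PY_START };

    static int
    run_emulated_scope(void)
    {
        if (PyMonitoring_EnterScope(scope_state, &scope_version,
                                    scope_events, 1) < 0) {
            return -1;
        }
        /* ... fire events for the emulated code here ... */
        return PyMonitoring_ExitScope();
    }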
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index 9c17223a49a..75bc5ea9456 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -1,887 +1,887 @@ role,name,added,ifdef_note,struct_abi_kind macro,PY_VECTORCALL_ARGUMENTS_OFFSET,3.12,, -function,PyAIter_Check,3.10,, -function,PyArg_Parse,3.2,, -function,PyArg_ParseTuple,3.2,, -function,PyArg_ParseTupleAndKeywords,3.2,, -function,PyArg_UnpackTuple,3.2,, -function,PyArg_VaParse,3.2,, -function,PyArg_VaParseTupleAndKeywords,3.2,, -function,PyArg_ValidateKeywordArguments,3.2,, -var,PyBaseObject_Type,3.2,, -function,PyBool_FromLong,3.2,, -var,PyBool_Type,3.2,, -function,PyBuffer_FillContiguousStrides,3.11,, -function,PyBuffer_FillInfo,3.11,, -function,PyBuffer_FromContiguous,3.11,, -function,PyBuffer_GetPointer,3.11,, -function,PyBuffer_IsContiguous,3.11,, -function,PyBuffer_Release,3.11,, -function,PyBuffer_SizeFromFormat,3.11,, -function,PyBuffer_ToContiguous,3.11,, -var,PyByteArrayIter_Type,3.2,, -function,PyByteArray_AsString,3.2,, -function,PyByteArray_Concat,3.2,, -function,PyByteArray_FromObject,3.2,, -function,PyByteArray_FromStringAndSize,3.2,, -function,PyByteArray_Resize,3.2,, -function,PyByteArray_Size,3.2,, -var,PyByteArray_Type,3.2,, -var,PyBytesIter_Type,3.2,, -function,PyBytes_AsString,3.2,, -function,PyBytes_AsStringAndSize,3.2,, -function,PyBytes_Concat,3.2,, -function,PyBytes_ConcatAndDel,3.2,, -function,PyBytes_DecodeEscape,3.2,, -function,PyBytes_FromFormat,3.2,, -function,PyBytes_FromFormatV,3.2,, -function,PyBytes_FromObject,3.2,, -function,PyBytes_FromString,3.2,, -function,PyBytes_FromStringAndSize,3.2,, -function,PyBytes_Repr,3.2,, -function,PyBytes_Size,3.2,, -var,PyBytes_Type,3.2,, +func,PyAIter_Check,3.10,, +func,PyArg_Parse,3.2,, +func,PyArg_ParseTuple,3.2,, +func,PyArg_ParseTupleAndKeywords,3.2,, +func,PyArg_UnpackTuple,3.2,, +func,PyArg_VaParse,3.2,, +func,PyArg_VaParseTupleAndKeywords,3.2,, +func,PyArg_ValidateKeywordArguments,3.2,, +data,PyBaseObject_Type,3.2,, +func,PyBool_FromLong,3.2,, +data,PyBool_Type,3.2,, +func,PyBuffer_FillContiguousStrides,3.11,, +func,PyBuffer_FillInfo,3.11,, +func,PyBuffer_FromContiguous,3.11,, +func,PyBuffer_GetPointer,3.11,, +func,PyBuffer_IsContiguous,3.11,,
+func,PyBuffer_Release,3.11,, +func,PyBuffer_SizeFromFormat,3.11,, +func,PyBuffer_ToContiguous,3.11,, +data,PyByteArrayIter_Type,3.2,, +func,PyByteArray_AsString,3.2,, +func,PyByteArray_Concat,3.2,, +func,PyByteArray_FromObject,3.2,, +func,PyByteArray_FromStringAndSize,3.2,, +func,PyByteArray_Resize,3.2,, +func,PyByteArray_Size,3.2,, +data,PyByteArray_Type,3.2,, +data,PyBytesIter_Type,3.2,, +func,PyBytes_AsString,3.2,, +func,PyBytes_AsStringAndSize,3.2,, +func,PyBytes_Concat,3.2,, +func,PyBytes_ConcatAndDel,3.2,, +func,PyBytes_DecodeEscape,3.2,, +func,PyBytes_FromFormat,3.2,, +func,PyBytes_FromFormatV,3.2,, +func,PyBytes_FromObject,3.2,, +func,PyBytes_FromString,3.2,, +func,PyBytes_FromStringAndSize,3.2,, +func,PyBytes_Repr,3.2,, +func,PyBytes_Size,3.2,, +data,PyBytes_Type,3.2,, type,PyCFunction,3.2,, type,PyCFunctionFast,3.13,, type,PyCFunctionFastWithKeywords,3.13,, type,PyCFunctionWithKeywords,3.2,, -function,PyCFunction_GetFlags,3.2,, -function,PyCFunction_GetFunction,3.2,, -function,PyCFunction_GetSelf,3.2,, -function,PyCFunction_New,3.4,, -function,PyCFunction_NewEx,3.2,, -var,PyCFunction_Type,3.2,, -function,PyCMethod_New,3.9,, -function,PyCallIter_New,3.2,, -var,PyCallIter_Type,3.2,, -function,PyCallable_Check,3.2,, +func,PyCFunction_GetFlags,3.2,, +func,PyCFunction_GetFunction,3.2,, +func,PyCFunction_GetSelf,3.2,, +func,PyCFunction_New,3.4,, +func,PyCFunction_NewEx,3.2,, +data,PyCFunction_Type,3.2,, +func,PyCMethod_New,3.9,, +func,PyCallIter_New,3.2,, +data,PyCallIter_Type,3.2,, +func,PyCallable_Check,3.2,, type,PyCapsule_Destructor,3.2,, -function,PyCapsule_GetContext,3.2,, -function,PyCapsule_GetDestructor,3.2,, -function,PyCapsule_GetName,3.2,, -function,PyCapsule_GetPointer,3.2,, -function,PyCapsule_Import,3.2,, -function,PyCapsule_IsValid,3.2,, -function,PyCapsule_New,3.2,, -function,PyCapsule_SetContext,3.2,, -function,PyCapsule_SetDestructor,3.2,, -function,PyCapsule_SetName,3.2,, -function,PyCapsule_SetPointer,3.2,, -var,PyCapsule_Type,3.2,, -var,PyClassMethodDescr_Type,3.2,, -function,PyCodec_BackslashReplaceErrors,3.2,, -function,PyCodec_Decode,3.2,, -function,PyCodec_Decoder,3.2,, -function,PyCodec_Encode,3.2,, -function,PyCodec_Encoder,3.2,, -function,PyCodec_IgnoreErrors,3.2,, -function,PyCodec_IncrementalDecoder,3.2,, -function,PyCodec_IncrementalEncoder,3.2,, -function,PyCodec_KnownEncoding,3.2,, -function,PyCodec_LookupError,3.2,, -function,PyCodec_NameReplaceErrors,3.7,, -function,PyCodec_Register,3.2,, -function,PyCodec_RegisterError,3.2,, -function,PyCodec_ReplaceErrors,3.2,, -function,PyCodec_StreamReader,3.2,, -function,PyCodec_StreamWriter,3.2,, -function,PyCodec_StrictErrors,3.2,, -function,PyCodec_Unregister,3.10,, -function,PyCodec_XMLCharRefReplaceErrors,3.2,, -function,PyComplex_FromDoubles,3.2,, -function,PyComplex_ImagAsDouble,3.2,, -function,PyComplex_RealAsDouble,3.2,, -var,PyComplex_Type,3.2,, -function,PyDescr_NewClassMethod,3.2,, -function,PyDescr_NewGetSet,3.2,, -function,PyDescr_NewMember,3.2,, -function,PyDescr_NewMethod,3.2,, -var,PyDictItems_Type,3.2,, -var,PyDictIterItem_Type,3.2,, -var,PyDictIterKey_Type,3.2,, -var,PyDictIterValue_Type,3.2,, -var,PyDictKeys_Type,3.2,, -function,PyDictProxy_New,3.2,, -var,PyDictProxy_Type,3.2,, -var,PyDictRevIterItem_Type,3.8,, -var,PyDictRevIterKey_Type,3.8,, -var,PyDictRevIterValue_Type,3.8,, -var,PyDictValues_Type,3.2,, -function,PyDict_Clear,3.2,, -function,PyDict_Contains,3.2,, -function,PyDict_Copy,3.2,, -function,PyDict_DelItem,3.2,, -function,PyDict_DelItemString,3.2,, -function,PyDict_GetItem,3.2,, 
-function,PyDict_GetItemRef,3.13,, -function,PyDict_GetItemString,3.2,, -function,PyDict_GetItemStringRef,3.13,, -function,PyDict_GetItemWithError,3.2,, -function,PyDict_Items,3.2,, -function,PyDict_Keys,3.2,, -function,PyDict_Merge,3.2,, -function,PyDict_MergeFromSeq2,3.2,, -function,PyDict_New,3.2,, -function,PyDict_Next,3.2,, -function,PyDict_SetItem,3.2,, -function,PyDict_SetItemString,3.2,, -function,PyDict_Size,3.2,, -var,PyDict_Type,3.2,, -function,PyDict_Update,3.2,, -function,PyDict_Values,3.2,, -var,PyEllipsis_Type,3.2,, -var,PyEnum_Type,3.2,, -function,PyErr_BadArgument,3.2,, -function,PyErr_BadInternalCall,3.2,, -function,PyErr_CheckSignals,3.2,, -function,PyErr_Clear,3.2,, -function,PyErr_Display,3.2,, -function,PyErr_DisplayException,3.12,, -function,PyErr_ExceptionMatches,3.2,, -function,PyErr_Fetch,3.2,, -function,PyErr_Format,3.2,, -function,PyErr_FormatV,3.5,, -function,PyErr_GetExcInfo,3.7,, -function,PyErr_GetHandledException,3.11,, -function,PyErr_GetRaisedException,3.12,, -function,PyErr_GivenExceptionMatches,3.2,, -function,PyErr_NewException,3.2,, -function,PyErr_NewExceptionWithDoc,3.2,, -function,PyErr_NoMemory,3.2,, -function,PyErr_NormalizeException,3.2,, -function,PyErr_Occurred,3.2,, -function,PyErr_Print,3.2,, -function,PyErr_PrintEx,3.2,, -function,PyErr_ProgramText,3.2,, -function,PyErr_ResourceWarning,3.6,, -function,PyErr_Restore,3.2,, -function,PyErr_SetExcFromWindowsErr,3.7,on Windows, -function,PyErr_SetExcFromWindowsErrWithFilename,3.7,on Windows, -function,PyErr_SetExcFromWindowsErrWithFilenameObject,3.7,on Windows, -function,PyErr_SetExcFromWindowsErrWithFilenameObjects,3.7,on Windows, -function,PyErr_SetExcInfo,3.7,, -function,PyErr_SetFromErrno,3.2,, -function,PyErr_SetFromErrnoWithFilename,3.2,, -function,PyErr_SetFromErrnoWithFilenameObject,3.2,, -function,PyErr_SetFromErrnoWithFilenameObjects,3.7,, -function,PyErr_SetFromWindowsErr,3.7,on Windows, -function,PyErr_SetFromWindowsErrWithFilename,3.7,on Windows, -function,PyErr_SetHandledException,3.11,, -function,PyErr_SetImportError,3.7,, -function,PyErr_SetImportErrorSubclass,3.6,, -function,PyErr_SetInterrupt,3.2,, -function,PyErr_SetInterruptEx,3.10,, -function,PyErr_SetNone,3.2,, -function,PyErr_SetObject,3.2,, -function,PyErr_SetRaisedException,3.12,, -function,PyErr_SetString,3.2,, -function,PyErr_SyntaxLocation,3.2,, -function,PyErr_SyntaxLocationEx,3.7,, -function,PyErr_WarnEx,3.2,, -function,PyErr_WarnExplicit,3.2,, -function,PyErr_WarnFormat,3.2,, -function,PyErr_WriteUnraisable,3.2,, -function,PyEval_AcquireThread,3.2,, -function,PyEval_EvalCode,3.2,, -function,PyEval_EvalCodeEx,3.2,, -function,PyEval_EvalFrame,3.2,, -function,PyEval_EvalFrameEx,3.2,, -function,PyEval_GetBuiltins,3.2,, -function,PyEval_GetFrame,3.2,, -function,PyEval_GetFrameBuiltins,3.13,, -function,PyEval_GetFrameGlobals,3.13,, -function,PyEval_GetFrameLocals,3.13,, -function,PyEval_GetFuncDesc,3.2,, -function,PyEval_GetFuncName,3.2,, -function,PyEval_GetGlobals,3.2,, -function,PyEval_GetLocals,3.2,, -function,PyEval_InitThreads,3.2,, -function,PyEval_ReleaseThread,3.2,, -function,PyEval_RestoreThread,3.2,, -function,PyEval_SaveThread,3.2,, -var,PyExc_ArithmeticError,3.2,, -var,PyExc_AssertionError,3.2,, -var,PyExc_AttributeError,3.2,, -var,PyExc_BaseException,3.2,, -var,PyExc_BaseExceptionGroup,3.11,, -var,PyExc_BlockingIOError,3.7,, -var,PyExc_BrokenPipeError,3.7,, -var,PyExc_BufferError,3.2,, -var,PyExc_BytesWarning,3.2,, -var,PyExc_ChildProcessError,3.7,, -var,PyExc_ConnectionAbortedError,3.7,, 
-var,PyExc_ConnectionError,3.7,, -var,PyExc_ConnectionRefusedError,3.7,, -var,PyExc_ConnectionResetError,3.7,, -var,PyExc_DeprecationWarning,3.2,, -var,PyExc_EOFError,3.2,, -var,PyExc_EncodingWarning,3.10,, -var,PyExc_EnvironmentError,3.2,, -var,PyExc_Exception,3.2,, -var,PyExc_FileExistsError,3.7,, -var,PyExc_FileNotFoundError,3.7,, -var,PyExc_FloatingPointError,3.2,, -var,PyExc_FutureWarning,3.2,, -var,PyExc_GeneratorExit,3.2,, -var,PyExc_IOError,3.2,, -var,PyExc_ImportError,3.2,, -var,PyExc_ImportWarning,3.2,, -var,PyExc_IndentationError,3.2,, -var,PyExc_IndexError,3.2,, -var,PyExc_InterruptedError,3.7,, -var,PyExc_IsADirectoryError,3.7,, -var,PyExc_KeyError,3.2,, -var,PyExc_KeyboardInterrupt,3.2,, -var,PyExc_LookupError,3.2,, -var,PyExc_MemoryError,3.2,, -var,PyExc_ModuleNotFoundError,3.6,, -var,PyExc_NameError,3.2,, -var,PyExc_NotADirectoryError,3.7,, -var,PyExc_NotImplementedError,3.2,, -var,PyExc_OSError,3.2,, -var,PyExc_OverflowError,3.2,, -var,PyExc_PendingDeprecationWarning,3.2,, -var,PyExc_PermissionError,3.7,, -var,PyExc_ProcessLookupError,3.7,, -var,PyExc_RecursionError,3.7,, -var,PyExc_ReferenceError,3.2,, -var,PyExc_ResourceWarning,3.7,, -var,PyExc_RuntimeError,3.2,, -var,PyExc_RuntimeWarning,3.2,, -var,PyExc_StopAsyncIteration,3.7,, -var,PyExc_StopIteration,3.2,, -var,PyExc_SyntaxError,3.2,, -var,PyExc_SyntaxWarning,3.2,, -var,PyExc_SystemError,3.2,, -var,PyExc_SystemExit,3.2,, -var,PyExc_TabError,3.2,, -var,PyExc_TimeoutError,3.7,, -var,PyExc_TypeError,3.2,, -var,PyExc_UnboundLocalError,3.2,, -var,PyExc_UnicodeDecodeError,3.2,, -var,PyExc_UnicodeEncodeError,3.2,, -var,PyExc_UnicodeError,3.2,, -var,PyExc_UnicodeTranslateError,3.2,, -var,PyExc_UnicodeWarning,3.2,, -var,PyExc_UserWarning,3.2,, -var,PyExc_ValueError,3.2,, -var,PyExc_Warning,3.2,, -var,PyExc_WindowsError,3.7,on Windows, -var,PyExc_ZeroDivisionError,3.2,, -function,PyExceptionClass_Name,3.8,, -function,PyException_GetArgs,3.12,, -function,PyException_GetCause,3.2,, -function,PyException_GetContext,3.2,, -function,PyException_GetTraceback,3.2,, -function,PyException_SetArgs,3.12,, -function,PyException_SetCause,3.2,, -function,PyException_SetContext,3.2,, -function,PyException_SetTraceback,3.2,, -function,PyFile_FromFd,3.2,, -function,PyFile_GetLine,3.2,, -function,PyFile_WriteObject,3.2,, -function,PyFile_WriteString,3.2,, -var,PyFilter_Type,3.2,, -function,PyFloat_AsDouble,3.2,, -function,PyFloat_FromDouble,3.2,, -function,PyFloat_FromString,3.2,, -function,PyFloat_GetInfo,3.2,, -function,PyFloat_GetMax,3.2,, -function,PyFloat_GetMin,3.2,, -var,PyFloat_Type,3.2,, +func,PyCapsule_GetContext,3.2,, +func,PyCapsule_GetDestructor,3.2,, +func,PyCapsule_GetName,3.2,, +func,PyCapsule_GetPointer,3.2,, +func,PyCapsule_Import,3.2,, +func,PyCapsule_IsValid,3.2,, +func,PyCapsule_New,3.2,, +func,PyCapsule_SetContext,3.2,, +func,PyCapsule_SetDestructor,3.2,, +func,PyCapsule_SetName,3.2,, +func,PyCapsule_SetPointer,3.2,, +data,PyCapsule_Type,3.2,, +data,PyClassMethodDescr_Type,3.2,, +func,PyCodec_BackslashReplaceErrors,3.2,, +func,PyCodec_Decode,3.2,, +func,PyCodec_Decoder,3.2,, +func,PyCodec_Encode,3.2,, +func,PyCodec_Encoder,3.2,, +func,PyCodec_IgnoreErrors,3.2,, +func,PyCodec_IncrementalDecoder,3.2,, +func,PyCodec_IncrementalEncoder,3.2,, +func,PyCodec_KnownEncoding,3.2,, +func,PyCodec_LookupError,3.2,, +func,PyCodec_NameReplaceErrors,3.7,, +func,PyCodec_Register,3.2,, +func,PyCodec_RegisterError,3.2,, +func,PyCodec_ReplaceErrors,3.2,, +func,PyCodec_StreamReader,3.2,, +func,PyCodec_StreamWriter,3.2,, 
+func,PyCodec_StrictErrors,3.2,, +func,PyCodec_Unregister,3.10,, +func,PyCodec_XMLCharRefReplaceErrors,3.2,, +func,PyComplex_FromDoubles,3.2,, +func,PyComplex_ImagAsDouble,3.2,, +func,PyComplex_RealAsDouble,3.2,, +data,PyComplex_Type,3.2,, +func,PyDescr_NewClassMethod,3.2,, +func,PyDescr_NewGetSet,3.2,, +func,PyDescr_NewMember,3.2,, +func,PyDescr_NewMethod,3.2,, +data,PyDictItems_Type,3.2,, +data,PyDictIterItem_Type,3.2,, +data,PyDictIterKey_Type,3.2,, +data,PyDictIterValue_Type,3.2,, +data,PyDictKeys_Type,3.2,, +func,PyDictProxy_New,3.2,, +data,PyDictProxy_Type,3.2,, +data,PyDictRevIterItem_Type,3.8,, +data,PyDictRevIterKey_Type,3.8,, +data,PyDictRevIterValue_Type,3.8,, +data,PyDictValues_Type,3.2,, +func,PyDict_Clear,3.2,, +func,PyDict_Contains,3.2,, +func,PyDict_Copy,3.2,, +func,PyDict_DelItem,3.2,, +func,PyDict_DelItemString,3.2,, +func,PyDict_GetItem,3.2,, +func,PyDict_GetItemRef,3.13,, +func,PyDict_GetItemString,3.2,, +func,PyDict_GetItemStringRef,3.13,, +func,PyDict_GetItemWithError,3.2,, +func,PyDict_Items,3.2,, +func,PyDict_Keys,3.2,, +func,PyDict_Merge,3.2,, +func,PyDict_MergeFromSeq2,3.2,, +func,PyDict_New,3.2,, +func,PyDict_Next,3.2,, +func,PyDict_SetItem,3.2,, +func,PyDict_SetItemString,3.2,, +func,PyDict_Size,3.2,, +data,PyDict_Type,3.2,, +func,PyDict_Update,3.2,, +func,PyDict_Values,3.2,, +data,PyEllipsis_Type,3.2,, +data,PyEnum_Type,3.2,, +func,PyErr_BadArgument,3.2,, +func,PyErr_BadInternalCall,3.2,, +func,PyErr_CheckSignals,3.2,, +func,PyErr_Clear,3.2,, +func,PyErr_Display,3.2,, +func,PyErr_DisplayException,3.12,, +func,PyErr_ExceptionMatches,3.2,, +func,PyErr_Fetch,3.2,, +func,PyErr_Format,3.2,, +func,PyErr_FormatV,3.5,, +func,PyErr_GetExcInfo,3.7,, +func,PyErr_GetHandledException,3.11,, +func,PyErr_GetRaisedException,3.12,, +func,PyErr_GivenExceptionMatches,3.2,, +func,PyErr_NewException,3.2,, +func,PyErr_NewExceptionWithDoc,3.2,, +func,PyErr_NoMemory,3.2,, +func,PyErr_NormalizeException,3.2,, +func,PyErr_Occurred,3.2,, +func,PyErr_Print,3.2,, +func,PyErr_PrintEx,3.2,, +func,PyErr_ProgramText,3.2,, +func,PyErr_ResourceWarning,3.6,, +func,PyErr_Restore,3.2,, +func,PyErr_SetExcFromWindowsErr,3.7,on Windows, +func,PyErr_SetExcFromWindowsErrWithFilename,3.7,on Windows, +func,PyErr_SetExcFromWindowsErrWithFilenameObject,3.7,on Windows, +func,PyErr_SetExcFromWindowsErrWithFilenameObjects,3.7,on Windows, +func,PyErr_SetExcInfo,3.7,, +func,PyErr_SetFromErrno,3.2,, +func,PyErr_SetFromErrnoWithFilename,3.2,, +func,PyErr_SetFromErrnoWithFilenameObject,3.2,, +func,PyErr_SetFromErrnoWithFilenameObjects,3.7,, +func,PyErr_SetFromWindowsErr,3.7,on Windows, +func,PyErr_SetFromWindowsErrWithFilename,3.7,on Windows, +func,PyErr_SetHandledException,3.11,, +func,PyErr_SetImportError,3.7,, +func,PyErr_SetImportErrorSubclass,3.6,, +func,PyErr_SetInterrupt,3.2,, +func,PyErr_SetInterruptEx,3.10,, +func,PyErr_SetNone,3.2,, +func,PyErr_SetObject,3.2,, +func,PyErr_SetRaisedException,3.12,, +func,PyErr_SetString,3.2,, +func,PyErr_SyntaxLocation,3.2,, +func,PyErr_SyntaxLocationEx,3.7,, +func,PyErr_WarnEx,3.2,, +func,PyErr_WarnExplicit,3.2,, +func,PyErr_WarnFormat,3.2,, +func,PyErr_WriteUnraisable,3.2,, +func,PyEval_AcquireThread,3.2,, +func,PyEval_EvalCode,3.2,, +func,PyEval_EvalCodeEx,3.2,, +func,PyEval_EvalFrame,3.2,, +func,PyEval_EvalFrameEx,3.2,, +func,PyEval_GetBuiltins,3.2,, +func,PyEval_GetFrame,3.2,, +func,PyEval_GetFrameBuiltins,3.13,, +func,PyEval_GetFrameGlobals,3.13,, +func,PyEval_GetFrameLocals,3.13,, +func,PyEval_GetFuncDesc,3.2,, +func,PyEval_GetFuncName,3.2,, 
+func,PyEval_GetGlobals,3.2,, +func,PyEval_GetLocals,3.2,, +func,PyEval_InitThreads,3.2,, +func,PyEval_ReleaseThread,3.2,, +func,PyEval_RestoreThread,3.2,, +func,PyEval_SaveThread,3.2,, +data,PyExc_ArithmeticError,3.2,, +data,PyExc_AssertionError,3.2,, +data,PyExc_AttributeError,3.2,, +data,PyExc_BaseException,3.2,, +data,PyExc_BaseExceptionGroup,3.11,, +data,PyExc_BlockingIOError,3.7,, +data,PyExc_BrokenPipeError,3.7,, +data,PyExc_BufferError,3.2,, +data,PyExc_BytesWarning,3.2,, +data,PyExc_ChildProcessError,3.7,, +data,PyExc_ConnectionAbortedError,3.7,, +data,PyExc_ConnectionError,3.7,, +data,PyExc_ConnectionRefusedError,3.7,, +data,PyExc_ConnectionResetError,3.7,, +data,PyExc_DeprecationWarning,3.2,, +data,PyExc_EOFError,3.2,, +data,PyExc_EncodingWarning,3.10,, +data,PyExc_EnvironmentError,3.2,, +data,PyExc_Exception,3.2,, +data,PyExc_FileExistsError,3.7,, +data,PyExc_FileNotFoundError,3.7,, +data,PyExc_FloatingPointError,3.2,, +data,PyExc_FutureWarning,3.2,, +data,PyExc_GeneratorExit,3.2,, +data,PyExc_IOError,3.2,, +data,PyExc_ImportError,3.2,, +data,PyExc_ImportWarning,3.2,, +data,PyExc_IndentationError,3.2,, +data,PyExc_IndexError,3.2,, +data,PyExc_InterruptedError,3.7,, +data,PyExc_IsADirectoryError,3.7,, +data,PyExc_KeyError,3.2,, +data,PyExc_KeyboardInterrupt,3.2,, +data,PyExc_LookupError,3.2,, +data,PyExc_MemoryError,3.2,, +data,PyExc_ModuleNotFoundError,3.6,, +data,PyExc_NameError,3.2,, +data,PyExc_NotADirectoryError,3.7,, +data,PyExc_NotImplementedError,3.2,, +data,PyExc_OSError,3.2,, +data,PyExc_OverflowError,3.2,, +data,PyExc_PendingDeprecationWarning,3.2,, +data,PyExc_PermissionError,3.7,, +data,PyExc_ProcessLookupError,3.7,, +data,PyExc_RecursionError,3.7,, +data,PyExc_ReferenceError,3.2,, +data,PyExc_ResourceWarning,3.7,, +data,PyExc_RuntimeError,3.2,, +data,PyExc_RuntimeWarning,3.2,, +data,PyExc_StopAsyncIteration,3.7,, +data,PyExc_StopIteration,3.2,, +data,PyExc_SyntaxError,3.2,, +data,PyExc_SyntaxWarning,3.2,, +data,PyExc_SystemError,3.2,, +data,PyExc_SystemExit,3.2,, +data,PyExc_TabError,3.2,, +data,PyExc_TimeoutError,3.7,, +data,PyExc_TypeError,3.2,, +data,PyExc_UnboundLocalError,3.2,, +data,PyExc_UnicodeDecodeError,3.2,, +data,PyExc_UnicodeEncodeError,3.2,, +data,PyExc_UnicodeError,3.2,, +data,PyExc_UnicodeTranslateError,3.2,, +data,PyExc_UnicodeWarning,3.2,, +data,PyExc_UserWarning,3.2,, +data,PyExc_ValueError,3.2,, +data,PyExc_Warning,3.2,, +data,PyExc_WindowsError,3.7,on Windows, +data,PyExc_ZeroDivisionError,3.2,, +func,PyExceptionClass_Name,3.8,, +func,PyException_GetArgs,3.12,, +func,PyException_GetCause,3.2,, +func,PyException_GetContext,3.2,, +func,PyException_GetTraceback,3.2,, +func,PyException_SetArgs,3.12,, +func,PyException_SetCause,3.2,, +func,PyException_SetContext,3.2,, +func,PyException_SetTraceback,3.2,, +func,PyFile_FromFd,3.2,, +func,PyFile_GetLine,3.2,, +func,PyFile_WriteObject,3.2,, +func,PyFile_WriteString,3.2,, +data,PyFilter_Type,3.2,, +func,PyFloat_AsDouble,3.2,, +func,PyFloat_FromDouble,3.2,, +func,PyFloat_FromString,3.2,, +func,PyFloat_GetInfo,3.2,, +func,PyFloat_GetMax,3.2,, +func,PyFloat_GetMin,3.2,, +data,PyFloat_Type,3.2,, type,PyFrameObject,3.2,,opaque -function,PyFrame_GetCode,3.10,, -function,PyFrame_GetLineNumber,3.10,, -function,PyFrozenSet_New,3.2,, -var,PyFrozenSet_Type,3.2,, -function,PyGC_Collect,3.2,, -function,PyGC_Disable,3.10,, -function,PyGC_Enable,3.10,, -function,PyGC_IsEnabled,3.10,, -function,PyGILState_Ensure,3.2,, -function,PyGILState_GetThisThreadState,3.2,, -function,PyGILState_Release,3.2,, 
+func,PyFrame_GetCode,3.10,, +func,PyFrame_GetLineNumber,3.10,, +func,PyFrozenSet_New,3.2,, +data,PyFrozenSet_Type,3.2,, +func,PyGC_Collect,3.2,, +func,PyGC_Disable,3.10,, +func,PyGC_Enable,3.10,, +func,PyGC_IsEnabled,3.10,, +func,PyGILState_Ensure,3.2,, +func,PyGILState_GetThisThreadState,3.2,, +func,PyGILState_Release,3.2,, type,PyGILState_STATE,3.2,, type,PyGetSetDef,3.2,,full-abi -var,PyGetSetDescr_Type,3.2,, -function,PyImport_AddModule,3.2,, -function,PyImport_AddModuleObject,3.7,, -function,PyImport_AddModuleRef,3.13,, -function,PyImport_AppendInittab,3.2,, -function,PyImport_ExecCodeModule,3.2,, -function,PyImport_ExecCodeModuleEx,3.2,, -function,PyImport_ExecCodeModuleObject,3.7,, -function,PyImport_ExecCodeModuleWithPathnames,3.2,, -function,PyImport_GetImporter,3.2,, -function,PyImport_GetMagicNumber,3.2,, -function,PyImport_GetMagicTag,3.2,, -function,PyImport_GetModule,3.8,, -function,PyImport_GetModuleDict,3.2,, -function,PyImport_Import,3.2,, -function,PyImport_ImportFrozenModule,3.2,, -function,PyImport_ImportFrozenModuleObject,3.7,, -function,PyImport_ImportModule,3.2,, -function,PyImport_ImportModuleLevel,3.2,, -function,PyImport_ImportModuleLevelObject,3.7,, -function,PyImport_ImportModuleNoBlock,3.2,, -function,PyImport_ReloadModule,3.2,, -function,PyIndex_Check,3.8,, +data,PyGetSetDescr_Type,3.2,, +func,PyImport_AddModule,3.2,, +func,PyImport_AddModuleObject,3.7,, +func,PyImport_AddModuleRef,3.13,, +func,PyImport_AppendInittab,3.2,, +func,PyImport_ExecCodeModule,3.2,, +func,PyImport_ExecCodeModuleEx,3.2,, +func,PyImport_ExecCodeModuleObject,3.7,, +func,PyImport_ExecCodeModuleWithPathnames,3.2,, +func,PyImport_GetImporter,3.2,, +func,PyImport_GetMagicNumber,3.2,, +func,PyImport_GetMagicTag,3.2,, +func,PyImport_GetModule,3.8,, +func,PyImport_GetModuleDict,3.2,, +func,PyImport_Import,3.2,, +func,PyImport_ImportFrozenModule,3.2,, +func,PyImport_ImportFrozenModuleObject,3.7,, +func,PyImport_ImportModule,3.2,, +func,PyImport_ImportModuleLevel,3.2,, +func,PyImport_ImportModuleLevelObject,3.7,, +func,PyImport_ImportModuleNoBlock,3.2,, +func,PyImport_ReloadModule,3.2,, +func,PyIndex_Check,3.8,, type,PyInterpreterState,3.2,,opaque -function,PyInterpreterState_Clear,3.2,, -function,PyInterpreterState_Delete,3.2,, -function,PyInterpreterState_Get,3.9,, -function,PyInterpreterState_GetDict,3.8,, -function,PyInterpreterState_GetID,3.7,, -function,PyInterpreterState_New,3.2,, -function,PyIter_Check,3.8,, -function,PyIter_Next,3.2,, -function,PyIter_Send,3.10,, -var,PyListIter_Type,3.2,, -var,PyListRevIter_Type,3.2,, -function,PyList_Append,3.2,, -function,PyList_AsTuple,3.2,, -function,PyList_GetItem,3.2,, -function,PyList_GetItemRef,3.13,, -function,PyList_GetSlice,3.2,, -function,PyList_Insert,3.2,, -function,PyList_New,3.2,, -function,PyList_Reverse,3.2,, -function,PyList_SetItem,3.2,, -function,PyList_SetSlice,3.2,, -function,PyList_Size,3.2,, -function,PyList_Sort,3.2,, -var,PyList_Type,3.2,, +func,PyInterpreterState_Clear,3.2,, +func,PyInterpreterState_Delete,3.2,, +func,PyInterpreterState_Get,3.9,, +func,PyInterpreterState_GetDict,3.8,, +func,PyInterpreterState_GetID,3.7,, +func,PyInterpreterState_New,3.2,, +func,PyIter_Check,3.8,, +func,PyIter_Next,3.2,, +func,PyIter_Send,3.10,, +data,PyListIter_Type,3.2,, +data,PyListRevIter_Type,3.2,, +func,PyList_Append,3.2,, +func,PyList_AsTuple,3.2,, +func,PyList_GetItem,3.2,, +func,PyList_GetItemRef,3.13,, +func,PyList_GetSlice,3.2,, +func,PyList_Insert,3.2,, +func,PyList_New,3.2,, +func,PyList_Reverse,3.2,, +func,PyList_SetItem,3.2,, 
+func,PyList_SetSlice,3.2,, +func,PyList_Size,3.2,, +func,PyList_Sort,3.2,, +data,PyList_Type,3.2,, type,PyLongObject,3.2,,opaque -var,PyLongRangeIter_Type,3.2,, -function,PyLong_AsDouble,3.2,, -function,PyLong_AsInt,3.13,, -function,PyLong_AsLong,3.2,, -function,PyLong_AsLongAndOverflow,3.2,, -function,PyLong_AsLongLong,3.2,, -function,PyLong_AsLongLongAndOverflow,3.2,, -function,PyLong_AsSize_t,3.2,, -function,PyLong_AsSsize_t,3.2,, -function,PyLong_AsUnsignedLong,3.2,, -function,PyLong_AsUnsignedLongLong,3.2,, -function,PyLong_AsUnsignedLongLongMask,3.2,, -function,PyLong_AsUnsignedLongMask,3.2,, -function,PyLong_AsVoidPtr,3.2,, -function,PyLong_FromDouble,3.2,, -function,PyLong_FromLong,3.2,, -function,PyLong_FromLongLong,3.2,, -function,PyLong_FromSize_t,3.2,, -function,PyLong_FromSsize_t,3.2,, -function,PyLong_FromString,3.2,, -function,PyLong_FromUnsignedLong,3.2,, -function,PyLong_FromUnsignedLongLong,3.2,, -function,PyLong_FromVoidPtr,3.2,, -function,PyLong_GetInfo,3.2,, -var,PyLong_Type,3.2,, -var,PyMap_Type,3.2,, -function,PyMapping_Check,3.2,, -function,PyMapping_GetItemString,3.2,, -function,PyMapping_GetOptionalItem,3.13,, -function,PyMapping_GetOptionalItemString,3.13,, -function,PyMapping_HasKey,3.2,, -function,PyMapping_HasKeyString,3.2,, -function,PyMapping_HasKeyStringWithError,3.13,, -function,PyMapping_HasKeyWithError,3.13,, -function,PyMapping_Items,3.2,, -function,PyMapping_Keys,3.2,, -function,PyMapping_Length,3.2,, -function,PyMapping_SetItemString,3.2,, -function,PyMapping_Size,3.2,, -function,PyMapping_Values,3.2,, -function,PyMem_Calloc,3.7,, -function,PyMem_Free,3.2,, -function,PyMem_Malloc,3.2,, -function,PyMem_RawCalloc,3.13,, -function,PyMem_RawFree,3.13,, -function,PyMem_RawMalloc,3.13,, -function,PyMem_RawRealloc,3.13,, -function,PyMem_Realloc,3.2,, +data,PyLongRangeIter_Type,3.2,, +func,PyLong_AsDouble,3.2,, +func,PyLong_AsInt,3.13,, +func,PyLong_AsLong,3.2,, +func,PyLong_AsLongAndOverflow,3.2,, +func,PyLong_AsLongLong,3.2,, +func,PyLong_AsLongLongAndOverflow,3.2,, +func,PyLong_AsSize_t,3.2,, +func,PyLong_AsSsize_t,3.2,, +func,PyLong_AsUnsignedLong,3.2,, +func,PyLong_AsUnsignedLongLong,3.2,, +func,PyLong_AsUnsignedLongLongMask,3.2,, +func,PyLong_AsUnsignedLongMask,3.2,, +func,PyLong_AsVoidPtr,3.2,, +func,PyLong_FromDouble,3.2,, +func,PyLong_FromLong,3.2,, +func,PyLong_FromLongLong,3.2,, +func,PyLong_FromSize_t,3.2,, +func,PyLong_FromSsize_t,3.2,, +func,PyLong_FromString,3.2,, +func,PyLong_FromUnsignedLong,3.2,, +func,PyLong_FromUnsignedLongLong,3.2,, +func,PyLong_FromVoidPtr,3.2,, +func,PyLong_GetInfo,3.2,, +data,PyLong_Type,3.2,, +data,PyMap_Type,3.2,, +func,PyMapping_Check,3.2,, +func,PyMapping_GetItemString,3.2,, +func,PyMapping_GetOptionalItem,3.13,, +func,PyMapping_GetOptionalItemString,3.13,, +func,PyMapping_HasKey,3.2,, +func,PyMapping_HasKeyString,3.2,, +func,PyMapping_HasKeyStringWithError,3.13,, +func,PyMapping_HasKeyWithError,3.13,, +func,PyMapping_Items,3.2,, +func,PyMapping_Keys,3.2,, +func,PyMapping_Length,3.2,, +func,PyMapping_SetItemString,3.2,, +func,PyMapping_Size,3.2,, +func,PyMapping_Values,3.2,, +func,PyMem_Calloc,3.7,, +func,PyMem_Free,3.2,, +func,PyMem_Malloc,3.2,, +func,PyMem_RawCalloc,3.13,, +func,PyMem_RawFree,3.13,, +func,PyMem_RawMalloc,3.13,, +func,PyMem_RawRealloc,3.13,, +func,PyMem_Realloc,3.2,, type,PyMemberDef,3.2,,full-abi -var,PyMemberDescr_Type,3.2,, -function,PyMember_GetOne,3.2,, -function,PyMember_SetOne,3.2,, -function,PyMemoryView_FromBuffer,3.11,, -function,PyMemoryView_FromMemory,3.7,, 
-function,PyMemoryView_FromObject,3.2,, -function,PyMemoryView_GetContiguous,3.2,, -var,PyMemoryView_Type,3.2,, +data,PyMemberDescr_Type,3.2,, +func,PyMember_GetOne,3.2,, +func,PyMember_SetOne,3.2,, +func,PyMemoryView_FromBuffer,3.11,, +func,PyMemoryView_FromMemory,3.7,, +func,PyMemoryView_FromObject,3.2,, +func,PyMemoryView_GetContiguous,3.2,, +data,PyMemoryView_Type,3.2,, type,PyMethodDef,3.2,,full-abi -var,PyMethodDescr_Type,3.2,, +data,PyMethodDescr_Type,3.2,, type,PyModuleDef,3.2,,full-abi type,PyModuleDef_Base,3.2,,full-abi -function,PyModuleDef_Init,3.5,, -var,PyModuleDef_Type,3.5,, -function,PyModule_Add,3.13,, -function,PyModule_AddFunctions,3.7,, -function,PyModule_AddIntConstant,3.2,, -function,PyModule_AddObject,3.2,, -function,PyModule_AddObjectRef,3.10,, -function,PyModule_AddStringConstant,3.2,, -function,PyModule_AddType,3.10,, -function,PyModule_Create2,3.2,, -function,PyModule_ExecDef,3.7,, -function,PyModule_FromDefAndSpec2,3.7,, -function,PyModule_GetDef,3.2,, -function,PyModule_GetDict,3.2,, -function,PyModule_GetFilename,3.2,, -function,PyModule_GetFilenameObject,3.2,, -function,PyModule_GetName,3.2,, -function,PyModule_GetNameObject,3.7,, -function,PyModule_GetState,3.2,, -function,PyModule_New,3.2,, -function,PyModule_NewObject,3.7,, -function,PyModule_SetDocString,3.7,, -var,PyModule_Type,3.2,, -function,PyNumber_Absolute,3.2,, -function,PyNumber_Add,3.2,, -function,PyNumber_And,3.2,, -function,PyNumber_AsSsize_t,3.2,, -function,PyNumber_Check,3.2,, -function,PyNumber_Divmod,3.2,, -function,PyNumber_Float,3.2,, -function,PyNumber_FloorDivide,3.2,, -function,PyNumber_InPlaceAdd,3.2,, -function,PyNumber_InPlaceAnd,3.2,, -function,PyNumber_InPlaceFloorDivide,3.2,, -function,PyNumber_InPlaceLshift,3.2,, -function,PyNumber_InPlaceMatrixMultiply,3.7,, -function,PyNumber_InPlaceMultiply,3.2,, -function,PyNumber_InPlaceOr,3.2,, -function,PyNumber_InPlacePower,3.2,, -function,PyNumber_InPlaceRemainder,3.2,, -function,PyNumber_InPlaceRshift,3.2,, -function,PyNumber_InPlaceSubtract,3.2,, -function,PyNumber_InPlaceTrueDivide,3.2,, -function,PyNumber_InPlaceXor,3.2,, -function,PyNumber_Index,3.2,, -function,PyNumber_Invert,3.2,, -function,PyNumber_Long,3.2,, -function,PyNumber_Lshift,3.2,, -function,PyNumber_MatrixMultiply,3.7,, -function,PyNumber_Multiply,3.2,, -function,PyNumber_Negative,3.2,, -function,PyNumber_Or,3.2,, -function,PyNumber_Positive,3.2,, -function,PyNumber_Power,3.2,, -function,PyNumber_Remainder,3.2,, -function,PyNumber_Rshift,3.2,, -function,PyNumber_Subtract,3.2,, -function,PyNumber_ToBase,3.2,, -function,PyNumber_TrueDivide,3.2,, -function,PyNumber_Xor,3.2,, -function,PyOS_AfterFork,3.2,on platforms with fork(), -function,PyOS_AfterFork_Child,3.7,on platforms with fork(), -function,PyOS_AfterFork_Parent,3.7,on platforms with fork(), -function,PyOS_BeforeFork,3.7,on platforms with fork(), -function,PyOS_CheckStack,3.7,on platforms with USE_STACKCHECK, -function,PyOS_FSPath,3.6,, -var,PyOS_InputHook,3.2,, -function,PyOS_InterruptOccurred,3.2,, -function,PyOS_double_to_string,3.2,, -function,PyOS_getsig,3.2,, -function,PyOS_mystricmp,3.2,, -function,PyOS_mystrnicmp,3.2,, -function,PyOS_setsig,3.2,, +func,PyModuleDef_Init,3.5,, +data,PyModuleDef_Type,3.5,, +func,PyModule_Add,3.13,, +func,PyModule_AddFunctions,3.7,, +func,PyModule_AddIntConstant,3.2,, +func,PyModule_AddObject,3.2,, +func,PyModule_AddObjectRef,3.10,, +func,PyModule_AddStringConstant,3.2,, +func,PyModule_AddType,3.10,, +func,PyModule_Create2,3.2,, +func,PyModule_ExecDef,3.7,, 
+func,PyModule_FromDefAndSpec2,3.7,, +func,PyModule_GetDef,3.2,, +func,PyModule_GetDict,3.2,, +func,PyModule_GetFilename,3.2,, +func,PyModule_GetFilenameObject,3.2,, +func,PyModule_GetName,3.2,, +func,PyModule_GetNameObject,3.7,, +func,PyModule_GetState,3.2,, +func,PyModule_New,3.2,, +func,PyModule_NewObject,3.7,, +func,PyModule_SetDocString,3.7,, +data,PyModule_Type,3.2,, +func,PyNumber_Absolute,3.2,, +func,PyNumber_Add,3.2,, +func,PyNumber_And,3.2,, +func,PyNumber_AsSsize_t,3.2,, +func,PyNumber_Check,3.2,, +func,PyNumber_Divmod,3.2,, +func,PyNumber_Float,3.2,, +func,PyNumber_FloorDivide,3.2,, +func,PyNumber_InPlaceAdd,3.2,, +func,PyNumber_InPlaceAnd,3.2,, +func,PyNumber_InPlaceFloorDivide,3.2,, +func,PyNumber_InPlaceLshift,3.2,, +func,PyNumber_InPlaceMatrixMultiply,3.7,, +func,PyNumber_InPlaceMultiply,3.2,, +func,PyNumber_InPlaceOr,3.2,, +func,PyNumber_InPlacePower,3.2,, +func,PyNumber_InPlaceRemainder,3.2,, +func,PyNumber_InPlaceRshift,3.2,, +func,PyNumber_InPlaceSubtract,3.2,, +func,PyNumber_InPlaceTrueDivide,3.2,, +func,PyNumber_InPlaceXor,3.2,, +func,PyNumber_Index,3.2,, +func,PyNumber_Invert,3.2,, +func,PyNumber_Long,3.2,, +func,PyNumber_Lshift,3.2,, +func,PyNumber_MatrixMultiply,3.7,, +func,PyNumber_Multiply,3.2,, +func,PyNumber_Negative,3.2,, +func,PyNumber_Or,3.2,, +func,PyNumber_Positive,3.2,, +func,PyNumber_Power,3.2,, +func,PyNumber_Remainder,3.2,, +func,PyNumber_Rshift,3.2,, +func,PyNumber_Subtract,3.2,, +func,PyNumber_ToBase,3.2,, +func,PyNumber_TrueDivide,3.2,, +func,PyNumber_Xor,3.2,, +func,PyOS_AfterFork,3.2,on platforms with fork(), +func,PyOS_AfterFork_Child,3.7,on platforms with fork(), +func,PyOS_AfterFork_Parent,3.7,on platforms with fork(), +func,PyOS_BeforeFork,3.7,on platforms with fork(), +func,PyOS_CheckStack,3.7,on platforms with USE_STACKCHECK, +func,PyOS_FSPath,3.6,, +data,PyOS_InputHook,3.2,, +func,PyOS_InterruptOccurred,3.2,, +func,PyOS_double_to_string,3.2,, +func,PyOS_getsig,3.2,, +func,PyOS_mystricmp,3.2,, +func,PyOS_mystrnicmp,3.2,, +func,PyOS_setsig,3.2,, type,PyOS_sighandler_t,3.2,, -function,PyOS_snprintf,3.2,, -function,PyOS_string_to_double,3.2,, -function,PyOS_strtol,3.2,, -function,PyOS_strtoul,3.2,, -function,PyOS_vsnprintf,3.2,, +func,PyOS_snprintf,3.2,, +func,PyOS_string_to_double,3.2,, +func,PyOS_strtol,3.2,, +func,PyOS_strtoul,3.2,, +func,PyOS_vsnprintf,3.2,, type,PyObject,3.2,,members member,PyObject.ob_refcnt,3.2,, member,PyObject.ob_type,3.2,, -function,PyObject_ASCII,3.2,, -function,PyObject_AsFileDescriptor,3.2,, -function,PyObject_Bytes,3.2,, -function,PyObject_Call,3.2,, -function,PyObject_CallFunction,3.2,, -function,PyObject_CallFunctionObjArgs,3.2,, -function,PyObject_CallMethod,3.2,, -function,PyObject_CallMethodObjArgs,3.2,, -function,PyObject_CallNoArgs,3.10,, -function,PyObject_CallObject,3.2,, -function,PyObject_Calloc,3.7,, -function,PyObject_CheckBuffer,3.11,, -function,PyObject_ClearWeakRefs,3.2,, -function,PyObject_CopyData,3.11,, -function,PyObject_DelAttr,3.13,, -function,PyObject_DelAttrString,3.13,, -function,PyObject_DelItem,3.2,, -function,PyObject_DelItemString,3.2,, -function,PyObject_Dir,3.2,, -function,PyObject_Format,3.2,, -function,PyObject_Free,3.2,, -function,PyObject_GC_Del,3.2,, -function,PyObject_GC_IsFinalized,3.9,, -function,PyObject_GC_IsTracked,3.9,, -function,PyObject_GC_Track,3.2,, -function,PyObject_GC_UnTrack,3.2,, -function,PyObject_GenericGetAttr,3.2,, -function,PyObject_GenericGetDict,3.10,, -function,PyObject_GenericSetAttr,3.2,, -function,PyObject_GenericSetDict,3.7,, 
-function,PyObject_GetAIter,3.10,, -function,PyObject_GetAttr,3.2,, -function,PyObject_GetAttrString,3.2,, -function,PyObject_GetBuffer,3.11,, -function,PyObject_GetItem,3.2,, -function,PyObject_GetIter,3.2,, -function,PyObject_GetOptionalAttr,3.13,, -function,PyObject_GetOptionalAttrString,3.13,, -function,PyObject_GetTypeData,3.12,, -function,PyObject_HasAttr,3.2,, -function,PyObject_HasAttrString,3.2,, -function,PyObject_HasAttrStringWithError,3.13,, -function,PyObject_HasAttrWithError,3.13,, -function,PyObject_Hash,3.2,, -function,PyObject_HashNotImplemented,3.2,, -function,PyObject_Init,3.2,, -function,PyObject_InitVar,3.2,, -function,PyObject_IsInstance,3.2,, -function,PyObject_IsSubclass,3.2,, -function,PyObject_IsTrue,3.2,, -function,PyObject_Length,3.2,, -function,PyObject_Malloc,3.2,, -function,PyObject_Not,3.2,, -function,PyObject_Realloc,3.2,, -function,PyObject_Repr,3.2,, -function,PyObject_RichCompare,3.2,, -function,PyObject_RichCompareBool,3.2,, -function,PyObject_SelfIter,3.2,, -function,PyObject_SetAttr,3.2,, -function,PyObject_SetAttrString,3.2,, -function,PyObject_SetItem,3.2,, -function,PyObject_Size,3.2,, -function,PyObject_Str,3.2,, -function,PyObject_Type,3.2,, -function,PyObject_Vectorcall,3.12,, -function,PyObject_VectorcallMethod,3.12,, -var,PyProperty_Type,3.2,, -var,PyRangeIter_Type,3.2,, -var,PyRange_Type,3.2,, -var,PyReversed_Type,3.2,, -function,PySeqIter_New,3.2,, -var,PySeqIter_Type,3.2,, -function,PySequence_Check,3.2,, -function,PySequence_Concat,3.2,, -function,PySequence_Contains,3.2,, -function,PySequence_Count,3.2,, -function,PySequence_DelItem,3.2,, -function,PySequence_DelSlice,3.2,, -function,PySequence_Fast,3.2,, -function,PySequence_GetItem,3.2,, -function,PySequence_GetSlice,3.2,, -function,PySequence_In,3.2,, -function,PySequence_InPlaceConcat,3.2,, -function,PySequence_InPlaceRepeat,3.2,, -function,PySequence_Index,3.2,, -function,PySequence_Length,3.2,, -function,PySequence_List,3.2,, -function,PySequence_Repeat,3.2,, -function,PySequence_SetItem,3.2,, -function,PySequence_SetSlice,3.2,, -function,PySequence_Size,3.2,, -function,PySequence_Tuple,3.2,, -var,PySetIter_Type,3.2,, -function,PySet_Add,3.2,, -function,PySet_Clear,3.2,, -function,PySet_Contains,3.2,, -function,PySet_Discard,3.2,, -function,PySet_New,3.2,, -function,PySet_Pop,3.2,, -function,PySet_Size,3.2,, -var,PySet_Type,3.2,, -function,PySlice_AdjustIndices,3.7,, -function,PySlice_GetIndices,3.2,, -function,PySlice_GetIndicesEx,3.2,, -function,PySlice_New,3.2,, -var,PySlice_Type,3.2,, -function,PySlice_Unpack,3.7,, -function,PyState_AddModule,3.3,, -function,PyState_FindModule,3.2,, -function,PyState_RemoveModule,3.3,, +func,PyObject_ASCII,3.2,, +func,PyObject_AsFileDescriptor,3.2,, +func,PyObject_Bytes,3.2,, +func,PyObject_Call,3.2,, +func,PyObject_CallFunction,3.2,, +func,PyObject_CallFunctionObjArgs,3.2,, +func,PyObject_CallMethod,3.2,, +func,PyObject_CallMethodObjArgs,3.2,, +func,PyObject_CallNoArgs,3.10,, +func,PyObject_CallObject,3.2,, +func,PyObject_Calloc,3.7,, +func,PyObject_CheckBuffer,3.11,, +func,PyObject_ClearWeakRefs,3.2,, +func,PyObject_CopyData,3.11,, +func,PyObject_DelAttr,3.13,, +func,PyObject_DelAttrString,3.13,, +func,PyObject_DelItem,3.2,, +func,PyObject_DelItemString,3.2,, +func,PyObject_Dir,3.2,, +func,PyObject_Format,3.2,, +func,PyObject_Free,3.2,, +func,PyObject_GC_Del,3.2,, +func,PyObject_GC_IsFinalized,3.9,, +func,PyObject_GC_IsTracked,3.9,, +func,PyObject_GC_Track,3.2,, +func,PyObject_GC_UnTrack,3.2,, +func,PyObject_GenericGetAttr,3.2,, 
+func,PyObject_GenericGetDict,3.10,, +func,PyObject_GenericSetAttr,3.2,, +func,PyObject_GenericSetDict,3.7,, +func,PyObject_GetAIter,3.10,, +func,PyObject_GetAttr,3.2,, +func,PyObject_GetAttrString,3.2,, +func,PyObject_GetBuffer,3.11,, +func,PyObject_GetItem,3.2,, +func,PyObject_GetIter,3.2,, +func,PyObject_GetOptionalAttr,3.13,, +func,PyObject_GetOptionalAttrString,3.13,, +func,PyObject_GetTypeData,3.12,, +func,PyObject_HasAttr,3.2,, +func,PyObject_HasAttrString,3.2,, +func,PyObject_HasAttrStringWithError,3.13,, +func,PyObject_HasAttrWithError,3.13,, +func,PyObject_Hash,3.2,, +func,PyObject_HashNotImplemented,3.2,, +func,PyObject_Init,3.2,, +func,PyObject_InitVar,3.2,, +func,PyObject_IsInstance,3.2,, +func,PyObject_IsSubclass,3.2,, +func,PyObject_IsTrue,3.2,, +func,PyObject_Length,3.2,, +func,PyObject_Malloc,3.2,, +func,PyObject_Not,3.2,, +func,PyObject_Realloc,3.2,, +func,PyObject_Repr,3.2,, +func,PyObject_RichCompare,3.2,, +func,PyObject_RichCompareBool,3.2,, +func,PyObject_SelfIter,3.2,, +func,PyObject_SetAttr,3.2,, +func,PyObject_SetAttrString,3.2,, +func,PyObject_SetItem,3.2,, +func,PyObject_Size,3.2,, +func,PyObject_Str,3.2,, +func,PyObject_Type,3.2,, +func,PyObject_Vectorcall,3.12,, +func,PyObject_VectorcallMethod,3.12,, +data,PyProperty_Type,3.2,, +data,PyRangeIter_Type,3.2,, +data,PyRange_Type,3.2,, +data,PyReversed_Type,3.2,, +func,PySeqIter_New,3.2,, +data,PySeqIter_Type,3.2,, +func,PySequence_Check,3.2,, +func,PySequence_Concat,3.2,, +func,PySequence_Contains,3.2,, +func,PySequence_Count,3.2,, +func,PySequence_DelItem,3.2,, +func,PySequence_DelSlice,3.2,, +func,PySequence_Fast,3.2,, +func,PySequence_GetItem,3.2,, +func,PySequence_GetSlice,3.2,, +func,PySequence_In,3.2,, +func,PySequence_InPlaceConcat,3.2,, +func,PySequence_InPlaceRepeat,3.2,, +func,PySequence_Index,3.2,, +func,PySequence_Length,3.2,, +func,PySequence_List,3.2,, +func,PySequence_Repeat,3.2,, +func,PySequence_SetItem,3.2,, +func,PySequence_SetSlice,3.2,, +func,PySequence_Size,3.2,, +func,PySequence_Tuple,3.2,, +data,PySetIter_Type,3.2,, +func,PySet_Add,3.2,, +func,PySet_Clear,3.2,, +func,PySet_Contains,3.2,, +func,PySet_Discard,3.2,, +func,PySet_New,3.2,, +func,PySet_Pop,3.2,, +func,PySet_Size,3.2,, +data,PySet_Type,3.2,, +func,PySlice_AdjustIndices,3.7,, +func,PySlice_GetIndices,3.2,, +func,PySlice_GetIndicesEx,3.2,, +func,PySlice_New,3.2,, +data,PySlice_Type,3.2,, +func,PySlice_Unpack,3.7,, +func,PyState_AddModule,3.3,, +func,PyState_FindModule,3.2,, +func,PyState_RemoveModule,3.3,, type,PyStructSequence_Desc,3.2,,full-abi type,PyStructSequence_Field,3.2,,full-abi -function,PyStructSequence_GetItem,3.2,, -function,PyStructSequence_New,3.2,, -function,PyStructSequence_NewType,3.2,, -function,PyStructSequence_SetItem,3.2,, -var,PyStructSequence_UnnamedField,3.11,, -var,PySuper_Type,3.2,, -function,PySys_Audit,3.13,, -function,PySys_AuditTuple,3.13,, -function,PySys_FormatStderr,3.2,, -function,PySys_FormatStdout,3.2,, -function,PySys_GetObject,3.2,, -function,PySys_GetXOptions,3.7,, -function,PySys_ResetWarnOptions,3.2,, -function,PySys_SetArgv,3.2,, -function,PySys_SetArgvEx,3.2,, -function,PySys_SetObject,3.2,, -function,PySys_WriteStderr,3.2,, -function,PySys_WriteStdout,3.2,, +func,PyStructSequence_GetItem,3.2,, +func,PyStructSequence_New,3.2,, +func,PyStructSequence_NewType,3.2,, +func,PyStructSequence_SetItem,3.2,, +data,PyStructSequence_UnnamedField,3.11,, +data,PySuper_Type,3.2,, +func,PySys_Audit,3.13,, +func,PySys_AuditTuple,3.13,, +func,PySys_FormatStderr,3.2,, +func,PySys_FormatStdout,3.2,, 
+func,PySys_GetObject,3.2,, +func,PySys_GetXOptions,3.7,, +func,PySys_ResetWarnOptions,3.2,, +func,PySys_SetArgv,3.2,, +func,PySys_SetArgvEx,3.2,, +func,PySys_SetObject,3.2,, +func,PySys_WriteStderr,3.2,, +func,PySys_WriteStdout,3.2,, type,PyThreadState,3.2,,opaque -function,PyThreadState_Clear,3.2,, -function,PyThreadState_Delete,3.2,, -function,PyThreadState_Get,3.2,, -function,PyThreadState_GetDict,3.2,, -function,PyThreadState_GetFrame,3.10,, -function,PyThreadState_GetID,3.10,, -function,PyThreadState_GetInterpreter,3.10,, -function,PyThreadState_New,3.2,, -function,PyThreadState_SetAsyncExc,3.2,, -function,PyThreadState_Swap,3.2,, -function,PyThread_GetInfo,3.3,, -function,PyThread_ReInitTLS,3.2,, -function,PyThread_acquire_lock,3.2,, -function,PyThread_acquire_lock_timed,3.2,, -function,PyThread_allocate_lock,3.2,, -function,PyThread_create_key,3.2,, -function,PyThread_delete_key,3.2,, -function,PyThread_delete_key_value,3.2,, -function,PyThread_exit_thread,3.2,, -function,PyThread_free_lock,3.2,, -function,PyThread_get_key_value,3.2,, -function,PyThread_get_stacksize,3.2,, -function,PyThread_get_thread_ident,3.2,, -function,PyThread_get_thread_native_id,3.2,on platforms with native thread IDs, -function,PyThread_init_thread,3.2,, -function,PyThread_release_lock,3.2,, -function,PyThread_set_key_value,3.2,, -function,PyThread_set_stacksize,3.2,, -function,PyThread_start_new_thread,3.2,, -function,PyThread_tss_alloc,3.7,, -function,PyThread_tss_create,3.7,, -function,PyThread_tss_delete,3.7,, -function,PyThread_tss_free,3.7,, -function,PyThread_tss_get,3.7,, -function,PyThread_tss_is_created,3.7,, -function,PyThread_tss_set,3.7,, -function,PyTraceBack_Here,3.2,, -function,PyTraceBack_Print,3.2,, -var,PyTraceBack_Type,3.2,, -var,PyTupleIter_Type,3.2,, -function,PyTuple_GetItem,3.2,, -function,PyTuple_GetSlice,3.2,, -function,PyTuple_New,3.2,, -function,PyTuple_Pack,3.2,, -function,PyTuple_SetItem,3.2,, -function,PyTuple_Size,3.2,, -var,PyTuple_Type,3.2,, +func,PyThreadState_Clear,3.2,, +func,PyThreadState_Delete,3.2,, +func,PyThreadState_Get,3.2,, +func,PyThreadState_GetDict,3.2,, +func,PyThreadState_GetFrame,3.10,, +func,PyThreadState_GetID,3.10,, +func,PyThreadState_GetInterpreter,3.10,, +func,PyThreadState_New,3.2,, +func,PyThreadState_SetAsyncExc,3.2,, +func,PyThreadState_Swap,3.2,, +func,PyThread_GetInfo,3.3,, +func,PyThread_ReInitTLS,3.2,, +func,PyThread_acquire_lock,3.2,, +func,PyThread_acquire_lock_timed,3.2,, +func,PyThread_allocate_lock,3.2,, +func,PyThread_create_key,3.2,, +func,PyThread_delete_key,3.2,, +func,PyThread_delete_key_value,3.2,, +func,PyThread_exit_thread,3.2,, +func,PyThread_free_lock,3.2,, +func,PyThread_get_key_value,3.2,, +func,PyThread_get_stacksize,3.2,, +func,PyThread_get_thread_ident,3.2,, +func,PyThread_get_thread_native_id,3.2,on platforms with native thread IDs, +func,PyThread_init_thread,3.2,, +func,PyThread_release_lock,3.2,, +func,PyThread_set_key_value,3.2,, +func,PyThread_set_stacksize,3.2,, +func,PyThread_start_new_thread,3.2,, +func,PyThread_tss_alloc,3.7,, +func,PyThread_tss_create,3.7,, +func,PyThread_tss_delete,3.7,, +func,PyThread_tss_free,3.7,, +func,PyThread_tss_get,3.7,, +func,PyThread_tss_is_created,3.7,, +func,PyThread_tss_set,3.7,, +func,PyTraceBack_Here,3.2,, +func,PyTraceBack_Print,3.2,, +data,PyTraceBack_Type,3.2,, +data,PyTupleIter_Type,3.2,, +func,PyTuple_GetItem,3.2,, +func,PyTuple_GetSlice,3.2,, +func,PyTuple_New,3.2,, +func,PyTuple_Pack,3.2,, +func,PyTuple_SetItem,3.2,, +func,PyTuple_Size,3.2,, +data,PyTuple_Type,3.2,, 
type,PyTypeObject,3.2,,opaque -function,PyType_ClearCache,3.2,, -function,PyType_FromMetaclass,3.12,, -function,PyType_FromModuleAndSpec,3.10,, -function,PyType_FromSpec,3.2,, -function,PyType_FromSpecWithBases,3.3,, -function,PyType_GenericAlloc,3.2,, -function,PyType_GenericNew,3.2,, -function,PyType_GetFlags,3.2,, -function,PyType_GetFullyQualifiedName,3.13,, -function,PyType_GetModule,3.10,, -function,PyType_GetModuleByDef,3.13,, -function,PyType_GetModuleName,3.13,, -function,PyType_GetModuleState,3.10,, -function,PyType_GetName,3.11,, -function,PyType_GetQualName,3.11,, -function,PyType_GetSlot,3.4,, -function,PyType_GetTypeDataSize,3.12,, -function,PyType_IsSubtype,3.2,, -function,PyType_Modified,3.2,, -function,PyType_Ready,3.2,, +func,PyType_ClearCache,3.2,, +func,PyType_FromMetaclass,3.12,, +func,PyType_FromModuleAndSpec,3.10,, +func,PyType_FromSpec,3.2,, +func,PyType_FromSpecWithBases,3.3,, +func,PyType_GenericAlloc,3.2,, +func,PyType_GenericNew,3.2,, +func,PyType_GetFlags,3.2,, +func,PyType_GetFullyQualifiedName,3.13,, +func,PyType_GetModule,3.10,, +func,PyType_GetModuleByDef,3.13,, +func,PyType_GetModuleName,3.13,, +func,PyType_GetModuleState,3.10,, +func,PyType_GetName,3.11,, +func,PyType_GetQualName,3.11,, +func,PyType_GetSlot,3.4,, +func,PyType_GetTypeDataSize,3.12,, +func,PyType_IsSubtype,3.2,, +func,PyType_Modified,3.2,, +func,PyType_Ready,3.2,, type,PyType_Slot,3.2,,full-abi type,PyType_Spec,3.2,,full-abi -var,PyType_Type,3.2,, -function,PyUnicodeDecodeError_Create,3.2,, -function,PyUnicodeDecodeError_GetEncoding,3.2,, -function,PyUnicodeDecodeError_GetEnd,3.2,, -function,PyUnicodeDecodeError_GetObject,3.2,, -function,PyUnicodeDecodeError_GetReason,3.2,, -function,PyUnicodeDecodeError_GetStart,3.2,, -function,PyUnicodeDecodeError_SetEnd,3.2,, -function,PyUnicodeDecodeError_SetReason,3.2,, -function,PyUnicodeDecodeError_SetStart,3.2,, -function,PyUnicodeEncodeError_GetEncoding,3.2,, -function,PyUnicodeEncodeError_GetEnd,3.2,, -function,PyUnicodeEncodeError_GetObject,3.2,, -function,PyUnicodeEncodeError_GetReason,3.2,, -function,PyUnicodeEncodeError_GetStart,3.2,, -function,PyUnicodeEncodeError_SetEnd,3.2,, -function,PyUnicodeEncodeError_SetReason,3.2,, -function,PyUnicodeEncodeError_SetStart,3.2,, -var,PyUnicodeIter_Type,3.2,, -function,PyUnicodeTranslateError_GetEnd,3.2,, -function,PyUnicodeTranslateError_GetObject,3.2,, -function,PyUnicodeTranslateError_GetReason,3.2,, -function,PyUnicodeTranslateError_GetStart,3.2,, -function,PyUnicodeTranslateError_SetEnd,3.2,, -function,PyUnicodeTranslateError_SetReason,3.2,, -function,PyUnicodeTranslateError_SetStart,3.2,, -function,PyUnicode_Append,3.2,, -function,PyUnicode_AppendAndDel,3.2,, -function,PyUnicode_AsASCIIString,3.2,, -function,PyUnicode_AsCharmapString,3.2,, -function,PyUnicode_AsDecodedObject,3.2,, -function,PyUnicode_AsDecodedUnicode,3.2,, -function,PyUnicode_AsEncodedObject,3.2,, -function,PyUnicode_AsEncodedString,3.2,, -function,PyUnicode_AsEncodedUnicode,3.2,, -function,PyUnicode_AsLatin1String,3.2,, -function,PyUnicode_AsMBCSString,3.7,on Windows, -function,PyUnicode_AsRawUnicodeEscapeString,3.2,, -function,PyUnicode_AsUCS4,3.7,, -function,PyUnicode_AsUCS4Copy,3.7,, -function,PyUnicode_AsUTF16String,3.2,, -function,PyUnicode_AsUTF32String,3.2,, -function,PyUnicode_AsUTF8AndSize,3.10,, -function,PyUnicode_AsUTF8String,3.2,, -function,PyUnicode_AsUnicodeEscapeString,3.2,, -function,PyUnicode_AsWideChar,3.2,, -function,PyUnicode_AsWideCharString,3.7,, -function,PyUnicode_BuildEncodingMap,3.2,, 
-function,PyUnicode_Compare,3.2,, -function,PyUnicode_CompareWithASCIIString,3.2,, -function,PyUnicode_Concat,3.2,, -function,PyUnicode_Contains,3.2,, -function,PyUnicode_Count,3.2,, -function,PyUnicode_Decode,3.2,, -function,PyUnicode_DecodeASCII,3.2,, -function,PyUnicode_DecodeCharmap,3.2,, -function,PyUnicode_DecodeCodePageStateful,3.7,on Windows, -function,PyUnicode_DecodeFSDefault,3.2,, -function,PyUnicode_DecodeFSDefaultAndSize,3.2,, -function,PyUnicode_DecodeLatin1,3.2,, -function,PyUnicode_DecodeLocale,3.7,, -function,PyUnicode_DecodeLocaleAndSize,3.7,, -function,PyUnicode_DecodeMBCS,3.7,on Windows, -function,PyUnicode_DecodeMBCSStateful,3.7,on Windows, -function,PyUnicode_DecodeRawUnicodeEscape,3.2,, -function,PyUnicode_DecodeUTF16,3.2,, -function,PyUnicode_DecodeUTF16Stateful,3.2,, -function,PyUnicode_DecodeUTF32,3.2,, -function,PyUnicode_DecodeUTF32Stateful,3.2,, -function,PyUnicode_DecodeUTF7,3.2,, -function,PyUnicode_DecodeUTF7Stateful,3.2,, -function,PyUnicode_DecodeUTF8,3.2,, -function,PyUnicode_DecodeUTF8Stateful,3.2,, -function,PyUnicode_DecodeUnicodeEscape,3.2,, -function,PyUnicode_EncodeCodePage,3.7,on Windows, -function,PyUnicode_EncodeFSDefault,3.2,, -function,PyUnicode_EncodeLocale,3.7,, -function,PyUnicode_EqualToUTF8,3.13,, -function,PyUnicode_EqualToUTF8AndSize,3.13,, -function,PyUnicode_FSConverter,3.2,, -function,PyUnicode_FSDecoder,3.2,, -function,PyUnicode_Find,3.2,, -function,PyUnicode_FindChar,3.7,, -function,PyUnicode_Format,3.2,, -function,PyUnicode_FromEncodedObject,3.2,, -function,PyUnicode_FromFormat,3.2,, -function,PyUnicode_FromFormatV,3.2,, -function,PyUnicode_FromObject,3.2,, -function,PyUnicode_FromOrdinal,3.2,, -function,PyUnicode_FromString,3.2,, -function,PyUnicode_FromStringAndSize,3.2,, -function,PyUnicode_FromWideChar,3.2,, -function,PyUnicode_GetDefaultEncoding,3.2,, -function,PyUnicode_GetLength,3.7,, -function,PyUnicode_InternFromString,3.2,, -function,PyUnicode_InternInPlace,3.2,, -function,PyUnicode_IsIdentifier,3.2,, -function,PyUnicode_Join,3.2,, -function,PyUnicode_Partition,3.2,, -function,PyUnicode_RPartition,3.2,, -function,PyUnicode_RSplit,3.2,, -function,PyUnicode_ReadChar,3.7,, -function,PyUnicode_Replace,3.2,, -function,PyUnicode_Resize,3.2,, -function,PyUnicode_RichCompare,3.2,, -function,PyUnicode_Split,3.2,, -function,PyUnicode_Splitlines,3.2,, -function,PyUnicode_Substring,3.7,, -function,PyUnicode_Tailmatch,3.2,, -function,PyUnicode_Translate,3.2,, -var,PyUnicode_Type,3.2,, -function,PyUnicode_WriteChar,3.7,, +data,PyType_Type,3.2,, +func,PyUnicodeDecodeError_Create,3.2,, +func,PyUnicodeDecodeError_GetEncoding,3.2,, +func,PyUnicodeDecodeError_GetEnd,3.2,, +func,PyUnicodeDecodeError_GetObject,3.2,, +func,PyUnicodeDecodeError_GetReason,3.2,, +func,PyUnicodeDecodeError_GetStart,3.2,, +func,PyUnicodeDecodeError_SetEnd,3.2,, +func,PyUnicodeDecodeError_SetReason,3.2,, +func,PyUnicodeDecodeError_SetStart,3.2,, +func,PyUnicodeEncodeError_GetEncoding,3.2,, +func,PyUnicodeEncodeError_GetEnd,3.2,, +func,PyUnicodeEncodeError_GetObject,3.2,, +func,PyUnicodeEncodeError_GetReason,3.2,, +func,PyUnicodeEncodeError_GetStart,3.2,, +func,PyUnicodeEncodeError_SetEnd,3.2,, +func,PyUnicodeEncodeError_SetReason,3.2,, +func,PyUnicodeEncodeError_SetStart,3.2,, +data,PyUnicodeIter_Type,3.2,, +func,PyUnicodeTranslateError_GetEnd,3.2,, +func,PyUnicodeTranslateError_GetObject,3.2,, +func,PyUnicodeTranslateError_GetReason,3.2,, +func,PyUnicodeTranslateError_GetStart,3.2,, +func,PyUnicodeTranslateError_SetEnd,3.2,, 
+func,PyUnicodeTranslateError_SetReason,3.2,, +func,PyUnicodeTranslateError_SetStart,3.2,, +func,PyUnicode_Append,3.2,, +func,PyUnicode_AppendAndDel,3.2,, +func,PyUnicode_AsASCIIString,3.2,, +func,PyUnicode_AsCharmapString,3.2,, +func,PyUnicode_AsDecodedObject,3.2,, +func,PyUnicode_AsDecodedUnicode,3.2,, +func,PyUnicode_AsEncodedObject,3.2,, +func,PyUnicode_AsEncodedString,3.2,, +func,PyUnicode_AsEncodedUnicode,3.2,, +func,PyUnicode_AsLatin1String,3.2,, +func,PyUnicode_AsMBCSString,3.7,on Windows, +func,PyUnicode_AsRawUnicodeEscapeString,3.2,, +func,PyUnicode_AsUCS4,3.7,, +func,PyUnicode_AsUCS4Copy,3.7,, +func,PyUnicode_AsUTF16String,3.2,, +func,PyUnicode_AsUTF32String,3.2,, +func,PyUnicode_AsUTF8AndSize,3.10,, +func,PyUnicode_AsUTF8String,3.2,, +func,PyUnicode_AsUnicodeEscapeString,3.2,, +func,PyUnicode_AsWideChar,3.2,, +func,PyUnicode_AsWideCharString,3.7,, +func,PyUnicode_BuildEncodingMap,3.2,, +func,PyUnicode_Compare,3.2,, +func,PyUnicode_CompareWithASCIIString,3.2,, +func,PyUnicode_Concat,3.2,, +func,PyUnicode_Contains,3.2,, +func,PyUnicode_Count,3.2,, +func,PyUnicode_Decode,3.2,, +func,PyUnicode_DecodeASCII,3.2,, +func,PyUnicode_DecodeCharmap,3.2,, +func,PyUnicode_DecodeCodePageStateful,3.7,on Windows, +func,PyUnicode_DecodeFSDefault,3.2,, +func,PyUnicode_DecodeFSDefaultAndSize,3.2,, +func,PyUnicode_DecodeLatin1,3.2,, +func,PyUnicode_DecodeLocale,3.7,, +func,PyUnicode_DecodeLocaleAndSize,3.7,, +func,PyUnicode_DecodeMBCS,3.7,on Windows, +func,PyUnicode_DecodeMBCSStateful,3.7,on Windows, +func,PyUnicode_DecodeRawUnicodeEscape,3.2,, +func,PyUnicode_DecodeUTF16,3.2,, +func,PyUnicode_DecodeUTF16Stateful,3.2,, +func,PyUnicode_DecodeUTF32,3.2,, +func,PyUnicode_DecodeUTF32Stateful,3.2,, +func,PyUnicode_DecodeUTF7,3.2,, +func,PyUnicode_DecodeUTF7Stateful,3.2,, +func,PyUnicode_DecodeUTF8,3.2,, +func,PyUnicode_DecodeUTF8Stateful,3.2,, +func,PyUnicode_DecodeUnicodeEscape,3.2,, +func,PyUnicode_EncodeCodePage,3.7,on Windows, +func,PyUnicode_EncodeFSDefault,3.2,, +func,PyUnicode_EncodeLocale,3.7,, +func,PyUnicode_EqualToUTF8,3.13,, +func,PyUnicode_EqualToUTF8AndSize,3.13,, +func,PyUnicode_FSConverter,3.2,, +func,PyUnicode_FSDecoder,3.2,, +func,PyUnicode_Find,3.2,, +func,PyUnicode_FindChar,3.7,, +func,PyUnicode_Format,3.2,, +func,PyUnicode_FromEncodedObject,3.2,, +func,PyUnicode_FromFormat,3.2,, +func,PyUnicode_FromFormatV,3.2,, +func,PyUnicode_FromObject,3.2,, +func,PyUnicode_FromOrdinal,3.2,, +func,PyUnicode_FromString,3.2,, +func,PyUnicode_FromStringAndSize,3.2,, +func,PyUnicode_FromWideChar,3.2,, +func,PyUnicode_GetDefaultEncoding,3.2,, +func,PyUnicode_GetLength,3.7,, +func,PyUnicode_InternFromString,3.2,, +func,PyUnicode_InternInPlace,3.2,, +func,PyUnicode_IsIdentifier,3.2,, +func,PyUnicode_Join,3.2,, +func,PyUnicode_Partition,3.2,, +func,PyUnicode_RPartition,3.2,, +func,PyUnicode_RSplit,3.2,, +func,PyUnicode_ReadChar,3.7,, +func,PyUnicode_Replace,3.2,, +func,PyUnicode_Resize,3.2,, +func,PyUnicode_RichCompare,3.2,, +func,PyUnicode_Split,3.2,, +func,PyUnicode_Splitlines,3.2,, +func,PyUnicode_Substring,3.7,, +func,PyUnicode_Tailmatch,3.2,, +func,PyUnicode_Translate,3.2,, +data,PyUnicode_Type,3.2,, +func,PyUnicode_WriteChar,3.7,, type,PyVarObject,3.2,,members member,PyVarObject.ob_base,3.2,, member,PyVarObject.ob_size,3.2,, -function,PyVectorcall_Call,3.12,, -function,PyVectorcall_NARGS,3.12,, +func,PyVectorcall_Call,3.12,, +func,PyVectorcall_NARGS,3.12,, type,PyWeakReference,3.2,,opaque -function,PyWeakref_GetObject,3.2,, -function,PyWeakref_GetRef,3.13,, -function,PyWeakref_NewProxy,3.2,, 
-function,PyWeakref_NewRef,3.2,, -var,PyWrapperDescr_Type,3.2,, -function,PyWrapper_New,3.2,, -var,PyZip_Type,3.2,, -function,Py_AddPendingCall,3.2,, -function,Py_AtExit,3.2,, +func,PyWeakref_GetObject,3.2,, +func,PyWeakref_GetRef,3.13,, +func,PyWeakref_NewProxy,3.2,, +func,PyWeakref_NewRef,3.2,, +data,PyWrapperDescr_Type,3.2,, +func,PyWrapper_New,3.2,, +data,PyZip_Type,3.2,, +func,Py_AddPendingCall,3.2,, +func,Py_AtExit,3.2,, macro,Py_BEGIN_ALLOW_THREADS,3.2,, macro,Py_BLOCK_THREADS,3.2,, -function,Py_BuildValue,3.2,, -function,Py_BytesMain,3.8,, -function,Py_CompileString,3.2,, -function,Py_DecRef,3.2,, -function,Py_DecodeLocale,3.7,, +func,Py_BuildValue,3.2,, +func,Py_BytesMain,3.8,, +func,Py_CompileString,3.2,, +func,Py_DecRef,3.2,, +func,Py_DecodeLocale,3.7,, macro,Py_END_ALLOW_THREADS,3.2,, -function,Py_EncodeLocale,3.7,, -function,Py_EndInterpreter,3.2,, -function,Py_EnterRecursiveCall,3.9,, -function,Py_Exit,3.2,, -function,Py_FatalError,3.2,, -var,Py_FileSystemDefaultEncodeErrors,3.10,, -var,Py_FileSystemDefaultEncoding,3.2,, -function,Py_Finalize,3.2,, -function,Py_FinalizeEx,3.6,, -function,Py_GenericAlias,3.9,, -var,Py_GenericAliasType,3.9,, -function,Py_GetBuildInfo,3.2,, -function,Py_GetCompiler,3.2,, -function,Py_GetConstant,3.13,, -function,Py_GetConstantBorrowed,3.13,, -function,Py_GetCopyright,3.2,, -function,Py_GetExecPrefix,3.2,, -function,Py_GetPath,3.2,, -function,Py_GetPlatform,3.2,, -function,Py_GetPrefix,3.2,, -function,Py_GetProgramFullPath,3.2,, -function,Py_GetProgramName,3.2,, -function,Py_GetPythonHome,3.2,, -function,Py_GetRecursionLimit,3.2,, -function,Py_GetVersion,3.2,, -var,Py_HasFileSystemDefaultEncoding,3.2,, -function,Py_IncRef,3.2,, -function,Py_Initialize,3.2,, -function,Py_InitializeEx,3.2,, -function,Py_Is,3.10,, -function,Py_IsFalse,3.10,, -function,Py_IsFinalizing,3.13,, -function,Py_IsInitialized,3.2,, -function,Py_IsNone,3.10,, -function,Py_IsTrue,3.10,, -function,Py_LeaveRecursiveCall,3.9,, -function,Py_Main,3.2,, -function,Py_MakePendingCalls,3.2,, -function,Py_NewInterpreter,3.2,, -function,Py_NewRef,3.10,, -function,Py_ReprEnter,3.2,, -function,Py_ReprLeave,3.2,, -function,Py_SetProgramName,3.2,, -function,Py_SetPythonHome,3.2,, -function,Py_SetRecursionLimit,3.2,, +func,Py_EncodeLocale,3.7,, +func,Py_EndInterpreter,3.2,, +func,Py_EnterRecursiveCall,3.9,, +func,Py_Exit,3.2,, +func,Py_FatalError,3.2,, +data,Py_FileSystemDefaultEncodeErrors,3.10,, +data,Py_FileSystemDefaultEncoding,3.2,, +func,Py_Finalize,3.2,, +func,Py_FinalizeEx,3.6,, +func,Py_GenericAlias,3.9,, +data,Py_GenericAliasType,3.9,, +func,Py_GetBuildInfo,3.2,, +func,Py_GetCompiler,3.2,, +func,Py_GetConstant,3.13,, +func,Py_GetConstantBorrowed,3.13,, +func,Py_GetCopyright,3.2,, +func,Py_GetExecPrefix,3.2,, +func,Py_GetPath,3.2,, +func,Py_GetPlatform,3.2,, +func,Py_GetPrefix,3.2,, +func,Py_GetProgramFullPath,3.2,, +func,Py_GetProgramName,3.2,, +func,Py_GetPythonHome,3.2,, +func,Py_GetRecursionLimit,3.2,, +func,Py_GetVersion,3.2,, +data,Py_HasFileSystemDefaultEncoding,3.2,, +func,Py_IncRef,3.2,, +func,Py_Initialize,3.2,, +func,Py_InitializeEx,3.2,, +func,Py_Is,3.10,, +func,Py_IsFalse,3.10,, +func,Py_IsFinalizing,3.13,, +func,Py_IsInitialized,3.2,, +func,Py_IsNone,3.10,, +func,Py_IsTrue,3.10,, +func,Py_LeaveRecursiveCall,3.9,, +func,Py_Main,3.2,, +func,Py_MakePendingCalls,3.2,, +func,Py_NewInterpreter,3.2,, +func,Py_NewRef,3.10,, +func,Py_ReprEnter,3.2,, +func,Py_ReprLeave,3.2,, +func,Py_SetProgramName,3.2,, +func,Py_SetPythonHome,3.2,, +func,Py_SetRecursionLimit,3.2,, 
type,Py_UCS4,3.2,, macro,Py_UNBLOCK_THREADS,3.2,, -var,Py_UTF8Mode,3.8,, -function,Py_VaBuildValue,3.2,, -var,Py_Version,3.11,, -function,Py_XNewRef,3.10,, +data,Py_UTF8Mode,3.8,, +func,Py_VaBuildValue,3.2,, +data,Py_Version,3.11,, +func,Py_XNewRef,3.10,, type,Py_buffer,3.11,,full-abi type,Py_intptr_t,3.2,, type,Py_ssize_t,3.2,, diff --git a/Doc/deprecations/c-api-pending-removal-in-3.14.rst b/Doc/deprecations/c-api-pending-removal-in-3.14.rst new file mode 100644 index 00000000000..d16da66c29a --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-3.14.rst @@ -0,0 +1,72 @@ +Pending Removal in Python 3.14 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules + (:pep:`699`; :gh:`101193`). + +* Creating :c:data:`immutable types ` with mutable + bases (:gh:`95388`). + +* Functions to configure Python's initialization, deprecated in Python 3.11: + + * :c:func:`!PySys_SetArgvEx()`: + Set :c:member:`PyConfig.argv` instead. + * :c:func:`!PySys_SetArgv()`: + Set :c:member:`PyConfig.argv` instead. + * :c:func:`!Py_SetProgramName()`: + Set :c:member:`PyConfig.program_name` instead. + * :c:func:`!Py_SetPythonHome()`: + Set :c:member:`PyConfig.home` instead. + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` instead. + +* Global configuration variables: + + * :c:var:`Py_DebugFlag`: + Use :c:member:`PyConfig.parser_debug` instead. + * :c:var:`Py_VerboseFlag`: + Use :c:member:`PyConfig.verbose` instead. + * :c:var:`Py_QuietFlag`: + Use :c:member:`PyConfig.quiet` instead. + * :c:var:`Py_InteractiveFlag`: + Use :c:member:`PyConfig.interactive` instead. + * :c:var:`Py_InspectFlag`: + Use :c:member:`PyConfig.inspect` instead. + * :c:var:`Py_OptimizeFlag`: + Use :c:member:`PyConfig.optimization_level` instead. + * :c:var:`Py_NoSiteFlag`: + Use :c:member:`PyConfig.site_import` instead. + * :c:var:`Py_BytesWarningFlag`: + Use :c:member:`PyConfig.bytes_warning` instead. + * :c:var:`Py_FrozenFlag`: + Use :c:member:`PyConfig.pathconfig_warnings` instead. + * :c:var:`Py_IgnoreEnvironmentFlag`: + Use :c:member:`PyConfig.use_environment` instead. + * :c:var:`Py_DontWriteBytecodeFlag`: + Use :c:member:`PyConfig.write_bytecode` instead. + * :c:var:`Py_NoUserSiteDirectory`: + Use :c:member:`PyConfig.user_site_directory` instead. + * :c:var:`Py_UnbufferedStdioFlag`: + Use :c:member:`PyConfig.buffered_stdio` instead. + * :c:var:`Py_HashRandomizationFlag`: + Use :c:member:`PyConfig.use_hash_seed` + and :c:member:`PyConfig.hash_seed` instead. + * :c:var:`Py_IsolatedFlag`: + Use :c:member:`PyConfig.isolated` instead. + * :c:var:`Py_LegacyWindowsFSEncodingFlag`: + Use :c:member:`PyPreConfig.legacy_windows_fs_encoding` instead. + * :c:var:`Py_LegacyWindowsStdioFlag`: + Use :c:member:`PyConfig.legacy_windows_stdio` instead. + * :c:var:`!Py_FileSystemDefaultEncoding`: + Use :c:member:`PyConfig.filesystem_encoding` instead. + * :c:var:`!Py_HasFileSystemDefaultEncoding`: + Use :c:member:`PyConfig.filesystem_encoding` instead. + * :c:var:`!Py_FileSystemDefaultEncodeErrors`: + Use :c:member:`PyConfig.filesystem_errors` instead. + * :c:var:`!Py_UTF8Mode`: + Use :c:member:`PyPreConfig.utf8_mode` instead. + (see :c:func:`Py_PreInitialize`) + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` instead. 
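As an illustrative sketch (not something the patch itself adds): the replacement pattern that the new 3.14 page recommends uses :c:type:`PyConfig` with :c:func:`Py_InitializeFromConfig` instead of the deprecated ``Py_SetProgramName()``/``PySys_SetArgv()`` calls. The program name ``myapp`` and the embedding ``main()`` below are made up for the example::

    #include <Python.h>

    int
    main(int argc, char **argv)
    {
        PyConfig config;
        PyConfig_InitPythonConfig(&config);

        /* Py_SetProgramName(...)  ->  PyConfig.program_name */
        PyStatus status = PyConfig_SetBytesString(&config, &config.program_name,
                                                  "myapp");
        if (PyStatus_Exception(status)) {
            goto fail;
        }

        /* PySys_SetArgv(argc, argv)  ->  PyConfig.argv */
        status = PyConfig_SetBytesArgv(&config, argc, argv);
        if (PyStatus_Exception(status)) {
            goto fail;
        }

        status = Py_InitializeFromConfig(&config);
        if (PyStatus_Exception(status)) {
            goto fail;
        }
        PyConfig_Clear(&config);

        PyRun_SimpleString("import sys; print(sys.argv[:1])");
        return Py_FinalizeEx();

    fail:
        PyConfig_Clear(&config);
        Py_ExitStatusException(status);
    }

Each ``PyConfig_Set*`` call reports problems through a :c:type:`PyStatus`, so configuration errors surface before the interpreter starts rather than through the old global-variable side channel.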
diff --git a/Doc/deprecations/c-api-pending-removal-in-3.15.rst b/Doc/deprecations/c-api-pending-removal-in-3.15.rst new file mode 100644 index 00000000000..e3974415e0c --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst @@ -0,0 +1,27 @@ +Pending Removal in Python 3.15 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* The bundled copy of ``libmpdecimal``. +* The :c:func:`PyImport_ImportModuleNoBlock`: + Use :c:func:`PyImport_ImportModule` instead. +* :c:func:`PyWeakref_GetObject` and :c:func:`PyWeakref_GET_OBJECT`: + Use :c:func:`PyWeakref_GetRef` instead. +* :c:type:`Py_UNICODE` type and the :c:macro:`!Py_UNICODE_WIDE` macro: + Use :c:type:`wchar_t` instead. +* Python initialization functions: + + * :c:func:`PySys_ResetWarnOptions`: + Clear :data:`sys.warnoptions` and :data:`!warnings.filters` instead. + * :c:func:`Py_GetExecPrefix`: + Get :data:`sys.exec_prefix` instead. + * :c:func:`Py_GetPath`: + Get :data:`sys.path` instead. + * :c:func:`Py_GetPrefix`: + Get :data:`sys.prefix` instead. + * :c:func:`Py_GetProgramFullPath`: + Get :data:`sys.executable` instead. + * :c:func:`Py_GetProgramName`: + Get :data:`sys.executable` instead. + * :c:func:`Py_GetPythonHome`: + Get :c:member:`PyConfig.home` + or the :envvar:`PYTHONHOME` environment variable instead. diff --git a/Doc/deprecations/c-api-pending-removal-in-future.rst b/Doc/deprecations/c-api-pending-removal-in-future.rst new file mode 100644 index 00000000000..0c3ae52b87f --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-future.rst @@ -0,0 +1,51 @@ +Pending Removal in Future Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The following APIs are deprecated and will be removed, +although there is currently no date scheduled for their removal. + +* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: + Unneeded since Python 3.8. +* :c:func:`PyErr_Fetch`: + Use :c:func:`PyErr_GetRaisedException` instead. +* :c:func:`PyErr_NormalizeException`: + Use :c:func:`PyErr_GetRaisedException` instead. +* :c:func:`PyErr_Restore`: + Use :c:func:`PyErr_SetRaisedException` instead. +* :c:func:`PyModule_GetFilename`: + Use :c:func:`PyModule_GetFilenameObject` instead. +* :c:func:`PyOS_AfterFork`: + Use :c:func:`PyOS_AfterFork_Child` instead. +* :c:func:`PySlice_GetIndicesEx`: + Use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices` instead. +* :c:func:`!PyUnicode_AsDecodedObject`: + Use :c:func:`PyCodec_Decode` instead. +* :c:func:`!PyUnicode_AsDecodedUnicode`: + Use :c:func:`PyCodec_Decode` instead. +* :c:func:`!PyUnicode_AsEncodedObject`: + Use :c:func:`PyCodec_Encode` instead. +* :c:func:`!PyUnicode_AsEncodedUnicode`: + Use :c:func:`PyCodec_Encode` instead. +* :c:func:`PyUnicode_READY`: + Unneeded since Python 3.12 +* :c:func:`!PyErr_Display`: + Use :c:func:`PyErr_DisplayException` instead. +* :c:func:`!_PyErr_ChainExceptions`: + Use :c:func:`!_PyErr_ChainExceptions1` instead. +* :c:member:`!PyBytesObject.ob_shash` member: + call :c:func:`PyObject_Hash` instead. +* :c:member:`!PyDictObject.ma_version_tag` member. +* Thread Local Storage (TLS) API: + + * :c:func:`PyThread_create_key`: + Use :c:func:`PyThread_tss_alloc` instead. + * :c:func:`PyThread_delete_key`: + Use :c:func:`PyThread_tss_free` instead. + * :c:func:`PyThread_set_key_value`: + Use :c:func:`PyThread_tss_set` instead. + * :c:func:`PyThread_get_key_value`: + Use :c:func:`PyThread_tss_get` instead. + * :c:func:`PyThread_delete_key_value`: + Use :c:func:`PyThread_tss_delete` instead. + * :c:func:`PyThread_ReInitTLS`: + Unneeded since Python 3.7. 
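In the same illustrative spirit, a minimal sketch of two of the migrations listed on the new "pending removal in future versions" page: :c:func:`PyErr_GetRaisedException` in place of :c:func:`PyErr_Fetch`/:c:func:`PyErr_Restore`, and :c:func:`PyWeakref_GetRef` in place of :c:func:`PyWeakref_GetObject`. The helper names are hypothetical and error handling is kept minimal::

    #include <Python.h>

    static void
    reraise_current_exception(void)
    {
        /* Instead of PyErr_Fetch()/PyErr_NormalizeException()/PyErr_Restore():
           fetch the raised exception as one object, then set it back. */
        PyObject *exc = PyErr_GetRaisedException();   /* strong reference or NULL */
        if (exc != NULL) {
            /* ... inspect or modify the exception object here ... */
            PyErr_SetRaisedException(exc);            /* steals the reference */
        }
    }

    static PyObject *
    call_weakref_target(PyObject *weakref)
    {
        /* Instead of PyWeakref_GetObject() (borrowed reference):
           PyWeakref_GetRef() hands back a strong reference, or reports
           that the referent is already gone. */
        PyObject *obj;
        int rc = PyWeakref_GetRef(weakref, &obj);
        if (rc < 0) {
            return NULL;            /* not a weak reference; error already set */
        }
        if (rc == 0) {
            Py_RETURN_NONE;         /* referent has been collected */
        }
        PyObject *result = PyObject_CallNoArgs(obj);
        Py_DECREF(obj);
        return result;
    }

In both cases the newer call returns a strong reference (or explicitly reports that nothing is there), so the caller owns whatever it gets back instead of juggling borrowed references.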
diff --git a/Doc/deprecations/index.rst b/Doc/deprecations/index.rst new file mode 100644 index 00000000000..5aa7062aca2 --- /dev/null +++ b/Doc/deprecations/index.rst @@ -0,0 +1,19 @@ +Deprecations +============ + +.. include:: pending-removal-in-3.14.rst + +.. include:: pending-removal-in-3.15.rst + +.. include:: pending-removal-in-3.16.rst + +.. include:: pending-removal-in-future.rst + +C API Deprecations +------------------ + +.. include:: c-api-pending-removal-in-3.14.rst + +.. include:: c-api-pending-removal-in-3.15.rst + +.. include:: c-api-pending-removal-in-future.rst diff --git a/Doc/deprecations/pending-removal-in-3.13.rst b/Doc/deprecations/pending-removal-in-3.13.rst new file mode 100644 index 00000000000..89790497816 --- /dev/null +++ b/Doc/deprecations/pending-removal-in-3.13.rst @@ -0,0 +1,52 @@ +Pending Removal in Python 3.13 +------------------------------ + +Modules (see :pep:`594`): + +* :mod:`!aifc` +* :mod:`!audioop` +* :mod:`!cgi` +* :mod:`!cgitb` +* :mod:`!chunk` +* :mod:`!crypt` +* :mod:`!imghdr` +* :mod:`!mailcap` +* :mod:`!msilib` +* :mod:`!nis` +* :mod:`!nntplib` +* :mod:`!ossaudiodev` +* :mod:`!pipes` +* :mod:`!sndhdr` +* :mod:`!spwd` +* :mod:`!sunau` +* :mod:`!telnetlib` +* :mod:`!uu` +* :mod:`!xdrlib` + +Other modules: + +* :mod:`!lib2to3`, and the :program:`2to3` program (:gh:`84540`) + +APIs: + +* :class:`!configparser.LegacyInterpolation` (:gh:`90765`) +* ``locale.resetlocale()`` (:gh:`90817`) +* :meth:`!turtle.RawTurtle.settiltangle` (:gh:`50096`) +* :func:`!unittest.findTestCases` (:gh:`50096`) +* :func:`!unittest.getTestCaseNames` (:gh:`50096`) +* :func:`!unittest.makeSuite` (:gh:`50096`) +* :meth:`!unittest.TestProgram.usageExit` (:gh:`67048`) +* :class:`!webbrowser.MacOSX` (:gh:`86421`) +* :class:`classmethod` descriptor chaining (:gh:`89519`) +* :mod:`importlib.resources` deprecated methods: + + * ``contents()`` + * ``is_resource()`` + * ``open_binary()`` + * ``open_text()`` + * ``path()`` + * ``read_binary()`` + * ``read_text()`` + + Use :func:`importlib.resources.files` instead. Refer to `importlib-resources: Migrating from Legacy + `_ (:gh:`106531`) diff --git a/Doc/deprecations/pending-removal-in-3.14.rst b/Doc/deprecations/pending-removal-in-3.14.rst new file mode 100644 index 00000000000..330b426deac --- /dev/null +++ b/Doc/deprecations/pending-removal-in-3.14.rst @@ -0,0 +1,115 @@ +Pending Removal in Python 3.14 +------------------------------ + +* :mod:`argparse`: The *type*, *choices*, and *metavar* parameters + of :class:`!argparse.BooleanOptionalAction` are deprecated + and will be removed in 3.14. + (Contributed by Nikita Sobolev in :gh:`92248`.) + +* :mod:`ast`: The following features have been deprecated in documentation + since Python 3.8, now cause a :exc:`DeprecationWarning` to be emitted at + runtime when they are accessed or used, and will be removed in Python 3.14: + + * :class:`!ast.Num` + * :class:`!ast.Str` + * :class:`!ast.Bytes` + * :class:`!ast.NameConstant` + * :class:`!ast.Ellipsis` + + Use :class:`ast.Constant` instead. + (Contributed by Serhiy Storchaka in :gh:`90953`.) + +* :mod:`asyncio`: + + * The child watcher classes :class:`~asyncio.MultiLoopChildWatcher`, + :class:`~asyncio.FastChildWatcher`, :class:`~asyncio.AbstractChildWatcher` + and :class:`~asyncio.SafeChildWatcher` are deprecated and + will be removed in Python 3.14. + (Contributed by Kumar Aditya in :gh:`94597`.) 
+ + * :func:`asyncio.set_child_watcher`, :func:`asyncio.get_child_watcher`, + :meth:`asyncio.AbstractEventLoopPolicy.set_child_watcher` and + :meth:`asyncio.AbstractEventLoopPolicy.get_child_watcher` are deprecated + and will be removed in Python 3.14. + (Contributed by Kumar Aditya in :gh:`94597`.) + + * The :meth:`~asyncio.get_event_loop` method of the + default event loop policy now emits a :exc:`DeprecationWarning` if there + is no current event loop set and it decides to create one. + (Contributed by Serhiy Storchaka and Guido van Rossum in :gh:`100160`.) + +* :mod:`collections.abc`: Deprecated :class:`~collections.abc.ByteString`. + Prefer :class:`!Sequence` or :class:`~collections.abc.Buffer`. + For use in typing, prefer a union, like ``bytes | bytearray``, + or :class:`collections.abc.Buffer`. + (Contributed by Shantanu Jain in :gh:`91896`.) + +* :mod:`email`: Deprecated the *isdst* parameter in :func:`email.utils.localtime`. + (Contributed by Alan Williams in :gh:`72346`.) + +* :mod:`importlib.abc` deprecated classes: + + * :class:`!importlib.abc.ResourceReader` + * :class:`!importlib.abc.Traversable` + * :class:`!importlib.abc.TraversableResources` + + Use :mod:`importlib.resources.abc` classes instead: + + * :class:`importlib.resources.abc.Traversable` + * :class:`importlib.resources.abc.TraversableResources` + + (Contributed by Jason R. Coombs and Hugo van Kemenade in :gh:`93963`.) + +* :mod:`itertools` had undocumented, inefficient, historically buggy, + and inconsistent support for copy, deepcopy, and pickle operations. + This will be removed in 3.14 for a significant reduction in code + volume and maintenance burden. + (Contributed by Raymond Hettinger in :gh:`101588`.) + +* :mod:`multiprocessing`: The default start method will change to a safer one on + Linux, BSDs, and other non-macOS POSIX platforms where ``'fork'`` is currently + the default (:gh:`84559`). Adding a runtime warning about this was deemed too + disruptive as the majority of code is not expected to care. Use the + :func:`~multiprocessing.get_context` or + :func:`~multiprocessing.set_start_method` APIs to explicitly specify when + your code *requires* ``'fork'``. See :ref:`multiprocessing-start-methods`. + +* :mod:`pathlib`: :meth:`~pathlib.PurePath.is_relative_to` and + :meth:`~pathlib.PurePath.relative_to`: passing additional arguments is + deprecated. + +* :mod:`pkgutil`: :func:`~pkgutil.find_loader` and :func:`~pkgutil.get_loader` + now raise :exc:`DeprecationWarning`; + use :func:`importlib.util.find_spec` instead. + (Contributed by Nikita Sobolev in :gh:`97850`.) + +* :mod:`pty`: + + * ``master_open()``: use :func:`pty.openpty`. + * ``slave_open()``: use :func:`pty.openpty`. + +* :mod:`sqlite3`: + + * :data:`~sqlite3.version` and :data:`~sqlite3.version_info`. + + * :meth:`~sqlite3.Cursor.execute` and :meth:`~sqlite3.Cursor.executemany` + if :ref:`named placeholders ` are used and + *parameters* is a sequence instead of a :class:`dict`. + + * date and datetime adapter, date and timestamp converter: + see the :mod:`sqlite3` documentation for suggested replacement recipes. + +* :class:`types.CodeType`: Accessing :attr:`~codeobject.co_lnotab` was + deprecated in :pep:`626` + since 3.10 and was planned to be removed in 3.12, + but it only got a proper :exc:`DeprecationWarning` in 3.12. + May be removed in 3.14. + (Contributed by Nikita Sobolev in :gh:`101866`.) 
+ +* :mod:`typing`: :class:`~typing.ByteString`, deprecated since Python 3.9, + now causes a :exc:`DeprecationWarning` to be emitted when it is used. + +* :mod:`urllib`: + :class:`!urllib.parse.Quoter` is deprecated: it was not intended to be a + public API. + (Contributed by Gregory P. Smith in :gh:`88168`.) diff --git a/Doc/deprecations/pending-removal-in-3.15.rst b/Doc/deprecations/pending-removal-in-3.15.rst new file mode 100644 index 00000000000..b921b4f97d5 --- /dev/null +++ b/Doc/deprecations/pending-removal-in-3.15.rst @@ -0,0 +1,71 @@ +Pending Removal in Python 3.15 +------------------------------ + +* :mod:`ctypes`: + + * The undocumented :func:`!ctypes.SetPointerType` function + has been deprecated since Python 3.13. + +* :mod:`http.server`: + + * The obsolete and rarely used :class:`~http.server.CGIHTTPRequestHandler` + has been deprecated since Python 3.13. + No direct replacement exists. + *Anything* is better than CGI to interface + a web server with a request handler. + + * The :option:`!--cgi` flag to the :program:`python -m http.server` + command-line interface has been deprecated since Python 3.13. + +* :mod:`importlib`: ``__package__`` and ``__cached__`` will cease to be set or + taken into consideration by the import system (:gh:`97879`). + +* :class:`locale`: + + * The :func:`~locale.getdefaultlocale` function + has been deprecated since Python 3.11. + Its removal was originally planned for Python 3.13 (:gh:`90817`), + but has been postponed to Python 3.15. + Use :func:`~locale.getlocale`, :func:`~locale.setlocale`, + and :func:`~locale.getencoding` instead. + (Contributed by Hugo van Kemenade in :gh:`111187`.) + +* :mod:`pathlib`: + + * :meth:`.PurePath.is_reserved` + has been deprecated since Python 3.13. + Use :func:`os.path.isreserved` to detect reserved paths on Windows. + +* :mod:`platform`: + + * :func:`~platform.java_ver` has been deprecated since Python 3.13. + This function is only useful for Jython support, has a confusing API, + and is largely untested. + +* :mod:`threading`: + + * :func:`~threading.RLock` will take no arguments in Python 3.15. + Passing any arguments has been deprecated since Python 3.14, + as the Python version does not permit any arguments, + but the C version allows any number of positional or keyword arguments, + ignoring every argument. + +* :mod:`typing`: + + * The undocumented keyword argument syntax for creating + :class:`~typing.NamedTuple` classes + (e.g. ``Point = NamedTuple("Point", x=int, y=int)``) + has been deprecated since Python 3.13. + Use the class-based syntax or the functional syntax instead. + + * The :func:`typing.no_type_check_decorator` decorator function + has been deprecated since Python 3.13. + After eight years in the :mod:`typing` module, + it has yet to be supported by any major type checker. + +* :mod:`wave`: + + * The :meth:`~wave.Wave_read.getmark`, :meth:`!setmark`, + and :meth:`~wave.Wave_read.getmarkers` methods of + the :class:`~wave.Wave_read` and :class:`~wave.Wave_write` classes + have been deprecated since Python 3.13. 
diff --git a/Doc/deprecations/pending-removal-in-3.16.rst b/Doc/deprecations/pending-removal-in-3.16.rst new file mode 100644 index 00000000000..446cc63cb34 --- /dev/null +++ b/Doc/deprecations/pending-removal-in-3.16.rst @@ -0,0 +1,42 @@ +Pending Removal in Python 3.16 +------------------------------ + +* :mod:`builtins`: + + * Bitwise inversion on boolean types, ``~True`` or ``~False`` + has been deprecated since Python 3.12, + as it produces surprising and unintuitive results (``-2`` and ``-1``). + Use ``not x`` instead for the logical negation of a Boolean. + In the rare case that you need the bitwise inversion of + the underlying integer, convert to ``int`` explicitly (``~int(x)``). + +* :mod:`array`: + + * The ``'u'`` format code (:c:type:`wchar_t`) + has been deprecated in documentation since Python 3.3 + and at runtime since Python 3.13. + Use the ``'w'`` format code (:c:type:`Py_UCS4`) + for Unicode characters instead. + +* :mod:`shutil`: + + * The :class:`!ExecError` exception + has been deprecated since Python 3.14. + It has not been used by any function in :mod:`!shutil` since Python 3.4, + and is now an alias of :exc:`RuntimeError`. + +* :mod:`symtable`: + + * The :meth:`Class.get_methods ` method + has been deprecated since Python 3.14. + +* :mod:`sys`: + + * The :func:`~sys._enablelegacywindowsfsencoding` function + has been deprecated since Python 3.13. + Use the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment variable instead. + +* :mod:`tarfile`: + + * The undocumented and unused :attr:`!TarFile.tarfile` attribute + has been deprecated since Python 3.13. diff --git a/Doc/deprecations/pending-removal-in-future.rst b/Doc/deprecations/pending-removal-in-future.rst new file mode 100644 index 00000000000..3f9cf6f2082 --- /dev/null +++ b/Doc/deprecations/pending-removal-in-future.rst @@ -0,0 +1,155 @@ +Pending Removal in Future Versions +---------------------------------- + +The following APIs will be removed in the future, +although there is currently no date scheduled for their removal. + +* :mod:`argparse`: Nesting argument groups and nesting mutually exclusive + groups are deprecated. + +* :mod:`array`'s ``'u'`` format code (:gh:`57281`) + +* :mod:`builtins`: + + * ``bool(NotImplemented)``. + * Generators: ``throw(type, exc, tb)`` and ``athrow(type, exc, tb)`` + signature is deprecated: use ``throw(exc)`` and ``athrow(exc)`` instead, + the single argument signature. + * Currently Python accepts numeric literals immediately followed by keywords, + for example ``0in x``, ``1or x``, ``0if 1else 2``. It allows confusing and + ambiguous expressions like ``[0x1for x in y]`` (which can be interpreted as + ``[0x1 for x in y]`` or ``[0x1f or x in y]``). A syntax warning is raised + if the numeric literal is immediately followed by one of keywords + :keyword:`and`, :keyword:`else`, :keyword:`for`, :keyword:`if`, + :keyword:`in`, :keyword:`is` and :keyword:`or`. In a future release it + will be changed to a syntax error. (:gh:`87999`) + * Support for ``__index__()`` and ``__int__()`` method returning non-int type: + these methods will be required to return an instance of a strict subclass of + :class:`int`. + * Support for ``__float__()`` method returning a strict subclass of + :class:`float`: these methods will be required to return an instance of + :class:`float`. + * Support for ``__complex__()`` method returning a strict subclass of + :class:`complex`: these methods will be required to return an instance of + :class:`complex`. 
+ * Delegation of ``int()`` to ``__trunc__()`` method. + * Passing a complex number as the *real* or *imag* argument in the + :func:`complex` constructor is now deprecated; it should only be passed + as a single positional argument. + (Contributed by Serhiy Storchaka in :gh:`109218`.) + +* :mod:`calendar`: ``calendar.January`` and ``calendar.February`` constants are + deprecated and replaced by :data:`calendar.JANUARY` and + :data:`calendar.FEBRUARY`. + (Contributed by Prince Roshan in :gh:`103636`.) + +* :attr:`codeobject.co_lnotab`: use the :meth:`codeobject.co_lines` method + instead. + +* :mod:`datetime`: + + * :meth:`~datetime.datetime.utcnow`: + use ``datetime.datetime.now(tz=datetime.UTC)``. + * :meth:`~datetime.datetime.utcfromtimestamp`: + use ``datetime.datetime.fromtimestamp(timestamp, tz=datetime.UTC)``. + +* :mod:`gettext`: Plural value must be an integer. + +* :mod:`importlib`: + + * ``load_module()`` method: use ``exec_module()`` instead. + * :func:`~importlib.util.cache_from_source` *debug_override* parameter is + deprecated: use the *optimization* parameter instead. + +* :mod:`importlib.metadata`: + + * ``EntryPoints`` tuple interface. + * Implicit ``None`` on return values. + +* :mod:`logging`: the ``warn()`` method has been deprecated + since Python 3.3, use :meth:`~logging.warning` instead. + +* :mod:`mailbox`: Use of StringIO input and text mode is deprecated, use + BytesIO and binary mode instead. + +* :mod:`os`: Calling :func:`os.register_at_fork` in multi-threaded process. + +* :class:`!pydoc.ErrorDuringImport`: A tuple value for *exc_info* parameter is + deprecated, use an exception instance. + +* :mod:`re`: More strict rules are now applied for numerical group references + and group names in regular expressions. Only sequence of ASCII digits is now + accepted as a numerical reference. The group name in bytes patterns and + replacement strings can now only contain ASCII letters and digits and + underscore. + (Contributed by Serhiy Storchaka in :gh:`91760`.) + +* :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse` modules. + +* :mod:`shutil`: :func:`~shutil.rmtree`'s *onerror* parameter is deprecated in + Python 3.12; use the *onexc* parameter instead. + +* :mod:`ssl` options and protocols: + + * :class:`ssl.SSLContext` without protocol argument is deprecated. + * :class:`ssl.SSLContext`: :meth:`~ssl.SSLContext.set_npn_protocols` and + :meth:`!selected_npn_protocol` are deprecated: use ALPN + instead. + * ``ssl.OP_NO_SSL*`` options + * ``ssl.OP_NO_TLS*`` options + * ``ssl.PROTOCOL_SSLv3`` + * ``ssl.PROTOCOL_TLS`` + * ``ssl.PROTOCOL_TLSv1`` + * ``ssl.PROTOCOL_TLSv1_1`` + * ``ssl.PROTOCOL_TLSv1_2`` + * ``ssl.TLSVersion.SSLv3`` + * ``ssl.TLSVersion.TLSv1`` + * ``ssl.TLSVersion.TLSv1_1`` + +* :func:`sysconfig.is_python_build` *check_home* parameter is deprecated and + ignored. + +* :mod:`threading` methods: + + * :meth:`!threading.Condition.notifyAll`: use :meth:`~threading.Condition.notify_all`. + * :meth:`!threading.Event.isSet`: use :meth:`~threading.Event.is_set`. + * :meth:`!threading.Thread.isDaemon`, :meth:`threading.Thread.setDaemon`: + use :attr:`threading.Thread.daemon` attribute. + * :meth:`!threading.Thread.getName`, :meth:`threading.Thread.setName`: + use :attr:`threading.Thread.name` attribute. + * :meth:`!threading.currentThread`: use :meth:`threading.current_thread`. + * :meth:`!threading.activeCount`: use :meth:`threading.active_count`. + +* :class:`typing.Text` (:gh:`92332`). 
+ +* :class:`unittest.IsolatedAsyncioTestCase`: it is deprecated to return a value + that is not ``None`` from a test case. + +* :mod:`urllib.parse` deprecated functions: :func:`~urllib.parse.urlparse` instead + + * ``splitattr()`` + * ``splithost()`` + * ``splitnport()`` + * ``splitpasswd()`` + * ``splitport()`` + * ``splitquery()`` + * ``splittag()`` + * ``splittype()`` + * ``splituser()`` + * ``splitvalue()`` + * ``to_bytes()`` + +* :mod:`urllib.request`: :class:`~urllib.request.URLopener` and + :class:`~urllib.request.FancyURLopener` style of invoking requests is + deprecated. Use newer :func:`~urllib.request.urlopen` functions and methods. + +* :mod:`wsgiref`: ``SimpleHandler.stdout.write()`` should not do partial + writes. + +* :mod:`xml.etree.ElementTree`: Testing the truth value of an + :class:`~xml.etree.ElementTree.Element` is deprecated. In a future release it + will always return ``True``. Prefer explicit ``len(elem)`` or + ``elem is not None`` tests instead. + +* :meth:`zipimport.zipimporter.load_module` is deprecated: + use :meth:`~zipimport.zipimporter.exec_module` instead. diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index fd05c82b416..7f57a3a6aac 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -296,7 +296,7 @@ An interesting advantage of using the :c:member:`~PyTypeObject.tp_members` table descriptors that are used at runtime is that any attribute defined this way can have an associated doc string simply by providing the text in the table. An application can use the introspection API to retrieve the descriptor from the -class object, and get the doc string using its :attr:`!__doc__` attribute. +class object, and get the doc string using its :attr:`~type.__doc__` attribute. As with the :c:member:`~PyTypeObject.tp_methods` table, a sentinel entry with a :c:member:`~PyMethodDef.ml_name` value of ``NULL`` is required. diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index 7eba9759119..bcf938f117d 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -144,7 +144,7 @@ only used for variable-sized objects and should otherwise be zero. If you want your type to be subclassable from Python, and your type has the same :c:member:`~PyTypeObject.tp_basicsize` as its base type, you may have problems with multiple inheritance. A Python subclass of your type will have to list your type first - in its :attr:`~class.__bases__`, or else it will not be able to call your type's + in its :attr:`~type.__bases__`, or else it will not be able to call your type's :meth:`~object.__new__` method without getting an error. You can avoid this problem by ensuring that your type has a larger value for :c:member:`~PyTypeObject.tp_basicsize` than its base type does. Most of the time, this will be true anyway, because either your @@ -447,7 +447,7 @@ Further, the attributes can be deleted, setting the C pointers to ``NULL``. Eve though we can make sure the members are initialized to non-``NULL`` values, the members can be set to ``NULL`` if the attributes are deleted. -We define a single method, :meth:`!Custom.name()`, that outputs the objects name as the +We define a single method, :meth:`!Custom.name`, that outputs the objects name as the concatenation of the first and last names. :: static PyObject * diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst index c8beb64e39b..e2710fab9cf 100644 --- a/Doc/faq/design.rst +++ b/Doc/faq/design.rst @@ -70,7 +70,7 @@ operations. 
This means that as far as floating-point operations are concerned, Python behaves like many popular languages including C and Java. Many numbers that can be written easily in decimal notation cannot be expressed -exactly in binary floating-point. For example, after:: +exactly in binary floating point. For example, after:: >>> x = 1.2 @@ -87,7 +87,7 @@ which is exactly:: The typical precision of 53 bits provides Python floats with 15--16 decimal digits of accuracy. -For a fuller explanation, please see the :ref:`floating point arithmetic +For a fuller explanation, please see the :ref:`floating-point arithmetic ` chapter in the Python tutorial. @@ -328,7 +328,7 @@ Can Python be compiled to machine code, C or some other language? ----------------------------------------------------------------- `Cython `_ compiles a modified version of Python with -optional annotations into C extensions. `Nuitka `_ is +optional annotations into C extensions. `Nuitka `_ is an up-and-coming compiler of Python into C++ code, aiming to support the full Python language. @@ -345,7 +345,7 @@ to perform a garbage collection, obtain debugging statistics, and tune the collector's parameters. Other implementations (such as `Jython `_ or -`PyPy `_), however, can rely on a different mechanism +`PyPy `_), however, can rely on a different mechanism such as a full-blown garbage collector. This difference can cause some subtle porting problems if your Python code depends on the behavior of the reference counting implementation. diff --git a/Doc/faq/extending.rst b/Doc/faq/extending.rst index 1cff2c4091d..3147fda7c37 100644 --- a/Doc/faq/extending.rst +++ b/Doc/faq/extending.rst @@ -246,13 +246,12 @@ Then, when you run GDB: I want to compile a Python module on my Linux system, but some files are missing. Why? -------------------------------------------------------------------------------------- -Most packaged versions of Python don't include the -:file:`/usr/lib/python2.{x}/config/` directory, which contains various files +Most packaged versions of Python omit some files required for compiling Python extensions. -For Red Hat, install the python-devel RPM to get the necessary files. +For Red Hat, install the python3-devel RPM to get the necessary files. -For Debian, run ``apt-get install python-dev``. +For Debian, run ``apt-get install python3-dev``. How do I tell "incomplete input" from "invalid input"? ------------------------------------------------------ diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index eb859c5d599..2a55f6b6fb6 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -309,10 +309,10 @@ guaranteed that interfaces will remain the same throughout a series of bugfix releases. The latest stable releases can always be found on the `Python download page -`_. There are two production-ready versions -of Python: 2.x and 3.x. The recommended version is 3.x, which is supported by -most widely used libraries. Although 2.x is still widely used, `it is not -maintained anymore `_. +`_. +Python 3.x is the recommended version and supported by most widely used libraries. +Python 2.x :pep:`is not maintained anymore <373>`. + How many people are using Python? --------------------------------- diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst index b959cd73921..522923572bf 100644 --- a/Doc/faq/library.rst +++ b/Doc/faq/library.rst @@ -796,12 +796,12 @@ is simple:: import random random.random() -This returns a random floating point number in the range [0, 1). 
+This returns a random floating-point number in the range [0, 1). There are also many other specialized generators in this module, such as: * ``randrange(a, b)`` chooses an integer in the range [a, b). -* ``uniform(a, b)`` chooses a floating point number in the range [a, b). +* ``uniform(a, b)`` chooses a floating-point number in the range [a, b). * ``normalvariate(mean, sdev)`` samples the normal (Gaussian) distribution. Some higher-level functions operate on sequences directly, such as: diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index 0a88c5f6384..fa7b22bde1d 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -869,7 +869,7 @@ How do I convert a string to a number? -------------------------------------- For integers, use the built-in :func:`int` type constructor, e.g. ``int('144') -== 144``. Similarly, :func:`float` converts to floating-point, +== 144``. Similarly, :func:`float` converts to a floating-point number, e.g. ``float('144') == 144.0``. By default, these interpret the number as decimal, so that ``int('0144') == @@ -1013,7 +1013,7 @@ Not as such. For simple input parsing, the easiest approach is usually to split the line into whitespace-delimited words using the :meth:`~str.split` method of string objects and then convert decimal strings to numeric values using :func:`int` or -:func:`float`. :meth:`!split()` supports an optional "sep" parameter which is useful +:func:`float`. :meth:`!split` supports an optional "sep" parameter which is useful if the line uses something other than whitespace as a separator. For more complicated input parsing, regular expressions are more powerful @@ -1613,9 +1613,16 @@ method too, and it must do so carefully. The basic implementation of self.__dict__[name] = value ... -Most :meth:`!__setattr__` implementations must modify -:meth:`self.__dict__ ` to store -local state for self without causing an infinite recursion. +Many :meth:`~object.__setattr__` implementations call :meth:`!object.__setattr__` to set +an attribute on self without causing infinite recursion:: + + class X: + def __setattr__(self, name, value): + # Custom logic here... + object.__setattr__(self, name, value) + +Alternatively, it is possible to set attributes by inserting +entries into :attr:`self.__dict__ ` directly. How do I call a method defined in a base class from a derived class that extends it? @@ -1741,11 +1748,31 @@ but effective way to define class private variables. Any identifier of the form is textually replaced with ``_classname__spam``, where ``classname`` is the current class name with any leading underscores stripped. -This doesn't guarantee privacy: an outside user can still deliberately access -the "_classname__spam" attribute, and private values are visible in the object's -``__dict__``. Many Python programmers never bother to use private variable -names at all. +The identifier can be used unchanged within the class, but to access it outside +the class, the mangled name must be used: + +.. code-block:: python + + class A: + def __one(self): + return 1 + def two(self): + return 2 * self.__one() + + class B(A): + def three(self): + return 3 * self._A__one() + + four = 4 * A()._A__one() + +In particular, this does not guarantee privacy since an outside user can still +deliberately access the private attribute; many Python programmers never bother +to use private variable names at all. + +.. seealso:: + The :ref:`private name mangling specifications ` + for details and special cases. 
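As a quick, hypothetical illustration of the point above (reusing the ``A`` class from the example; this sketch is editorial and not part of the patch), the mangled name is an ordinary class attribute and remains reachable from outside the class:

.. code-block:: python

    class A:
        def __one(self):                # stored on the class as _A__one
            return 1

    a = A()
    print(a._A__one())                  # 1: "privacy" can be bypassed deliberately
    print('_A__one' in A.__dict__)      # True: the mangled name is plainly visible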
My class defines __del__ but it is not called when I delete the object. ----------------------------------------------------------------------- diff --git a/Doc/glossary.rst b/Doc/glossary.rst index bacbf66b433..17461e23e71 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -342,7 +342,7 @@ Glossary docstring A string literal which appears as the first expression in a class, function or module. While ignored when the suite is executed, it is - recognized by the compiler and put into the :attr:`!__doc__` attribute + recognized by the compiler and put into the :attr:`~definition.__doc__` attribute of the enclosing class, function or module. Since it is available via introspection, it is the canonical place for documentation of the object. @@ -590,14 +590,12 @@ Glossary which ships with the standard distribution of Python. immortal - If an object is immortal, its reference count is never modified, and - therefore it is never deallocated. + *Immortal objects* are a CPython implementation detail introduced + in :pep:`683`. - Built-in strings and singletons are immortal objects. For example, - :const:`True` and :const:`None` singletons are immmortal. - - See `PEP 683 – Immortal Objects, Using a Fixed Refcount - `_ for more information. + If an object is immortal, its :term:`reference count` is never modified, + and therefore it is never deallocated while the interpreter is running. + For example, :const:`True` and :const:`None` are immortal in CPython. immutable An object with a fixed value. Immutable objects include numbers, strings and @@ -1123,7 +1121,7 @@ Glossary :class:`tuple`, and :class:`bytes`. Note that :class:`dict` also supports :meth:`~object.__getitem__` and :meth:`!__len__`, but is considered a mapping rather than a sequence because the lookups use arbitrary - :term:`immutable` keys rather than integers. + :term:`hashable` keys rather than integers. The :class:`collections.abc.Sequence` abstract base class defines a much richer interface that goes beyond just @@ -1152,16 +1150,12 @@ Glossary (subscript) notation uses :class:`slice` objects internally. soft deprecated - A soft deprecation can be used when using an API which should no longer - be used to write new code, but it remains safe to continue using it in - existing code. The API remains documented and tested, but will not be - developed further (no enhancement). - - The main difference between a "soft" and a (regular) "hard" deprecation - is that the soft deprecation does not imply scheduling the removal of the - deprecated API. + A soft deprecated API should not be used in new code, + but it is safe for already existing code to use it. + The API remains documented and tested, but will not be enhanced further. - Another difference is that a soft deprecation does not issue a warning. + Soft deprecation, unlike normal deprecation, does not plan on removing the API + and will not emit warnings. See `PEP 387: Soft Deprecation `_. @@ -1233,7 +1227,7 @@ Glossary type The type of a Python object determines what kind of object it is; every object has a type. An object's type is accessible as its - :attr:`~instance.__class__` attribute or can be retrieved with + :attr:`~object.__class__` attribute or can be retrieved with ``type(obj)``. 
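A tiny sketch of the equivalence described in the glossary entry above (illustrative only, not part of the patch):

.. code-block:: python

    x = 3.14
    print(type(x))                  # <class 'float'>
    print(type(x) is x.__class__)   # True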
type alias diff --git a/Doc/howto/annotations.rst b/Doc/howto/annotations.rst index be8c7e6c827..174078b84aa 100644 --- a/Doc/howto/annotations.rst +++ b/Doc/howto/annotations.rst @@ -102,9 +102,9 @@ Your code will have to have a separate code path if the object you're examining is a class (``isinstance(o, type)``). In that case, best practice relies on an implementation detail of Python 3.9 and before: if a class has annotations defined, -they are stored in the class's ``__dict__`` dictionary. Since +they are stored in the class's :attr:`~type.__dict__` dictionary. Since the class may or may not have annotations defined, best practice -is to call the ``get`` method on the class dict. +is to call the :meth:`~dict.get` method on the class dict. To put it all together, here is some sample code that safely accesses the ``__annotations__`` attribute on an arbitrary @@ -121,8 +121,8 @@ the type of ``ann`` using :func:`isinstance` before further examination. Note that some exotic or malformed type objects may not have -a ``__dict__`` attribute, so for extra safety you may also wish -to use :func:`getattr` to access ``__dict__``. +a :attr:`~type.__dict__` attribute, so for extra safety you may also wish +to use :func:`getattr` to access :attr:`!__dict__`. Manually Un-Stringizing Stringized Annotations diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst index ae5bab90bf8..30d9ac70037 100644 --- a/Doc/howto/argparse.rst +++ b/Doc/howto/argparse.rst @@ -444,7 +444,7 @@ And the output: options: -h, --help show this help message and exit - -v {0,1,2}, --verbosity {0,1,2} + -v, --verbosity {0,1,2} increase output verbosity Note that the change also reflects both in the error message as well as the diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst index b29488be39a..c60cd638229 100644 --- a/Doc/howto/descriptor.rst +++ b/Doc/howto/descriptor.rst @@ -389,7 +389,9 @@ Here are three practical data validation utilities: def validate(self, value): if value not in self.options: - raise ValueError(f'Expected {value!r} to be one of {self.options!r}') + raise ValueError( + f'Expected {value!r} to be one of {self.options!r}' + ) class Number(Validator): @@ -469,6 +471,7 @@ The descriptors prevent invalid instances from being created: Traceback (most recent call last): ... ValueError: Expected -5 to be at least 0 + >>> Component('WIDGET', 'metal', 'V') # Blocked: 'V' isn't a number Traceback (most recent call last): ... @@ -513,7 +516,7 @@ were defined. Descriptors are a powerful, general purpose protocol. They are the mechanism behind properties, methods, static methods, class methods, and -:func:`super()`. They are used throughout Python itself. Descriptors +:func:`super`. They are used throughout Python itself. Descriptors simplify the underlying C code and offer a flexible set of new tools for everyday Python programs. @@ -559,8 +562,8 @@ attribute access. The expression ``obj.x`` looks up the attribute ``x`` in the chain of namespaces for ``obj``. If the search finds a descriptor outside of the -instance ``__dict__``, its :meth:`__get__` method is invoked according to the -precedence rules listed below. +instance :attr:`~object.__dict__`, its :meth:`~object.__get__` method is +invoked according to the precedence rules listed below. The details of invocation depend on whether ``obj`` is an object, class, or instance of super. 
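To make that lookup rule concrete, here is a minimal, hypothetical data descriptor (names invented for illustration; not part of the patch). Because ``Verbose`` defines both ``__get__`` and ``__set__`` it is a data descriptor, so it takes precedence over an entry of the same name in the instance ``__dict__``:

.. code-block:: python

    class Verbose:
        def __set_name__(self, owner, name):
            self.name = name

        def __get__(self, obj, objtype=None):
            print(f'__get__ called for {self.name!r}')
            return obj.__dict__.get(self.name)

        def __set__(self, obj, value):
            print(f'__set__ called for {self.name!r}')
            obj.__dict__[self.name] = value

    class C:
        x = Verbose()

    c = C()
    c.x = 10        # prints: __set__ called for 'x'
    print(c.x)      # prints: __get__ called for 'x', then 10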
@@ -803,7 +806,7 @@ The full C implementation can be found in :c:func:`!super_getattro` in Summary of invocation logic --------------------------- -The mechanism for descriptors is embedded in the :meth:`__getattribute__()` +The mechanism for descriptors is embedded in the :meth:`__getattribute__` methods for :class:`object`, :class:`type`, and :func:`super`. The important points to remember are: @@ -990,7 +993,7 @@ The documentation shows a typical use to define a managed attribute ``x``: AttributeError: 'C' object has no attribute '_C__x' To see how :func:`property` is implemented in terms of the descriptor protocol, -here is a pure Python equivalent: +here is a pure Python equivalent that implements most of the core functionality: .. testcode:: @@ -1004,59 +1007,35 @@ here is a pure Python equivalent: if doc is None and fget is not None: doc = fget.__doc__ self.__doc__ = doc - self._name = None def __set_name__(self, owner, name): - self._name = name - - @property - def __name__(self): - return self._name if self._name is not None else self.fget.__name__ - - @__name__.setter - def __name__(self, value): - self._name = value + self.__name__ = name def __get__(self, obj, objtype=None): if obj is None: return self if self.fget is None: - raise AttributeError( - f'property {self.__name__!r} of {type(obj).__name__!r} ' - 'object has no getter' - ) + raise AttributeError return self.fget(obj) def __set__(self, obj, value): if self.fset is None: - raise AttributeError( - f'property {self.__name__!r} of {type(obj).__name__!r} ' - 'object has no setter' - ) + raise AttributeError self.fset(obj, value) def __delete__(self, obj): if self.fdel is None: - raise AttributeError( - f'property {self.__name__!r} of {type(obj).__name__!r} ' - 'object has no deleter' - ) + raise AttributeError self.fdel(obj) def getter(self, fget): - prop = type(self)(fget, self.fset, self.fdel, self.__doc__) - prop._name = self._name - return prop + return type(self)(fget, self.fset, self.fdel, self.__doc__) def setter(self, fset): - prop = type(self)(self.fget, fset, self.fdel, self.__doc__) - prop._name = self._name - return prop + return type(self)(self.fget, fset, self.fdel, self.__doc__) def deleter(self, fdel): - prop = type(self)(self.fget, self.fset, fdel, self.__doc__) - prop._name = self._name - return prop + return type(self)(self.fget, self.fset, fdel, self.__doc__) .. testcode:: :hide: @@ -1119,23 +1098,23 @@ here is a pure Python equivalent: >>> try: ... cc.no_getter ... except AttributeError as e: - ... e.args[0] + ... type(e).__name__ ... - "property 'no_getter' of 'CC' object has no getter" + 'AttributeError' >>> try: ... cc.no_setter = 33 ... except AttributeError as e: - ... e.args[0] + ... type(e).__name__ ... - "property 'no_setter' of 'CC' object has no setter" + 'AttributeError' >>> try: ... del cc.no_deleter ... except AttributeError as e: - ... e.args[0] + ... type(e).__name__ ... - "property 'no_deleter' of 'CC' object has no deleter" + 'AttributeError' >>> CC.no_doc.__doc__ is None True @@ -1326,8 +1305,8 @@ mean, median, and other descriptive statistics that depend on the data. However, there may be useful functions which are conceptually related but do not depend on the data. For instance, ``erf(x)`` is handy conversion routine that comes up in statistical work but does not directly depend on a particular dataset. -It can be called either from an object or the class: ``s.erf(1.5) --> .9332`` or -``Sample.erf(1.5) --> .9332``. 
+It can be called either from an object or the class: ``s.erf(1.5) --> 0.9332`` +or ``Sample.erf(1.5) --> 0.9332``. Since static methods return the underlying function with no changes, the example calls are unexciting: diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst index 18e13fcf9f5..66929b4104d 100644 --- a/Doc/howto/enum.rst +++ b/Doc/howto/enum.rst @@ -9,7 +9,7 @@ Enum HOWTO .. currentmodule:: enum An :class:`Enum` is a set of symbolic names bound to unique values. They are -similar to global variables, but they offer a more useful :func:`repr()`, +similar to global variables, but they offer a more useful :func:`repr`, grouping, type-safety, and a few other features. They are most useful when you have a variable that can take one of a limited @@ -167,7 +167,7 @@ And a function to display the chores for a given day:: answer SO questions In cases where the actual values of the members do not matter, you can save -yourself some work and use :func:`auto()` for the values:: +yourself some work and use :func:`auto` for the values:: >>> from enum import auto >>> class Weekday(Flag): @@ -608,7 +608,7 @@ The solution is to specify the module name explicitly as follows:: the source, pickling will be disabled. The new pickle protocol 4 also, in some circumstances, relies on -:attr:`~definition.__qualname__` being set to the location where pickle will be able +:attr:`~type.__qualname__` being set to the location where pickle will be able to find the class. For example, if the class was made available in class SomeData in the global scope:: diff --git a/Doc/howto/free-threading-extensions.rst b/Doc/howto/free-threading-extensions.rst index 1ba91b09516..6abe93d71ad 100644 --- a/Doc/howto/free-threading-extensions.rst +++ b/Doc/howto/free-threading-extensions.rst @@ -181,6 +181,8 @@ Some of these functions were added in Python 3.13. You can use the to provide implementations of these functions for older Python versions. +.. _free-threaded-memory-allocation: + Memory Allocation APIs ====================== @@ -188,7 +190,7 @@ Python's memory management C API provides functions in three different :ref:`allocation domains `: "raw", "mem", and "object". For thread-safety, the free-threaded build requires that only Python objects are allocated using the object domain, and that all Python object are -allocated using that domain. This differes from the prior Python versions, +allocated using that domain. This differs from the prior Python versions, where this was only a best practice and not a hard requirement. .. note:: @@ -270,3 +272,9 @@ Windows Due to a limitation of the official Windows installer, you will need to manually define ``Py_GIL_DISABLED=1`` when building extensions from source. + +.. seealso:: + + `Porting Extension Modules to Support Free-Threading + `_: + A community-maintained porting guide for extension authors. diff --git a/Doc/howto/free-threading-python.rst b/Doc/howto/free-threading-python.rst new file mode 100644 index 00000000000..b21e3287eca --- /dev/null +++ b/Doc/howto/free-threading-python.rst @@ -0,0 +1,154 @@ +.. _freethreading-python-howto: + +********************************************** +Python experimental support for free threading +********************************************** + +Starting with the 3.13 release, CPython has experimental support for a build of +Python called :term:`free threading` where the :term:`global interpreter lock` +(GIL) is disabled. 
Free-threaded execution allows for full utilization of the +available processing power by running threads in parallel on available CPU cores. +While not all software will benefit from this automatically, programs +designed with threading in mind will run faster on multi-core hardware. + +**The free-threaded mode is experimental** and work is ongoing to improve it: +expect some bugs and a substantial single-threaded performance hit. + +This document describes the implications of free threading +for Python code. See :ref:`freethreading-extensions-howto` for information on +how to write C extensions that support the free-threaded build. + +.. seealso:: + + :pep:`703` – Making the Global Interpreter Lock Optional in CPython for an + overall description of free-threaded Python. + + +Installation +============ + +Starting with Python 3.13, the official macOS and Windows installers +optionally support installing free-threaded Python binaries. The installers +are available at https://www.python.org/downloads/. + +For information on other platforms, see the `Installing a Free-Threaded Python +`_, a +community-maintained installation guide for installing free-threaded Python. + +When building CPython from source, the :option:`--disable-gil` configure option +should be used to build a free-threaded Python interpreter. + + +Identifying free-threaded Python +================================ + +To check if the current interpreter supports free-threading, :option:`python -VV <-V>` +and :attr:`sys.version` contain "experimental free-threading build". +The new :func:`sys._is_gil_enabled` function can be used to check whether +the GIL is actually disabled in the running process. + +The ``sysconfig.get_config_var("Py_GIL_DISABLED")`` configuration variable can +be used to determine whether the build supports free threading. If the variable +is set to ``1``, then the build supports free threading. This is the recommended +mechanism for decisions related to the build configuration. + + +The global interpreter lock in free-threaded Python +=================================================== + +Free-threaded builds of CPython support optionally running with the GIL enabled +at runtime using the environment variable :envvar:`PYTHON_GIL` or +the command-line option :option:`-X gil`. + +The GIL may also automatically be enabled when importing a C-API extension +module that is not explicitly marked as supporting free threading. A warning +will be printed in this case. + +In addition to individual package documentation, the following websites track +the status of popular packages support for free threading: + +* https://py-free-threading.github.io/tracking/ +* https://hugovk.github.io/free-threaded-wheels/ + + +Thread safety +============= + +The free-threaded build of CPython aims to provide similar thread-safety +behavior at the Python level to the default GIL-enabled build. Built-in +types like :class:`dict`, :class:`list`, and :class:`set` use internal locks +to protect against concurrent modifications in ways that behave similarly to +the GIL. However, Python has not historically guaranteed specific behavior for +concurrent modifications to these built-in types, so this should be treated +as a description of the current implementation, not a guarantee of current or +future behavior. + +.. note:: + + It's recommended to use the :class:`threading.Lock` or other synchronization + primitives instead of relying on the internal locks of built-in types, when + possible. 
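As a minimal sketch of that recommendation (an editorial illustration, not part of the patch), a shared read-modify-write guarded by an explicit :class:`threading.Lock` rather than by the internal locks of built-in containers:

.. code-block:: python

    import threading

    counter = 0
    lock = threading.Lock()

    def work():
        global counter
        for _ in range(100_000):
            with lock:          # protects the read-modify-write of the shared counter
                counter += 1

    threads = [threading.Thread(target=work) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(counter)              # 400000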
+ + +Known limitations +================= + +This section describes known limitations of the free-threaded CPython build. + +Immortalization +--------------- + +The free-threaded build of the 3.13 release makes some objects :term:`immortal`. +Immortal objects are not deallocated and have reference counts that are +never modified. This is done to avoid reference count contention that would +prevent efficient multi-threaded scaling. + +An object will be made immortal when a new thread is started for the first time +after the main thread is running. The following objects are immortalized: + +* :ref:`function ` objects declared at the module level +* :ref:`method ` descriptors +* :ref:`code ` objects +* :term:`module` objects and their dictionaries +* :ref:`classes ` (type objects) + +Because immortal objects are never deallocated, applications that create many +objects of these types may see increased memory usage. This is expected to be +addressed in the 3.14 release. + +Additionally, numeric and string literals in the code as well as strings +returned by :func:`sys.intern` are also immortalized. This behavior is +expected to remain in the 3.14 free-threaded build. + + +Frame objects +------------- + +It is not safe to access :ref:`frame ` objects from other +threads and doing so may cause your program to crash . This means that +:func:`sys._current_frames` is generally not safe to use in a free-threaded +build. Functions like :func:`inspect.currentframe` and :func:`sys._getframe` +are generally safe as long as the resulting frame object is not passed to +another thread. + +Iterators +--------- + +Sharing the same iterator object between multiple threads is generally not +safe and threads may see duplicate or missing elements when iterating or crash +the interpreter. + + +Single-threaded performance +--------------------------- + +The free-threaded build has additional overhead when executing Python code +compared to the default GIL-enabled build. In 3.13, this overhead is about +40% on the `pyperformance `_ suite. +Programs that spend most of their time in C extensions or I/O will see +less of an impact. The largest impact is because the specializing adaptive +interpreter (:pep:`659`) is disabled in the free-threaded build. We expect +to re-enable it in a thread-safe way in the 3.14 release. This overhead is +expected to be reduced in upcoming Python release. We are aiming for an +overhead of 10% or less on the pyperformance suite compared to the default +GIL-enabled build. diff --git a/Doc/howto/index.rst b/Doc/howto/index.rst index a882f174708..c09f92c9528 100644 --- a/Doc/howto/index.rst +++ b/Doc/howto/index.rst @@ -32,6 +32,7 @@ Python Library Reference. isolating-extensions.rst timerfd.rst mro.rst + free-threading-python.rst free-threading-extensions.rst General: @@ -52,6 +53,7 @@ General: Advanced development: * :ref:`curses-howto` +* :ref:`freethreading-python-howto` * :ref:`freethreading-extensions-howto` * :ref:`isolating-extensions-howto` * :ref:`python_2.3_mro` diff --git a/Doc/howto/instrumentation.rst b/Doc/howto/instrumentation.rst index 9c99fcecce1..6e03ef20a21 100644 --- a/Doc/howto/instrumentation.rst +++ b/Doc/howto/instrumentation.rst @@ -307,7 +307,7 @@ Available static markers .. object:: gc__start(int generation) Fires when the Python interpreter starts a garbage collection cycle. - ``arg0`` is the generation to scan, like :func:`gc.collect()`. + ``arg0`` is the generation to scan, like :func:`gc.collect`. .. 
object:: gc__done(long collected) diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst index e35855deedb..a636e06bda8 100644 --- a/Doc/howto/isolating-extensions.rst +++ b/Doc/howto/isolating-extensions.rst @@ -339,7 +339,7 @@ That is, heap types should: - Define a traverse function using ``Py_tp_traverse``, which visits the type (e.g. using ``Py_VISIT(Py_TYPE(self))``). -Please refer to the the documentation of +Please refer to the documentation of :c:macro:`Py_TPFLAGS_HAVE_GC` and :c:member:`~PyTypeObject.tp_traverse` for additional considerations. diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst index 60d88204b79..96768ec4ae3 100644 --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -4022,7 +4022,7 @@ As you can see, this output isn't ideal. That's because the underlying code which writes to ``sys.stderr`` makes multiple writes, each of which results in a separate logged line (for example, the last three lines above). To get around this problem, you need to buffer things and only output log lines when newlines -are seen. Let's use a slghtly better implementation of ``LoggerWriter``: +are seen. Let's use a slightly better implementation of ``LoggerWriter``: .. code-block:: python diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst index 316b16aa796..3182d5664ab 100644 --- a/Doc/howto/logging.rst +++ b/Doc/howto/logging.rst @@ -382,9 +382,53 @@ Logging Flow The flow of log event information in loggers and handlers is illustrated in the following diagram. +.. only:: not html + + .. image:: logging_flow.* + .. raw:: html :file: logging_flow.svg +.. raw:: html + + + Loggers ^^^^^^^ diff --git a/Doc/howto/logging_flow.png b/Doc/howto/logging_flow.png index c2d0befe273..d60ed7c0315 100644 Binary files a/Doc/howto/logging_flow.png and b/Doc/howto/logging_flow.png differ diff --git a/Doc/howto/logging_flow.svg b/Doc/howto/logging_flow.svg index 52206bdbcf5..4974994ac6b 100644 --- a/Doc/howto/logging_flow.svg +++ b/Doc/howto/logging_flow.svg @@ -1,281 +1,327 @@
[The rest of this hunk is the updated SVG markup for the logging flow diagram; the XML was mangled in extraction and is not reproduced here. The recoverable label changes are: "logging.info(...)" becomes "logger.info(...)", "Pass to handlers of current logger" becomes "Pass record to handlers of current logger", "Handler enabled for level of LogRecord?" becomes "Handler enabled for level of record?", and "LogRecord passed to handler" becomes "Record passed to handler".]
diff --git a/Doc/howto/mro.rst b/Doc/howto/mro.rst index f44b4f98e57..46db516e16d 100644 --- a/Doc/howto/mro.rst +++ b/Doc/howto/mro.rst @@ -335,7 +335,7 @@ E is more specialized than C, even if it is in a higher level. A lazy programmer can obtain the MRO directly from Python 2.2, since in this case it coincides with the Python 2.3 linearization. It is enough -to invoke the .mro() method of class A: +to invoke the :meth:`~type.mro` method of class A: >>> A.mro() # doctest: +NORMALIZE_WHITESPACE [, , , diff --git a/Doc/includes/wasm-ios-notavail.rst b/Doc/includes/wasm-ios-notavail.rst deleted file mode 100644 index c820665f5e4..00000000000 --- a/Doc/includes/wasm-ios-notavail.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. include for modules that don't work on WASM or iOS -.. availability:: not WASI, not iOS. - - This module does not work or is not available on WebAssembly platforms, or - on iOS. See :ref:`wasm-availability` for more information on WASM - availability; see :ref:`iOS-availability` for more information on iOS - availability. diff --git a/Doc/includes/wasm-mobile-notavail.rst b/Doc/includes/wasm-mobile-notavail.rst new file mode 100644 index 00000000000..725b0f7a86d --- /dev/null +++ b/Doc/includes/wasm-mobile-notavail.rst @@ -0,0 +1,6 @@ +.. include for modules that don't work on WASM or mobile platforms +.. availability:: not Android, not iOS, not WASI. + + This module is not supported on :ref:`mobile platforms ` + or :ref:`WebAssembly platforms `. diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index 6232e173d95..647ff9da04d 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -251,9 +251,9 @@ attribute will include the package's path if imported:: >>> asyncio.__main__.__name__ 'asyncio.__main__' -This won't work for ``__main__.py`` files in the root directory of a .zip file -though. Hence, for consistency, minimal ``__main__.py`` like the :mod:`venv` -one mentioned below are preferred. +This won't work for ``__main__.py`` files in the root directory of a +``.zip`` file though. Hence, for consistency, a minimal ``__main__.py`` +without a ``__name__`` check is preferred. .. seealso:: diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst index 81f0cac947f..5fd604c0538 100644 --- a/Doc/library/_thread.rst +++ b/Doc/library/_thread.rst @@ -213,9 +213,8 @@ In addition to these methods, lock objects can also be used via the .. index:: pair: module; signal -* Threads interact strangely with interrupts: the :exc:`KeyboardInterrupt` - exception will be received by an arbitrary thread. (When the :mod:`signal` - module is available, interrupts always go to the main thread.) +* Interrupts always go to the main thread (the :exc:`KeyboardInterrupt` + exception will be received by that thread.) * Calling :func:`sys.exit` or raising the :exc:`SystemExit` exception is equivalent to calling :func:`_thread.exit`. @@ -229,7 +228,3 @@ In addition to these methods, lock objects can also be used via the :keyword:`try` ... :keyword:`finally` clauses or executing object destructors. -* When the main thread exits, it does not do any of its usual cleanup (except - that :keyword:`try` ...
:keyword:`finally` clauses are honored), and the - standard I/O files are not flushed. - diff --git a/Doc/library/abc.rst b/Doc/library/abc.rst index 168ef3ec00d..38d744e97d0 100644 --- a/Doc/library/abc.rst +++ b/Doc/library/abc.rst @@ -99,7 +99,7 @@ a helper class :class:`ABC` to alternatively define ABCs through inheritance: that you can customize the behavior of :func:`issubclass` further without the need to call :meth:`register` on every class you want to consider a subclass of the ABC. (This class method is called from the - :meth:`~class.__subclasscheck__` method of the ABC.) + :meth:`~type.__subclasscheck__` method of the ABC.) This method should return ``True``, ``False`` or :data:`NotImplemented`. If it returns ``True``, the *subclass* is considered a subclass of this ABC. @@ -149,7 +149,7 @@ a helper class :class:`ABC` to alternatively define ABCs through inheritance: The :meth:`__subclasshook__` class method defined here says that any class that has an :meth:`~iterator.__iter__` method in its :attr:`~object.__dict__` (or in that of one of its base classes, accessed - via the :attr:`~class.__mro__` list) is considered a ``MyIterable`` too. + via the :attr:`~type.__mro__` list) is considered a ``MyIterable`` too. Finally, the last line makes ``Foo`` a virtual subclass of ``MyIterable``, even though it does not define an :meth:`~iterator.__iter__` method (it uses diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index 0367c83d936..53ecc97d565 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -249,11 +249,12 @@ The following sections describe how each of these are used. prog ^^^^ -By default, :class:`ArgumentParser` objects use ``sys.argv[0]`` to determine +By default, :class:`ArgumentParser` objects use the base name +(see :func:`os.path.basename`) of ``sys.argv[0]`` to determine how to display the name of the program in help messages. This default is almost -always desirable because it will make the help messages match how the program was -invoked on the command line. For example, consider a file named -``myprogram.py`` with the following code:: +always desirable because it will make the help messages match the name that was +used to invoke the program on the command line. For example, consider a file +named ``myprogram.py`` with the following code:: import argparse parser = argparse.ArgumentParser() @@ -1122,6 +1123,9 @@ is used when no command-line argument was present:: >>> parser.parse_args([]) Namespace(foo=42) +For required_ arguments, the ``default`` value is ignored. For example, this +applies to positional arguments with nargs_ values other than ``?`` or ``*``, +or optional arguments marked as ``required=True``. Providing ``default=argparse.SUPPRESS`` causes no attribute to be added if the command-line argument was not present:: @@ -1455,7 +1459,7 @@ The ``deprecated`` keyword argument of specifies if the argument is deprecated and will be removed in the future. For arguments, if ``deprecated`` is ``True``, then a warning will be -printed to standard error when the argument is used:: +printed to :data:`sys.stderr` when the argument is used:: >>> import argparse >>> parser = argparse.ArgumentParser(prog='snake.py') @@ -1466,7 +1470,7 @@ printed to standard error when the argument is used:: snake.py: warning: option '--legs' is deprecated Namespace(legs=4) -.. versionchanged:: 3.13 +.. versionadded:: 3.13 Action classes @@ -2235,8 +2239,8 @@ Exiting methods .. 
method:: ArgumentParser.exit(status=0, message=None) This method terminates the program, exiting with the specified *status* - and, if given, it prints a *message* before that. The user can override - this method to handle these steps differently:: + and, if given, it prints a *message* to :data:`sys.stderr` before that. + The user can override this method to handle these steps differently:: class ErrorCatchingArgumentParser(argparse.ArgumentParser): def exit(self, status=0, message=None): @@ -2246,8 +2250,8 @@ Exiting methods .. method:: ArgumentParser.error(message) - This method prints a usage message including the *message* to the - standard error and terminates the program with a status code of 2. + This method prints a usage message, including the *message*, to + :data:`sys.stderr` and terminates the program with a status code of 2. Intermixed parsing diff --git a/Doc/library/array.rst b/Doc/library/array.rst index d34a1888342..e0b1eb89cf6 100644 --- a/Doc/library/array.rst +++ b/Doc/library/array.rst @@ -9,7 +9,7 @@ -------------- This module defines an object type which can compactly represent an array of -basic values: characters, integers, floating point numbers. Arrays are sequence +basic values: characters, integers, floating-point numbers. Arrays are sequence types and behave very much like lists, except that the type of objects stored in them is constrained. The type is specified at object creation time by using a :dfn:`type code`, which is a single character. The following type codes are @@ -263,7 +263,7 @@ The string representation is guaranteed to be able to be converted back to an array with the same type and value using :func:`eval`, so long as the :class:`~array.array` class has been imported using ``from array import array``. Variables ``inf`` and ``nan`` must also be defined if it contains -corresponding floating point values. +corresponding floating-point values. Examples:: array('l') diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index c5daa6e2439..0b7d56286b2 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -173,9 +173,9 @@ Root nodes A Python module, as with :ref:`file input `. Node type generated by :func:`ast.parse` in the default ``"exec"`` *mode*. - *body* is a :class:`list` of the module's :ref:`ast-statements`. + ``body`` is a :class:`list` of the module's :ref:`ast-statements`. - *type_ignores* is a :class:`list` of the module's type ignore comments; + ``type_ignores`` is a :class:`list` of the module's type ignore comments; see :func:`ast.parse` for more details. .. doctest:: @@ -194,7 +194,7 @@ Root nodes A single Python :ref:`expression input `. Node type generated by :func:`ast.parse` when *mode* is ``"eval"``. - *body* is a single node, + ``body`` is a single node, one of the :ref:`expression types `. .. doctest:: @@ -209,7 +209,7 @@ Root nodes A single :ref:`interactive input `, like in :ref:`tut-interac`. Node type generated by :func:`ast.parse` when *mode* is ``"single"``. - *body* is a :class:`list` of :ref:`statement nodes `. + ``body`` is a :class:`list` of :ref:`statement nodes `. .. doctest:: @@ -238,9 +238,9 @@ Root nodes # type: (int, int) -> int return a + b - *argtypes* is a :class:`list` of :ref:`expression nodes `. + ``argtypes`` is a :class:`list` of :ref:`expression nodes `. - *returns* is a single :ref:`expression node `. + ``returns`` is a single :ref:`expression node `. .. doctest:: @@ -889,7 +889,7 @@ Statements .. class:: AnnAssign(target, annotation, value, simple) An assignment with a type annotation. 
``target`` is a single node and can - be a :class:`Name`, a :class:`Attribute` or a :class:`Subscript`. + be a :class:`Name`, an :class:`Attribute` or a :class:`Subscript`. ``annotation`` is the annotation, such as a :class:`Constant` or :class:`Name` node. ``value`` is a single optional node. @@ -1766,9 +1766,9 @@ aliases. .. class:: TypeVar(name, bound, default_value) - A :class:`typing.TypeVar`. *name* is the name of the type variable. - *bound* is the bound or constraints, if any. If *bound* is a :class:`Tuple`, - it represents constraints; otherwise it represents the bound. *default_value* + A :class:`typing.TypeVar`. ``name`` is the name of the type variable. + ``bound`` is the bound or constraints, if any. If ``bound`` is a :class:`Tuple`, + it represents constraints; otherwise it represents the bound. ``default_value`` is the default value; if the :class:`!TypeVar` has no default, this attribute will be set to ``None``. @@ -1796,8 +1796,8 @@ aliases. .. class:: ParamSpec(name, default_value) - A :class:`typing.ParamSpec`. *name* is the name of the parameter specification. - *default_value* is the default value; if the :class:`!ParamSpec` has no default, + A :class:`typing.ParamSpec`. ``name`` is the name of the parameter specification. + ``default_value`` is the default value; if the :class:`!ParamSpec` has no default, this attribute will be set to ``None``. .. doctest:: @@ -1831,8 +1831,8 @@ aliases. .. class:: TypeVarTuple(name, default_value) - A :class:`typing.TypeVarTuple`. *name* is the name of the type variable tuple. - *default_value* is the default value; if the :class:`!TypeVarTuple` has no + A :class:`typing.TypeVarTuple`. ``name`` is the name of the type variable tuple. + ``default_value`` is the default value; if the :class:`!TypeVarTuple` has no default, this attribute will be set to ``None``. .. doctest:: @@ -1981,7 +1981,7 @@ Function and class definitions YieldFrom(value) A ``yield`` or ``yield from`` expression. Because these are expressions, they - must be wrapped in a :class:`Expr` node if the value sent back is not used. + must be wrapped in an :class:`Expr` node if the value sent back is not used. .. doctest:: @@ -2033,8 +2033,7 @@ Function and class definitions * ``name`` is a raw string for the class name * ``bases`` is a list of nodes for explicitly specified base classes. * ``keywords`` is a list of :class:`.keyword` nodes, principally for 'metaclass'. - Other keywords will be passed to the metaclass, as per `PEP-3115 - `_. + Other keywords will be passed to the metaclass, as per :pep:`3115`. * ``body`` is a list of nodes representing the code within the class definition. * ``decorator_list`` is a list of nodes, as in :class:`FunctionDef`. @@ -2134,7 +2133,7 @@ and classes for traversing abstract syntax trees: If ``type_comments=True`` is given, the parser is modified to check and return type comments as specified by :pep:`484` and :pep:`526`. This is equivalent to adding :data:`ast.PyCF_TYPE_COMMENTS` to the - flags passed to :func:`compile()`. This will report syntax errors + flags passed to :func:`compile`. This will report syntax errors for misplaced type comments. Without this flag, type comments will be ignored, and the ``type_comment`` field on selected AST nodes will always be ``None``. 
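A small sketch of the flag's effect (editorial illustration, not part of the patch):

.. code-block:: python

    import ast

    src = "x = 1  # type: int"

    with_flag = ast.parse(src, type_comments=True)
    print(with_flag.body[0].type_comment)     # 'int'

    without_flag = ast.parse(src)
    print(without_flag.body[0].type_comment)  # None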
In addition, the locations of ``# type: diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 1d79f78e8e1..943683f6b8a 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -126,7 +126,7 @@ Running and stopping the loop Run the event loop until :meth:`stop` is called. - If :meth:`stop` is called before :meth:`run_forever()` is called, + If :meth:`stop` is called before :meth:`run_forever` is called, the loop will poll the I/O selector once with a timeout of zero, run all callbacks scheduled in response to I/O events (and those that were already scheduled), and then exit. @@ -165,7 +165,7 @@ Running and stopping the loop .. coroutinemethod:: loop.shutdown_asyncgens() Schedule all currently open :term:`asynchronous generator` objects to - close with an :meth:`~agen.aclose()` call. After calling this method, + close with an :meth:`~agen.aclose` call. After calling this method, the event loop will issue a warning if a new asynchronous generator is iterated. This should be used to reliably finalize all scheduled asynchronous generators. @@ -1262,6 +1262,9 @@ Executing code in thread or process pools The *executor* argument should be an :class:`concurrent.futures.Executor` instance. The default executor is used if *executor* is ``None``. + The default executor can be set by :meth:`loop.set_default_executor`, + otherwise, a :class:`concurrent.futures.ThreadPoolExecutor` will be + lazy-initialized and used by :func:`run_in_executor` if needed. Example:: @@ -1399,7 +1402,7 @@ Allows customizing how exceptions are handled in the event loop. This method should not be overloaded in subclassed event loops. For custom exception handling, use - the :meth:`set_exception_handler()` method. + the :meth:`set_exception_handler` method. Enabling debug mode ^^^^^^^^^^^^^^^^^^^ @@ -1482,7 +1485,7 @@ async/await code consider using the high-level * *stdin* can be any of these: * a file-like object - * an existing file descriptor (a positive integer), for example those created with :meth:`os.pipe()` + * an existing file descriptor (a positive integer), for example those created with :meth:`os.pipe` * the :const:`subprocess.PIPE` constant (default) which will create a new pipe and connect it, * the value ``None`` which will make the subprocess inherit the file diff --git a/Doc/library/asyncio-future.rst b/Doc/library/asyncio-future.rst index 893ae5518f7..9dce0731411 100644 --- a/Doc/library/asyncio-future.rst +++ b/Doc/library/asyncio-future.rst @@ -120,20 +120,20 @@ Future Object a :exc:`CancelledError` exception. If the Future's result isn't yet available, this method raises - a :exc:`InvalidStateError` exception. + an :exc:`InvalidStateError` exception. .. method:: set_result(result) Mark the Future as *done* and set its result. - Raises a :exc:`InvalidStateError` error if the Future is + Raises an :exc:`InvalidStateError` error if the Future is already *done*. .. method:: set_exception(exception) Mark the Future as *done* and set an exception. - Raises a :exc:`InvalidStateError` error if the Future is + Raises an :exc:`InvalidStateError` error if the Future is already *done*. .. method:: done() diff --git a/Doc/library/asyncio-llapi-index.rst b/Doc/library/asyncio-llapi-index.rst index 67136ba69ec..3e21054aa4f 100644 --- a/Doc/library/asyncio-llapi-index.rst +++ b/Doc/library/asyncio-llapi-index.rst @@ -56,10 +56,10 @@ See also the main documentation section about the * - :meth:`loop.close` - Close the event loop. 
- * - :meth:`loop.is_running()` + * - :meth:`loop.is_running` - Return ``True`` if the event loop is running. - * - :meth:`loop.is_closed()` + * - :meth:`loop.is_closed` - Return ``True`` if the event loop is closed. * - ``await`` :meth:`loop.shutdown_asyncgens` diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst index 9b579cc1d5f..61991bf2f4e 100644 --- a/Doc/library/asyncio-queue.rst +++ b/Doc/library/asyncio-queue.rst @@ -55,7 +55,7 @@ Queue Return ``True`` if there are :attr:`maxsize` items in the queue. If the queue was initialized with ``maxsize=0`` (the default), - then :meth:`full()` never returns ``True``. + then :meth:`full` never returns ``True``. .. coroutinemethod:: get() diff --git a/Doc/library/asyncio-runner.rst b/Doc/library/asyncio-runner.rst index ec170dfde9e..8312e55126a 100644 --- a/Doc/library/asyncio-runner.rst +++ b/Doc/library/asyncio-runner.rst @@ -91,7 +91,7 @@ Runner context manager current one. By default :func:`asyncio.new_event_loop` is used and set as current event loop with :func:`asyncio.set_event_loop` if *loop_factory* is ``None``. - Basically, :func:`asyncio.run()` example can be rewritten with the runner usage:: + Basically, :func:`asyncio.run` example can be rewritten with the runner usage:: async def main(): await asyncio.sleep(1) diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index c5deac7e274..4716a3f9c8a 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -414,6 +414,53 @@ reported by :meth:`asyncio.Task.cancelling`. Improved handling of simultaneous internal and external cancellations and correct preservation of cancellation counts. +Terminating a Task Group +------------------------ + +While terminating a task group is not natively supported by the standard +library, termination can be achieved by adding an exception-raising task +to the task group and ignoring the raised exception: + +.. code-block:: python + + import asyncio + from asyncio import TaskGroup + + class TerminateTaskGroup(Exception): + """Exception raised to terminate a task group.""" + + async def force_terminate_task_group(): + """Used to force termination of a task group.""" + raise TerminateTaskGroup() + + async def job(task_id, sleep_time): + print(f'Task {task_id}: start') + await asyncio.sleep(sleep_time) + print(f'Task {task_id}: done') + + async def main(): + try: + async with TaskGroup() as group: + # spawn some tasks + group.create_task(job(1, 0.5)) + group.create_task(job(2, 1.5)) + # sleep for 1 second + await asyncio.sleep(1) + # add an exception-raising task to force the group to terminate + group.create_task(force_terminate_task_group()) + except* TerminateTaskGroup: + pass + + asyncio.run(main()) + +Expected output: + +.. code-block:: text + + Task 1: start + Task 2: start + Task 1: done + Sleeping ======== @@ -1170,7 +1217,7 @@ Task Object a :exc:`CancelledError` exception. If the Task's result isn't yet available, this method raises - a :exc:`InvalidStateError` exception. + an :exc:`InvalidStateError` exception. .. method:: exception() diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst index 184f981c102..5f83b3a2658 100644 --- a/Doc/library/asyncio.rst +++ b/Doc/library/asyncio.rst @@ -56,9 +56,13 @@ Additionally, there are **low-level** APIs for * :ref:`bridge ` callback-based libraries and code with async/await syntax. +.. include:: ../includes/wasm-notavail.rst + .. _asyncio-cli: -You can experiment with an ``asyncio`` concurrent context in the REPL: +.. 
rubric:: asyncio REPL + +You can experiment with an ``asyncio`` concurrent context in the :term:`REPL`: .. code-block:: pycon @@ -70,7 +74,14 @@ You can experiment with an ``asyncio`` concurrent context in the REPL: >>> await asyncio.sleep(10, result='hello') 'hello' -.. include:: ../includes/wasm-notavail.rst +.. audit-event:: cpython.run_stdin "" "" + +.. versionchanged:: 3.12.5 (also 3.11.10, 3.10.15, 3.9.20, and 3.8.20) + Emits audit events. + +.. versionchanged:: 3.13 + Uses PyREPL if possible, in which case :envvar:`PYTHONSTARTUP` is + also executed. Emits audit events. .. We use the "rubric" directive here to avoid creating the "Reference" subsection in the TOC. diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst index d5876054da3..eafc038d6cb 100644 --- a/Doc/library/calendar.rst +++ b/Doc/library/calendar.rst @@ -393,13 +393,22 @@ The :mod:`calendar` module exports the following data attributes: .. data:: day_name - An array that represents the days of the week in the current locale. + A sequence that represents the days of the week in the current locale, + where Monday is day number 0. + + >>> import calendar + >>> list(calendar.day_name) + ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] .. data:: day_abbr - An array that represents the abbreviated days of the week in the current locale. + A sequence that represents the abbreviated days of the week in the current locale, + where Mon is day number 0. + >>> import calendar + >>> list(calendar.day_abbr) + ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] .. data:: MONDAY TUESDAY @@ -426,17 +435,24 @@ The :mod:`calendar` module exports the following data attributes: .. data:: month_name - An array that represents the months of the year in the current locale. This + A sequence that represents the months of the year in the current locale. This follows normal convention of January being month number 1, so it has a length of 13 and ``month_name[0]`` is the empty string. + >>> import calendar + >>> list(calendar.month_name) + ['', 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] + .. data:: month_abbr - An array that represents the abbreviated months of the year in the current + A sequence that represents the abbreviated months of the year in the current locale. This follows normal convention of January being month number 1, so it has a length of 13 and ``month_abbr[0]`` is the empty string. + >>> import calendar + >>> list(calendar.month_abbr) + ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] .. data:: JANUARY FEBRUARY diff --git a/Doc/library/cmdline.rst b/Doc/library/cmdline.rst index 5174515ffc2..487fd775337 100644 --- a/Doc/library/cmdline.rst +++ b/Doc/library/cmdline.rst @@ -40,6 +40,7 @@ The following modules have a command-line interface. * :mod:`runpy` * :ref:`site ` * :ref:`sqlite3 ` +* :ref:`symtable ` * :ref:`sysconfig ` * :mod:`tabnanny` * :ref:`tarfile ` diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst index ea27436f67f..0adbd305b46 100644 --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -216,6 +216,9 @@ Collections Abstract Base Classes -- Detailed Descriptions ABC for classes that provide the :meth:`~object.__call__` method. + See :ref:`annotating-callables` for details on how to use + :class:`!Callable` in type annotations. + .. 
class:: Iterable ABC for classes that provide the :meth:`~container.__iter__` method. @@ -253,6 +256,9 @@ Collections Abstract Base Classes -- Detailed Descriptions :meth:`~generator.send`, :meth:`~generator.throw` and :meth:`~generator.close` methods. + See :ref:`annotating-generators-and-coroutines` + for details on using :class:`!Generator` in type annotations. + .. versionadded:: 3.5 .. class:: Sequence @@ -331,6 +337,11 @@ Collections Abstract Base Classes -- Detailed Descriptions Using ``isinstance(gencoro, Coroutine)`` for them will return ``False``. Use :func:`inspect.isawaitable` to detect them. + See :ref:`annotating-generators-and-coroutines` + for details on using :class:`!Coroutine` in type annotations. + The variance and order of type parameters correspond to those of + :class:`Generator`. + .. versionadded:: 3.5 .. class:: AsyncIterable @@ -352,6 +363,9 @@ Collections Abstract Base Classes -- Detailed Descriptions ABC for :term:`asynchronous generator` classes that implement the protocol defined in :pep:`525` and :pep:`492`. + See :ref:`annotating-generators-and-coroutines` + for details on using :class:`!AsyncGenerator` in type annotations. + .. versionadded:: 3.6 .. class:: Buffer diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index ce89101d6b6..0cc9063f153 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -874,8 +874,8 @@ they add the ability to access fields by name instead of position index. ``(1, 2)``, then ``x`` will be a required argument, ``y`` will default to ``1``, and ``z`` will default to ``2``. - If *module* is defined, the ``__module__`` attribute of the named tuple is - set to that value. + If *module* is defined, the :attr:`~type.__module__` attribute of the + named tuple is set to that value. Named tuple instances do not have per-instance dictionaries, so they are lightweight and require no more memory than regular tuples. @@ -1169,8 +1169,11 @@ Some differences from :class:`dict` still remain: In addition to the usual mapping methods, ordered dictionaries also support reverse iteration using :func:`reversed`. +.. _collections_OrderedDict__eq__: + Equality tests between :class:`OrderedDict` objects are order-sensitive -and are implemented as ``list(od1.items())==list(od2.items())``. +and are roughly equivalent to ``list(od1.items())==list(od2.items())``. + Equality tests between :class:`OrderedDict` objects and other :class:`~collections.abc.Mapping` objects are order-insensitive like regular dictionaries. This allows :class:`OrderedDict` objects to be substituted @@ -1186,7 +1189,7 @@ anywhere a regular dictionary is used. method. .. versionchanged:: 3.9 - Added merge (``|``) and update (``|=``) operators, specified in :pep:`584`. + Added merge (``|``) and update (``|=``) operators, specified in :pep:`584`. :class:`OrderedDict` Examples and Recipes diff --git a/Doc/library/colorsys.rst b/Doc/library/colorsys.rst index 125d62b1740..ffebf4e40dd 100644 --- a/Doc/library/colorsys.rst +++ b/Doc/library/colorsys.rst @@ -14,7 +14,7 @@ The :mod:`colorsys` module defines bidirectional conversions of color values between colors expressed in the RGB (Red Green Blue) color space used in computer monitors and three other coordinate systems: YIQ, HLS (Hue Lightness Saturation) and HSV (Hue Saturation Value). Coordinates in all of these color -spaces are floating point values. In the YIQ space, the Y coordinate is between +spaces are floating-point values. 
In the YIQ space, the Y coordinate is between 0 and 1, but the I and Q coordinates can be positive or negative. In all other spaces, the coordinates are all between 0 and 1. diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index d9c0cb67a92..c42288419c4 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -90,7 +90,7 @@ compile Python sources. .. option:: -j N Use *N* workers to compile the files within the given directory. - If ``0`` is used, then the result of :func:`os.process_cpu_count()` + If ``0`` is used, then the result of :func:`os.process_cpu_count` will be used. .. option:: --invalidation-mode [timestamp|checked-hash|unchecked-hash] diff --git a/Doc/library/configparser.rst b/Doc/library/configparser.rst index e84fb513e45..cf13de4116f 100644 --- a/Doc/library/configparser.rst +++ b/Doc/library/configparser.rst @@ -147,23 +147,28 @@ case-insensitive and stored in lowercase [1]_. It is possible to read several configurations into a single :class:`ConfigParser`, where the most recently added configuration has the highest priority. Any conflicting keys are taken from the more recent -configuration while the previously existing keys are retained. +configuration while the previously existing keys are retained. The example +below reads in an ``override.ini`` file, which will override any conflicting +keys from the ``example.ini`` file. + +.. code-block:: ini + + [DEFAULT] + ServerAliveInterval = -1 .. doctest:: - >>> another_config = configparser.ConfigParser() - >>> another_config.read('example.ini') - ['example.ini'] - >>> another_config['topsecret.server.example']['Port'] - '50022' - >>> another_config.read_string("[topsecret.server.example]\nPort=48484") - >>> another_config['topsecret.server.example']['Port'] - '48484' - >>> another_config.read_dict({"topsecret.server.example": {"Port": 21212}}) - >>> another_config['topsecret.server.example']['Port'] - '21212' - >>> another_config['topsecret.server.example']['ForwardX11'] - 'no' + >>> config_override = configparser.ConfigParser() + >>> config_override['DEFAULT'] = {'ServerAliveInterval': '-1'} + >>> with open('override.ini', 'w') as configfile: + ... config_override.write(configfile) + ... + >>> config_override = configparser.ConfigParser() + >>> config_override.read(['example.ini', 'override.ini']) + ['example.ini', 'override.ini'] + >>> print(config_override.get('DEFAULT', 'ServerAliveInterval')) + -1 + This behaviour is equivalent to a :meth:`ConfigParser.read` call with several files passed to the *filenames* parameter. @@ -981,9 +986,34 @@ ConfigParser Objects When *converters* is given, it should be a dictionary where each key represents the name of a type converter and each value is a callable implementing the conversion from string to the desired datatype. Every - converter gets its own corresponding :meth:`!get*()` method on the parser + converter gets its own corresponding :meth:`!get*` method on the parser object and section proxies. + It is possible to read several configurations into a single + :class:`ConfigParser`, where the most recently added configuration has the + highest priority. Any conflicting keys are taken from the more recent + configuration while the previously existing keys are retained. The example + below reads in an ``override.ini`` file, which will override any conflicting + keys from the ``example.ini`` file. + + .. code-block:: ini + + [DEFAULT] + ServerAliveInterval = -1 + + .. 
doctest:: + + >>> config_override = configparser.ConfigParser() + >>> config_override['DEFAULT'] = {'ServerAliveInterval': '-1'} + >>> with open('override.ini', 'w') as configfile: + ... config_override.write(configfile) + ... + >>> config_override = configparser.ConfigParser() + >>> config_override.read(['example.ini', 'override.ini']) + ['example.ini', 'override.ini'] + >>> print(config_override.get('DEFAULT', 'ServerAliveInterval')) + -1 + .. versionchanged:: 3.1 The default *dict_type* is :class:`collections.OrderedDict`. @@ -996,7 +1026,7 @@ ConfigParser Objects The *converters* argument was added. .. versionchanged:: 3.7 - The *defaults* argument is read with :meth:`read_dict()`, + The *defaults* argument is read with :meth:`read_dict`, providing consistent behavior across the parser: non-string keys and values are implicitly converted to strings. @@ -1153,7 +1183,7 @@ ConfigParser Objects .. method:: getfloat(section, option, *, raw=False, vars=None[, fallback]) A convenience method which coerces the *option* in the specified *section* - to a floating point number. See :meth:`get` for explanation of *raw*, + to a floating-point number. See :meth:`get` for explanation of *raw*, *vars* and *fallback*. diff --git a/Doc/library/constants.rst b/Doc/library/constants.rst index 93a7244f87d..3eceecc4e0a 100644 --- a/Doc/library/constants.rst +++ b/Doc/library/constants.rst @@ -79,6 +79,8 @@ A small number of constants live in the built-in namespace. They are: :exc:`SyntaxError`), so they can be considered "true" constants. +.. _site-consts: + Constants added by the :mod:`site` module ----------------------------------------- @@ -94,6 +96,13 @@ should not be used in programs. (i.e. EOF) to exit", and when called, raise :exc:`SystemExit` with the specified exit code. +.. data:: help + :noindex: + + Object that when printed, prints the message "Type help() for interactive + help, or help(object) for help about object.", and when called, + acts as described :func:`elsewhere `. + .. data:: copyright credits diff --git a/Doc/library/contextlib.rst b/Doc/library/contextlib.rst index 27cf99446e5..f5b349441bc 100644 --- a/Doc/library/contextlib.rst +++ b/Doc/library/contextlib.rst @@ -322,7 +322,7 @@ Functions and classes provided: .. versionchanged:: 3.12 ``suppress`` now supports suppressing exceptions raised as - part of an :exc:`BaseExceptionGroup`. + part of a :exc:`BaseExceptionGroup`. .. function:: redirect_stdout(new_target) diff --git a/Doc/library/contextvars.rst b/Doc/library/contextvars.rst index 8ae386b489f..2a79dfe8f81 100644 --- a/Doc/library/contextvars.rst +++ b/Doc/library/contextvars.rst @@ -15,7 +15,7 @@ function and the :class:`~contextvars.Context` class should be used to manage the current context in asynchronous frameworks. Context managers that have state should use Context Variables -instead of :func:`threading.local()` to prevent their state from +instead of :func:`threading.local` to prevent their state from bleeding to other code unexpectedly, when used in concurrent code. See also :pep:`567` for additional details. @@ -146,7 +146,7 @@ Manual Context Management Every thread will have a different top-level :class:`~contextvars.Context` object. This means that a :class:`ContextVar` object behaves in a similar - fashion to :func:`threading.local()` when values are assigned in different + fashion to :func:`threading.local` when values are assigned in different threads. Context implements the :class:`collections.abc.Mapping` interface. 
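A minimal sketch of the per-thread behaviour described in the contextvars hunks above (an editorial illustration, not part of the patch; the names ``var`` and ``worker`` are made up): a :class:`contextvars.ContextVar` set in the main thread is not visible in a freshly started thread, which gets its own top-level ``Context``, much like ``threading.local``::

    import contextvars
    import threading

    var = contextvars.ContextVar('var', default='default')

    def worker():
        # A new thread starts from its own top-level Context, so the value
        # assigned in the main thread below is not visible here.
        print('in thread:', var.get())    # -> default

    var.set('set in main thread')
    t = threading.Thread(target=worker)
    t.start()
    t.join()
    print('in main:', var.get())          # -> set in main thread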
@@ -254,7 +254,7 @@ client:: # without passing it explicitly to this function. client_addr = client_addr_var.get() - return f'Good bye, client @ {client_addr}\n'.encode() + return f'Good bye, client @ {client_addr}\r\n'.encode() async def handle_request(reader, writer): addr = writer.transport.get_extra_info('socket').getpeername() @@ -268,9 +268,10 @@ client:: print(line) if not line.strip(): break - writer.write(line) - writer.write(render_goodbye()) + writer.write(b'HTTP/1.1 200 OK\r\n') # status line + writer.write(b'\r\n') # headers + writer.write(render_goodbye()) # body writer.close() async def main(): @@ -282,5 +283,6 @@ client:: asyncio.run(main()) - # To test it you can use telnet: + # To test it you can use telnet or curl: # telnet 127.0.0.1 8081 + # curl 127.0.0.1:8081 diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 820535e3cba..12b44569271 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -51,7 +51,7 @@ function call fails. Here are some examples for Windows. Note that ``msvcrt`` is the MS standard C -library containing most standard C functions, and uses the cdecl calling +library containing most standard C functions, and uses the ``cdecl`` calling convention:: >>> from ctypes import * @@ -107,7 +107,7 @@ Functions are accessed as attributes of dll objects:: Note that win32 system dlls like ``kernel32`` and ``user32`` often export ANSI as well as UNICODE versions of a function. The UNICODE version is exported with -an ``W`` appended to the name, while the ANSI version is exported with an ``A`` +a ``W`` appended to the name, while the ANSI version is exported with an ``A`` appended to the name. The win32 ``GetModuleHandle`` function, which returns a *module handle* for a given module name, has the following C prototype, and a macro is used to expose one of them as ``GetModuleHandle`` depending on whether @@ -2621,6 +2621,15 @@ Arrays and pointers Array subclass constructors accept positional arguments, used to initialize the elements in order. +.. function:: ARRAY(type, length) + + Create an array. + Equivalent to ``type * length``, where *type* is a + :mod:`ctypes` data type and *length* an integer. + + This function is :term:`soft deprecated` in favor of multiplication. + There are no plans to remove it. + .. class:: _Pointer diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index 91ea6150fb1..6c7fc721a3e 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -21,7 +21,7 @@ for Windows, DOS, and possibly other systems as well. This extension module is designed to match the API of ncurses, an open-source curses library hosted on Linux and the BSD variants of Unix. -.. include:: ../includes/wasm-ios-notavail.rst +.. include:: ../includes/wasm-mobile-notavail.rst .. note:: diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index fcb5e8bad29..cfca11afbd2 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -124,7 +124,7 @@ Module contents - *unsafe_hash*: If ``False`` (the default), a :meth:`~object.__hash__` method is generated according to how *eq* and *frozen* are set. - :meth:`!__hash__` is used by built-in :meth:`hash()`, and when objects are + :meth:`!__hash__` is used by built-in :meth:`hash`, and when objects are added to hashed collections such as dictionaries and sets. Having a :meth:`!__hash__` implies that instances of the class are immutable. 
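To make the ``unsafe_hash``/:meth:`!__hash__` interplay noted in the dataclasses hunk above concrete, here is a small editorial sketch (not part of the diff) using only the default ``eq``/``frozen`` settings::

    from dataclasses import dataclass

    @dataclass(frozen=True)        # eq=True is the default
    class Point:
        x: int
        y: int

    # eq=True and frozen=True generate a __hash__, so instances work in sets.
    print(hash(Point(1, 2)) == hash(Point(1, 2)))   # True
    print({Point(1, 2), Point(1, 2)})               # {Point(x=1, y=2)}

    @dataclass                     # eq=True, frozen=False (the defaults)
    class MutablePoint:
        x: int

    # With eq=True but frozen=False, __hash__ is set to None (unhashable).
    print(MutablePoint.__hash__ is None)            # True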
Mutability is a complicated property that depends on the programmer's @@ -185,10 +185,21 @@ Module contents - *slots*: If true (the default is ``False``), :attr:`~object.__slots__` attribute will be generated and new class will be returned instead of the original one. If :attr:`!__slots__` is already defined in the class, then :exc:`TypeError` - is raised. Calling no-arg :func:`super` in dataclasses using ``slots=True`` will result in - the following exception being raised: - ``TypeError: super(type, obj): obj must be an instance or subtype of type``. - The two-arg :func:`super` is a valid workaround. See :gh:`90562` for full details. + is raised. + + .. warning:: + Calling no-arg :func:`super` in dataclasses using ``slots=True`` + will result in the following exception being raised: + ``TypeError: super(type, obj): obj must be an instance or subtype of type``. + The two-arg :func:`super` is a valid workaround. + See :gh:`90562` for full details. + + .. warning:: + Passing parameters to a base class :meth:`~object.__init_subclass__` + when using ``slots=True`` will result in a :exc:`TypeError`. + Either use ``__init_subclass__`` with no parameters + or use default values as a workaround. + See :gh:`91126` for full details. .. versionadded:: 3.10 @@ -204,7 +215,8 @@ Module contents - *weakref_slot*: If true (the default is ``False``), add a slot named "__weakref__", which is required to make an instance - weakref-able. It is an error to specify ``weakref_slot=True`` + :func:`weakref-able `. + It is an error to specify ``weakref_slot=True`` without also specifying ``slots=True``. .. versionadded:: 3.11 diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index b6d8e6e6df0..9246aff12a6 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -48,7 +48,7 @@ Aware and Naive Objects ----------------------- Date and time objects may be categorized as "aware" or "naive" depending on -whether or not they include timezone information. +whether or not they include time zone information. With sufficient knowledge of applicable algorithmic and political time adjustments, such as time zone and daylight saving time information, @@ -58,7 +58,7 @@ interpretation. [#]_ A **naive** object does not contain enough information to unambiguously locate itself relative to other date/time objects. Whether a naive object represents -Coordinated Universal Time (UTC), local time, or time in some other timezone is +Coordinated Universal Time (UTC), local time, or time in some other time zone is purely up to the program, just like it is up to the program whether a particular number represents metres, miles, or mass. Naive objects are easy to understand and to work with, at the cost of ignoring some aspects of reality. @@ -70,9 +70,9 @@ These :class:`tzinfo` objects capture information about the offset from UTC time, the time zone name, and whether daylight saving time is in effect. Only one concrete :class:`tzinfo` class, the :class:`timezone` class, is -supplied by the :mod:`!datetime` module. The :class:`timezone` class can -represent simple timezones with fixed offsets from UTC, such as UTC itself or -North American EST and EDT timezones. Supporting timezones at deeper levels of +supplied by the :mod:`!datetime` module. The :class:`!timezone` class can +represent simple time zones with fixed offsets from UTC, such as UTC itself or +North American EST and EDT time zones. Supporting time zones at deeper levels of detail is up to the application. 
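The warning added above about ``slots=True`` and zero-argument :func:`super` is easier to see with a short sketch (editorial, not part of the patch; ``Base`` and ``Child`` are made-up names). Because the decorator creates and returns a new class when ``slots=True`` is used, the ``__class__`` cell captured by zero-argument ``super()`` still refers to the discarded original class, while the explicit two-argument form resolves against the rebound class name and works::

    from dataclasses import dataclass

    class Base:
        def describe(self):
            return 'base'

    @dataclass(slots=True)
    class Child(Base):
        value: int = 0

        def describe(self):
            # super().describe() would raise TypeError here (see gh-90562);
            # the two-argument form is the documented workaround.
            return super(Child, self).describe() + ' + child'

    print(Child().describe())    # base + child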
The rules for time adjustment across the world are more political than rational, change frequently, and there is no standard suitable for every application aside from UTC. @@ -95,7 +95,7 @@ The :mod:`!datetime` module exports the following constants: .. attribute:: UTC - Alias for the UTC timezone singleton :attr:`datetime.timezone.utc`. + Alias for the UTC time zone singleton :attr:`datetime.timezone.utc`. .. versionadded:: 3.11 @@ -283,17 +283,37 @@ Class attributes: Note that, because of normalization, ``timedelta.max`` is greater than ``-timedelta.min``. ``-timedelta.max`` is not representable as a :class:`timedelta` object. + Instance attributes (read-only): -+------------------+--------------------------------------------+ -| Attribute | Value | -+==================+============================================+ -| ``days`` | Between -999999999 and 999999999 inclusive | -+------------------+--------------------------------------------+ -| ``seconds`` | Between 0 and 86399 inclusive | -+------------------+--------------------------------------------+ -| ``microseconds`` | Between 0 and 999999 inclusive | -+------------------+--------------------------------------------+ +.. attribute:: timedelta.days + + Between -999,999,999 and 999,999,999 inclusive. + + +.. attribute:: timedelta.seconds + + Between 0 and 86,399 inclusive. + + .. caution:: + + It is a somewhat common bug for code to unintentionally use this attribute + when it is actually intended to get a :meth:`~timedelta.total_seconds` + value instead: + + .. doctest:: + + >>> from datetime import timedelta + >>> duration = timedelta(seconds=11235813) + >>> duration.days, duration.seconds + (130, 3813) + >>> duration.total_seconds() + 11235813.0 + +.. attribute:: timedelta.microseconds + + Between 0 and 999,999 inclusive. + Supported operations: @@ -345,7 +365,7 @@ Supported operations: | | same value. (2) | +--------------------------------+-----------------------------------------------+ | ``-t1`` | Equivalent to ``timedelta(-t1.days, | -| | -t1.seconds*, -t1.microseconds)``, | +| | -t1.seconds, -t1.microseconds)``, | | | and to ``t1 * -1``. (1)(4) | +--------------------------------+-----------------------------------------------+ | ``abs(t)`` | Equivalent to ``+t`` when ``t.days >= 0``, | @@ -869,7 +889,7 @@ Other constructors, all class methods: .. classmethod:: datetime.today() - Return the current local datetime, with :attr:`.tzinfo` ``None``. + Return the current local date and time, with :attr:`.tzinfo` ``None``. Equivalent to:: @@ -1053,7 +1073,7 @@ Other constructors, all class methods: .. versionadded:: 3.7 .. versionchanged:: 3.11 Previously, this method only supported formats that could be emitted by - :meth:`date.isoformat()` or :meth:`datetime.isoformat()`. + :meth:`date.isoformat` or :meth:`datetime.isoformat`. .. classmethod:: datetime.fromisocalendar(year, week, day) @@ -1070,7 +1090,7 @@ Other constructors, all class methods: Return a :class:`.datetime` corresponding to *date_string*, parsed according to *format*. - If *format* does not contain microseconds or timezone information, this is equivalent to:: + If *format* does not contain microseconds or time zone information, this is equivalent to:: datetime(*(time.strptime(date_string, format)[0:6])) @@ -1311,22 +1331,22 @@ Instance methods: If provided, *tz* must be an instance of a :class:`tzinfo` subclass, and its :meth:`utcoffset` and :meth:`dst` methods must not return ``None``. If *self* - is naive, it is presumed to represent time in the system timezone. 
+ is naive, it is presumed to represent time in the system time zone. If called without arguments (or with ``tz=None``) the system local - timezone is assumed for the target timezone. The ``.tzinfo`` attribute of the converted + time zone is assumed for the target time zone. The ``.tzinfo`` attribute of the converted datetime instance will be set to an instance of :class:`timezone` with the zone name and offset obtained from the OS. If ``self.tzinfo`` is *tz*, ``self.astimezone(tz)`` is equal to *self*: no adjustment of date or time data is performed. Else the result is local - time in the timezone *tz*, representing the same UTC time as *self*: after + time in the time zone *tz*, representing the same UTC time as *self*: after ``astz = dt.astimezone(tz)``, ``astz - astz.utcoffset()`` will have the same date and time data as ``dt - dt.utcoffset()``. - If you merely want to attach a time zone object *tz* to a datetime *dt* without + If you merely want to attach a :class:`timezone` object *tz* to a datetime *dt* without adjustment of date and time data, use ``dt.replace(tzinfo=tz)``. If you - merely want to remove the time zone object from an aware datetime *dt* without + merely want to remove the :class:`!timezone` object from an aware datetime *dt* without conversion of date and time data, use ``dt.replace(tzinfo=None)``. Note that the default :meth:`tzinfo.fromutc` method can be overridden in a @@ -1336,7 +1356,7 @@ Instance methods: def astimezone(self, tz): if self.tzinfo is tz: return self - # Convert self to UTC, and attach the new time zone object. + # Convert self to UTC, and attach the new timezone object. utc = (self - self.utcoffset()).replace(tzinfo=tz) # Convert from UTC to tz's local time. return tz.fromutc(utc) @@ -1450,7 +1470,7 @@ Instance methods: There is no method to obtain the POSIX timestamp directly from a naive :class:`.datetime` instance representing UTC time. If your - application uses this convention and your system timezone is not + application uses this convention and your system time zone is not set to UTC, you can obtain the POSIX timestamp by supplying ``tzinfo=timezone.utc``:: @@ -1861,7 +1881,7 @@ Other constructor: .. versionadded:: 3.7 .. versionchanged:: 3.11 Previously, this method only supported formats that could be emitted by - :meth:`time.isoformat()`. + :meth:`time.isoformat`. Instance methods: @@ -2021,7 +2041,7 @@ Examples of working with a :class:`.time` object:: supply implementations of the standard :class:`tzinfo` methods needed by the :class:`.datetime` methods you use. The :mod:`!datetime` module provides :class:`timezone`, a simple concrete subclass of :class:`tzinfo` which can - represent timezones with fixed offset from UTC such as UTC itself or North + represent time zones with fixed offset from UTC such as UTC itself or North American EST and EDT. Special requirement for pickling: A :class:`tzinfo` subclass must have an @@ -2146,7 +2166,7 @@ When a :class:`.datetime` object is passed in response to a :class:`.datetime` method, ``dt.tzinfo`` is the same object as *self*. :class:`tzinfo` methods can rely on this, unless user code calls :class:`tzinfo` methods directly. The intent is that the :class:`tzinfo` methods interpret *dt* as being in local -time, and not need worry about objects in other timezones. +time, and not need worry about objects in other time zones. There is one more :class:`tzinfo` method that a subclass may wish to override: @@ -2263,12 +2283,12 @@ only EST (fixed offset -5 hours), or only EDT (fixed offset -4 hours)). 
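A short editorial sketch (not part of the patch) of the attach-versus-convert distinction drawn in the :meth:`~datetime.datetime.astimezone` hunk above: ``astimezone()`` keeps the instant and changes the wall clock, while ``replace(tzinfo=...)`` keeps the wall clock and therefore changes the instant::

    from datetime import datetime, timedelta, timezone

    est = timezone(timedelta(hours=-5), 'EST')
    dt = datetime(2024, 1, 1, 12, 0, tzinfo=timezone.utc)

    converted = dt.astimezone(est)     # same instant, shifted wall clock
    attached = dt.replace(tzinfo=est)  # same wall clock, different instant

    print(converted)          # 2024-01-01 07:00:00-05:00
    print(attached)           # 2024-01-01 12:00:00-05:00
    print(converted == dt)    # True  (the same point in time)
    print(attached == dt)     # False (five hours later in UTC)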
:mod:`zoneinfo` The :mod:`!datetime` module has a basic :class:`timezone` class (for handling arbitrary fixed offsets from UTC) and its :attr:`timezone.utc` - attribute (a UTC timezone instance). + attribute (a UTC :class:`!timezone` instance). - ``zoneinfo`` brings the *IANA timezone database* (also known as the Olson + ``zoneinfo`` brings the *IANA time zone database* (also known as the Olson database) to Python, and its usage is recommended. - `IANA timezone database `_ + `IANA time zone database `_ The Time Zone Database (often called tz, tzdata or zoneinfo) contains code and data that represent the history of local time for many representative locations around the globe. It is updated periodically to reflect changes @@ -2282,10 +2302,10 @@ only EST (fixed offset -5 hours), or only EDT (fixed offset -4 hours)). ------------------------- The :class:`timezone` class is a subclass of :class:`tzinfo`, each -instance of which represents a timezone defined by a fixed offset from +instance of which represents a time zone defined by a fixed offset from UTC. -Objects of this class cannot be used to represent timezone information in the +Objects of this class cannot be used to represent time zone information in the locations where different offsets are used in different days of the year or where historical changes have been made to civil time. @@ -2346,7 +2366,7 @@ Class attributes: .. attribute:: timezone.utc - The UTC timezone, ``timezone(timedelta(0))``. + The UTC time zone, ``timezone(timedelta(0))``. .. index:: @@ -2555,7 +2575,7 @@ Using ``datetime.strptime(date_string, format)`` is equivalent to:: datetime(*(time.strptime(date_string, format)[0:6])) -except when the format includes sub-second components or timezone offset +except when the format includes sub-second components or time zone offset information, which are supported in ``datetime.strptime`` but are discarded by ``time.strptime``. diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst index 77148a558d1..6c659ea52ad 100644 --- a/Doc/library/dbm.rst +++ b/Doc/library/dbm.rst @@ -19,8 +19,6 @@ slow-but-simple implementation in module :mod:`dbm.dumb` will be used. There is a `third party interface `_ to the Oracle Berkeley DB. -.. include:: ../includes/wasm-ios-notavail.rst - .. exception:: error A tuple containing the exceptions that can be raised by each of the supported @@ -164,6 +162,8 @@ SQLite backend for the :mod:`dbm` module. The files created by :mod:`dbm.sqlite3` can thus be opened by :mod:`sqlite3`, or any other SQLite browser, including the SQLite CLI. +.. include:: ../includes/wasm-notavail.rst + .. function:: open(filename, /, flag="r", mode=0o666) Open an SQLite database. @@ -207,6 +207,8 @@ functionality like crash tolerance. The file formats created by :mod:`dbm.gnu` and :mod:`dbm.ndbm` are incompatible and can not be used interchangeably. +.. include:: ../includes/wasm-mobile-notavail.rst + .. exception:: error Raised on :mod:`dbm.gnu`-specific errors, such as I/O errors. :exc:`KeyError` is @@ -326,6 +328,8 @@ This module can be used with the "classic" NDBM interface or the when storing values larger than this limit. Reading such corrupted files can result in a hard crash (segmentation fault). +.. include:: ../includes/wasm-mobile-notavail.rst + .. exception:: error Raised on :mod:`dbm.ndbm`-specific errors, such as I/O errors. 
:exc:`KeyError` is raised diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index db323802a6f..916f17cadfa 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -1,4 +1,4 @@ -:mod:`!decimal` --- Decimal fixed point and floating point arithmetic +:mod:`!decimal` --- Decimal fixed-point and floating-point arithmetic ===================================================================== .. module:: decimal @@ -31,7 +31,7 @@ -------------- The :mod:`decimal` module provides support for fast correctly rounded -decimal floating point arithmetic. It offers several advantages over the +decimal floating-point arithmetic. It offers several advantages over the :class:`float` datatype: * Decimal "is based on a floating-point model which was designed with people @@ -207,7 +207,7 @@ a decimal raises :class:`InvalidOperation`:: .. versionchanged:: 3.3 Decimals interact well with much of the rest of Python. Here is a small decimal -floating point flying circus: +floating-point flying circus: .. doctest:: :options: +NORMALIZE_WHITESPACE @@ -373,7 +373,7 @@ Decimal objects digits, and an integer exponent. For example, ``Decimal((0, (1, 4, 1, 4), -3))`` returns ``Decimal('1.414')``. - If *value* is a :class:`float`, the binary floating point value is losslessly + If *value* is a :class:`float`, the binary floating-point value is losslessly converted to its exact decimal equivalent. This conversion can often require 53 or more digits of precision. For example, ``Decimal(float('1.1'))`` converts to @@ -403,7 +403,7 @@ Decimal objects Underscores are allowed for grouping, as with integral and floating-point literals in code. - Decimal floating point objects share many properties with the other built-in + Decimal floating-point objects share many properties with the other built-in numeric types such as :class:`float` and :class:`int`. All of the usual math operations and special methods apply. Likewise, decimal objects can be copied, pickled, printed, used as dictionary keys, used as set elements, @@ -445,7 +445,7 @@ Decimal objects Mixed-type comparisons between :class:`Decimal` instances and other numeric types are now fully supported. - In addition to the standard numeric properties, decimal floating point + In addition to the standard numeric properties, decimal floating-point objects also have a number of specialized methods: @@ -1741,7 +1741,7 @@ The following table summarizes the hierarchy of signals:: .. _decimal-notes: -Floating Point Notes +Floating-Point Notes -------------------- @@ -1754,7 +1754,7 @@ can still incur round-off error when non-zero digits exceed the fixed precision. The effects of round-off error can be amplified by the addition or subtraction of nearly offsetting quantities resulting in loss of significance. Knuth -provides two instructive examples where rounded floating point arithmetic with +provides two instructive examples where rounded floating-point arithmetic with insufficient precision causes the breakdown of the associative and distributive properties of addition: @@ -1844,7 +1844,7 @@ treated as equal and their sign is informational. In addition to the two signed zeros which are distinct yet equal, there are various representations of zero with differing precisions yet equivalent in value. This takes a bit of getting used to. 
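As an editorial illustration of the lossless ``float`` conversion described in the decimal hunks above (not part of the patch), note that it is the exact binary value, not the decimal literal, that gets converted::

    from decimal import Decimal

    print(Decimal('1.1'))         # 1.1  (constructed from the string)
    print(Decimal(float('1.1')))  # 1.100000000000000088817841970012523233890533447265625
    print(Decimal(1.1) == Decimal('1.1'))   # False

When the decimal literal is what you want, construct the :class:`~decimal.Decimal` from the string instead of from a float.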
For an eye accustomed to -normalized floating point representations, it is not immediately obvious that +normalized floating-point representations, it is not immediately obvious that the following calculation returns a value equal to zero: >>> 1 / Decimal('Infinity') @@ -2171,7 +2171,7 @@ value unchanged: Q. Is there a way to convert a regular float to a :class:`Decimal`? -A. Yes, any binary floating point number can be exactly expressed as a +A. Yes, any binary floating-point number can be exactly expressed as a Decimal though an exact conversion may take more precision than intuition would suggest: @@ -2225,7 +2225,7 @@ Q. Is the CPython implementation fast for large numbers? A. Yes. In the CPython and PyPy3 implementations, the C/CFFI versions of the decimal module integrate the high speed `libmpdec `_ library for -arbitrary precision correctly rounded decimal floating point arithmetic [#]_. +arbitrary precision correctly rounded decimal floating-point arithmetic [#]_. ``libmpdec`` uses `Karatsuba multiplication `_ for medium-sized numbers and the `Number Theoretic Transform diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index ca215888f09..c5507e89a52 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -1102,11 +1102,15 @@ iterations of the loop. .. opcode:: BUILD_TUPLE (count) Creates a tuple consuming *count* items from the stack, and pushes the - resulting tuple onto the stack.:: + resulting tuple onto the stack:: - assert count > 0 - STACK, values = STACK[:-count], STACK[-count:] - STACK.append(tuple(values)) + if count == 0: + value = () + else: + value = tuple(STACK[-count:]) + STACK = STACK[:-count] + + STACK.append(value) .. opcode:: BUILD_LIST (count) @@ -1581,7 +1585,7 @@ iterations of the loop. end = STACK.pop() start = STACK.pop() - STACK.append(slice(start, stop)) + STACK.append(slice(start, end)) if it is 3, implements:: @@ -1748,7 +1752,7 @@ iterations of the loop. | ``INTRINSIC_STOPITERATION_ERROR`` | Extracts the return value from a | | | ``StopIteration`` exception. | +-----------------------------------+-----------------------------------+ - | ``INTRINSIC_ASYNC_GEN_WRAP`` | Wraps an aync generator value | + | ``INTRINSIC_ASYNC_GEN_WRAP`` | Wraps an async generator value | +-----------------------------------+-----------------------------------+ | ``INTRINSIC_UNARY_POSITIVE`` | Performs the unary ``+`` | | | operation | diff --git a/Doc/library/email.compat32-message.rst b/Doc/library/email.compat32-message.rst index c4c322a82e1..4285c436e8d 100644 --- a/Doc/library/email.compat32-message.rst +++ b/Doc/library/email.compat32-message.rst @@ -7,6 +7,7 @@ :synopsis: The base class representing email messages in a fashion backward compatible with Python 3.2 :noindex: + :no-index: The :class:`Message` class is very similar to the @@ -104,7 +105,7 @@ Here are the methods of the :class:`Message` class: .. method:: __str__() - Equivalent to :meth:`.as_string()`. Allows ``str(msg)`` to produce a + Equivalent to :meth:`.as_string`. Allows ``str(msg)`` to produce a string containing the formatted message. @@ -142,7 +143,7 @@ Here are the methods of the :class:`Message` class: .. method:: __bytes__() - Equivalent to :meth:`.as_bytes()`. Allows ``bytes(msg)`` to produce a + Equivalent to :meth:`.as_bytes`. Allows ``bytes(msg)`` to produce a bytes object containing the formatted message. .. 
versionadded:: 3.4 diff --git a/Doc/library/email.contentmanager.rst b/Doc/library/email.contentmanager.rst index 34121f8c0a7..a86e227429b 100644 --- a/Doc/library/email.contentmanager.rst +++ b/Doc/library/email.contentmanager.rst @@ -58,11 +58,12 @@ * the type itself (``typ``) * the type's fully qualified name (``typ.__module__ + '.' + typ.__qualname__``). - * the type's qualname (``typ.__qualname__``) - * the type's name (``typ.__name__``). + * the type's :attr:`qualname ` (``typ.__qualname__``) + * the type's :attr:`name ` (``typ.__name__``). If none of the above match, repeat all of the checks above for each of - the types in the :term:`MRO` (``typ.__mro__``). Finally, if no other key + the types in the :term:`MRO` (:attr:`typ.__mro__ `). + Finally, if no other key yields a handler, check for a handler for the key ``None``. If there is no handler for ``None``, raise a :exc:`KeyError` for the fully qualified name of the type. diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst index 33ab4265116..f8f43d82a3d 100644 --- a/Doc/library/email.errors.rst +++ b/Doc/library/email.errors.rst @@ -58,6 +58,13 @@ The following exception classes are defined in the :mod:`email.errors` module: :class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g. :class:`~email.mime.image.MIMEImage`). + +.. exception:: HeaderWriteError() + + Raised when an error occurs when the :mod:`~email.generator` outputs + headers. + + .. exception:: MessageDefect() This is the base class for all defects found when parsing email messages. diff --git a/Doc/library/email.headerregistry.rst b/Doc/library/email.headerregistry.rst index bcbd00c833e..7f8044932fa 100644 --- a/Doc/library/email.headerregistry.rst +++ b/Doc/library/email.headerregistry.rst @@ -317,7 +317,7 @@ variant, :attr:`~.BaseHeader.max_count` is set to 1. class. When *use_default_map* is ``True`` (the default), the standard mapping of header names to classes is copied in to the registry during initialization. *base_class* is always the last class in the generated - class's ``__bases__`` list. + class's :class:`~type.__bases__` list. The default mappings are: diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst index e9cce1af186..71d6e321f38 100644 --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -124,7 +124,7 @@ message objects. .. method:: __bytes__() - Equivalent to :meth:`.as_bytes()`. Allows ``bytes(msg)`` to produce a + Equivalent to :meth:`.as_bytes`. Allows ``bytes(msg)`` to produce a bytes object containing the serialized message. diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst index 83feedf7283..314767d0802 100644 --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -229,6 +229,24 @@ added matters. To illustrate:: .. versionadded:: 3.6 + + .. attribute:: verify_generated_headers + + If ``True`` (the default), the generator will raise + :exc:`~email.errors.HeaderWriteError` instead of writing a header + that is improperly folded or delimited, such that it would + be parsed as multiple headers or joined with adjacent data. + Such headers can be generated by custom header classes or bugs + in the ``email`` module. + + As it's a security feature, this defaults to ``True`` even in the + :class:`~email.policy.Compat32` policy. + For backwards compatible, but unsafe, behavior, it must be set to + ``False`` explicitly. + + .. 
versionadded:: 3.13 + + The following :class:`Policy` method is intended to be called by code using the email library to create policy instances with custom settings: diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst index 6f0bed130bc..6bd45200d86 100644 --- a/Doc/library/email.utils.rst +++ b/Doc/library/email.utils.rst @@ -159,7 +159,7 @@ of the new API. Fri, 09 Nov 2001 01:08:47 -0000 - Optional *timeval* if given is a floating point time value as accepted by + Optional *timeval* if given is a floating-point time value as accepted by :func:`time.gmtime` and :func:`time.localtime`, otherwise the current time is used. diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst index 518a2940edc..8dfb7ad9c95 100644 --- a/Doc/library/ensurepip.rst +++ b/Doc/library/ensurepip.rst @@ -38,7 +38,7 @@ when creating a virtual environment) or after explicitly uninstalling :pep:`453`: Explicit bootstrapping of pip in Python installations The original rationale and specification for this module. -.. include:: ../includes/wasm-ios-notavail.rst +.. include:: ../includes/wasm-mobile-notavail.rst Command line interface ---------------------- diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index 9cf94e342da..a724fe4bcdc 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -570,6 +570,8 @@ Data Types >>> len(white) 3 + .. versionadded:: 3.11 + .. method:: __bool__(self): Returns *True* if any members in flag, *False* otherwise:: @@ -661,7 +663,7 @@ Data Types * the result is a valid *IntFlag*: an *IntFlag* is returned * the result is not a valid *IntFlag*: the result depends on the :class:`FlagBoundary` setting - The :func:`repr()` of unnamed zero-valued flags has changed. It is now: + The :func:`repr` of unnamed zero-valued flags has changed. It is now: >>> Color(0) diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 7910b306f14..b5ba86f1b19 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -412,8 +412,8 @@ The following exceptions are the exceptions that are usually raised. represented. This cannot occur for integers (which would rather raise :exc:`MemoryError` than give up). However, for historical reasons, OverflowError is sometimes raised for integers that are outside a required - range. Because of the lack of standardization of floating point exception - handling in C, most floating point operations are not checked. + range. Because of the lack of standardization of floating-point exception + handling in C, most floating-point operations are not checked. .. exception:: PythonFinalizationError diff --git a/Doc/library/filecmp.rst b/Doc/library/filecmp.rst index 2a0670ffcc2..282d0e0d8db 100644 --- a/Doc/library/filecmp.rst +++ b/Doc/library/filecmp.rst @@ -70,7 +70,7 @@ The :mod:`filecmp` module defines the following functions: The :class:`dircmp` class ------------------------- -.. class:: dircmp(a, b, ignore=None, hide=None, shallow=True) +.. class:: dircmp(a, b, ignore=None, hide=None, *, shallow=True) Construct a new directory comparison object, to compare the directories *a* and *b*. *ignore* is a list of names to ignore, and defaults to diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index 552d6030b1c..7e615ed230a 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -31,7 +31,7 @@ another rational number, or from a string. :class:`Fraction` instance with the same value. 
The next two versions accept either a :class:`float` or a :class:`decimal.Decimal` instance, and return a :class:`Fraction` instance with exactly the same value. Note that due to the - usual issues with binary floating-point (see :ref:`tut-fp-issues`), the + usual issues with binary floating point (see :ref:`tut-fp-issues`), the argument to ``Fraction(1.1)`` is not exactly equal to 11/10, and so ``Fraction(1.1)`` does *not* return ``Fraction(11, 10)`` as one might expect. (But see the documentation for the :meth:`limit_denominator` method below.) @@ -87,7 +87,7 @@ another rational number, or from a string. .. versionchanged:: 3.9 The :func:`math.gcd` function is now used to normalize the *numerator* - and *denominator*. :func:`math.gcd` always return a :class:`int` type. + and *denominator*. :func:`math.gcd` always returns an :class:`int` type. Previously, the GCD type depended on *numerator* and *denominator*. .. versionchanged:: 3.11 diff --git a/Doc/library/ftplib.rst b/Doc/library/ftplib.rst index 8c39dc00f5d..bb153220672 100644 --- a/Doc/library/ftplib.rst +++ b/Doc/library/ftplib.rst @@ -243,7 +243,7 @@ FTP objects Retrieve a file in binary transfer mode. :param str cmd: - An appropriate ``STOR`` command: :samp:`"STOR {filename}"`. + An appropriate ``RETR`` command: :samp:`"RETR {filename}"`. :param callback: A single parameter callable that is called diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index b75b6dfc315..8d023ebf48a 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -57,7 +57,7 @@ are always available. They are listed here in alphabetical order. .. function:: abs(x) Return the absolute value of a number. The argument may be an - integer, a floating point number, or an object implementing + integer, a floating-point number, or an object implementing :meth:`~object.__abs__`. If the argument is a complex number, its magnitude is returned. @@ -161,7 +161,7 @@ are always available. They are listed here in alphabetical order. This function drops you into the debugger at the call site. Specifically, it calls :func:`sys.breakpointhook`, passing ``args`` and ``kws`` straight through. By default, ``sys.breakpointhook()`` calls - :func:`pdb.set_trace()` expecting no arguments. In this case, it is + :func:`pdb.set_trace` expecting no arguments. In this case, it is purely a convenience function so you don't have to explicitly import :mod:`pdb` or type as much code to enter the debugger. However, :func:`sys.breakpointhook` can be set to some other function and @@ -283,9 +283,11 @@ are always available. They are listed here in alphabetical order. :func:`property`. .. versionchanged:: 3.10 - Class methods now inherit the method attributes (``__module__``, - ``__name__``, ``__qualname__``, ``__doc__`` and ``__annotations__``) and - have a new ``__wrapped__`` attribute. + Class methods now inherit the method attributes + (:attr:`~function.__module__`, :attr:`~function.__name__`, + :attr:`~function.__qualname__`, :attr:`~function.__doc__` and + :attr:`~function.__annotations__`) and have a new ``__wrapped__`` + attribute. .. deprecated-removed:: 3.11 3.13 Class methods can no longer wrap other :term:`descriptors ` such as @@ -538,7 +540,7 @@ are always available. They are listed here in alphabetical order. Take two (non-complex) numbers as arguments and return a pair of numbers consisting of their quotient and remainder when using integer division. With mixed operand types, the rules for binary arithmetic operators apply. 
For - integers, the result is the same as ``(a // b, a % b)``. For floating point + integers, the result is the same as ``(a // b, a % b)``. For floating-point numbers the result is ``(q, a % b)``, where *q* is usually ``math.floor(a / b)`` but may be 1 less than that. In any case ``q * b + a % b`` is very close to *a*, if ``a % b`` is non-zero it has the same sign as *b*, and ``0 @@ -734,7 +736,7 @@ are always available. They are listed here in alphabetical order. single: NaN single: Infinity - Return a floating point number constructed from a number or a string. + Return a floating-point number constructed from a number or a string. Examples: @@ -775,8 +777,8 @@ are always available. They are listed here in alphabetical order. Case is not significant, so, for example, "inf", "Inf", "INFINITY", and "iNfINity" are all acceptable spellings for positive infinity. - Otherwise, if the argument is an integer or a floating point number, a - floating point number with the same value (within Python's floating point + Otherwise, if the argument is an integer or a floating-point number, a + floating-point number with the same value (within Python's floating-point precision) is returned. If the argument is outside the range of a Python float, an :exc:`OverflowError` will be raised. @@ -1003,7 +1005,7 @@ are always available. They are listed here in alphabetical order. ``int(x)`` returns ``x.__int__()``. If the argument defines :meth:`~object.__index__`, it returns ``x.__index__()``. If the argument defines :meth:`~object.__trunc__`, it returns ``x.__trunc__()``. - For floating point numbers, this truncates towards zero. + For floating-point numbers, this truncates towards zero. If the argument is not a number or if *base* is given, then it must be a string, :class:`bytes`, or :class:`bytearray` instance representing an integer @@ -1279,8 +1281,9 @@ are always available. They are listed here in alphabetical order. .. note:: - :class:`object` does *not* have a :attr:`~object.__dict__`, so you can't - assign arbitrary attributes to an instance of the :class:`object` class. + :class:`object` instances do *not* have :attr:`~object.__dict__` + attributes, so you can't assign arbitrary attributes to an instance of + :class:`object`. .. function:: oct(x) @@ -1329,7 +1332,7 @@ are always available. They are listed here in alphabetical order. (which on *some* Unix systems, means that *all* writes append to the end of the file regardless of the current seek position). In text mode, if *encoding* is not specified the encoding used is platform-dependent: - :func:`locale.getencoding()` is called to get the current locale encoding. + :func:`locale.getencoding` is called to get the current locale encoding. (For reading and writing raw bytes use binary mode and leave *encoding* unspecified.) The available modes are: @@ -1502,7 +1505,7 @@ are always available. They are listed here in alphabetical order. (where :func:`open` is declared), :mod:`os`, :mod:`os.path`, :mod:`tempfile`, and :mod:`shutil`. - .. audit-event:: open file,mode,flags open + .. audit-event:: open path,mode,flags open The ``mode`` and ``flags`` arguments may have been modified or inferred from the original call. @@ -1693,6 +1696,13 @@ are always available. They are listed here in alphabetical order. .. versionchanged:: 3.5 The docstrings of property objects are now writeable. + .. attribute:: __name__ + + Attribute holding the name of the property. The name of the property + can be changed at runtime. + + .. versionadded:: 3.13 + .. _func-range: .. 
class:: range(stop) @@ -1893,10 +1903,11 @@ are always available. They are listed here in alphabetical order. For more information on static methods, see :ref:`types`. .. versionchanged:: 3.10 - Static methods now inherit the method attributes (``__module__``, - ``__name__``, ``__qualname__``, ``__doc__`` and ``__annotations__``), - have a new ``__wrapped__`` attribute, and are now callable as regular - functions. + Static methods now inherit the method attributes + (:attr:`~function.__module__`, :attr:`~function.__name__`, + :attr:`~function.__qualname__`, :attr:`~function.__doc__` and + :attr:`~function.__annotations__`), have a new ``__wrapped__`` attribute, + and are now callable as regular functions. .. index:: @@ -1921,7 +1932,7 @@ are always available. They are listed here in alphabetical order. For some use cases, there are good alternatives to :func:`sum`. The preferred, fast way to concatenate a sequence of strings is by calling - ``''.join(sequence)``. To add floating point values with extended precision, + ``''.join(sequence)``. To add floating-point values with extended precision, see :func:`math.fsum`\. To concatenate a series of iterables, consider using :func:`itertools.chain`. @@ -1943,14 +1954,14 @@ are always available. They are listed here in alphabetical order. to be searched. The search starts from the class right after the *type*. - For example, if :attr:`~class.__mro__` of *object_or_type* is + For example, if :attr:`~type.__mro__` of *object_or_type* is ``D -> B -> C -> A -> object`` and the value of *type* is ``B``, then :func:`super` searches ``C -> A -> object``. - The :attr:`~class.__mro__` attribute of the *object_or_type* lists the method - resolution search order used by both :func:`getattr` and :func:`super`. The - attribute is dynamic and can change whenever the inheritance hierarchy is - updated. + The :attr:`~type.__mro__` attribute of the class corresponding to + *object_or_type* lists the method resolution search order used by both + :func:`getattr` and :func:`super`. The attribute is dynamic and can change + whenever the inheritance hierarchy is updated. If the second argument is omitted, the super object returned is unbound. If the second argument is an object, ``isinstance(obj, type)`` must be true. If @@ -2026,28 +2037,30 @@ are always available. They are listed here in alphabetical order. With one argument, return the type of an *object*. The return value is a type object and generally the same object as returned by - :attr:`object.__class__ `. + :attr:`object.__class__`. The :func:`isinstance` built-in function is recommended for testing the type of an object, because it takes subclasses into account. - With three arguments, return a new type object. This is essentially a dynamic form of the :keyword:`class` statement. The *name* string is - the class name and becomes the :attr:`~definition.__name__` attribute. + the class name and becomes the :attr:`~type.__name__` attribute. The *bases* tuple contains the base classes and becomes the - :attr:`~class.__bases__` attribute; if empty, :class:`object`, the + :attr:`~type.__bases__` attribute; if empty, :class:`object`, the ultimate base of all classes, is added. The *dict* dictionary contains attribute and method definitions for the class body; it may be copied - or wrapped before becoming the :attr:`~object.__dict__` attribute. - The following two statements create identical :class:`type` objects: + or wrapped before becoming the :attr:`~type.__dict__` attribute. 
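The :func:`super` search order spelled out above (an ``__mro__`` of ``D -> B -> C -> A -> object`` with ``B`` as the *type* argument) can be checked with a tiny editorial sketch (not part of the patch; the class names mirror the prose)::

    class A:
        def who(self):
            return 'A'

    class B(A):
        def who(self):
            return 'B'

    class C(A):
        def who(self):
            return 'C'

    class D(B, C):
        pass

    d = D()
    print([cls.__name__ for cls in D.__mro__])   # ['D', 'B', 'C', 'A', 'object']
    print(super(B, d).who())                     # 'C': the search starts after B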
+ The following two statements create identical :class:`!type` objects: >>> class X: ... a = 1 ... >>> X = type('X', (), dict(a=1)) - See also :ref:`bltin-type-objects`. + See also: + + * :ref:`Documentation on attributes and methods on classes `. + * :ref:`bltin-type-objects` Keyword arguments provided to the three argument form are passed to the appropriate metaclass machinery (usually :meth:`~object.__init_subclass__`) @@ -2057,18 +2070,18 @@ are always available. They are listed here in alphabetical order. See also :ref:`class-customization`. .. versionchanged:: 3.6 - Subclasses of :class:`type` which don't override ``type.__new__`` may no + Subclasses of :class:`!type` which don't override ``type.__new__`` may no longer use the one-argument form to get the type of an object. .. function:: vars() vars(object) Return the :attr:`~object.__dict__` attribute for a module, class, instance, - or any other object with a :attr:`~object.__dict__` attribute. + or any other object with a :attr:`!__dict__` attribute. Objects such as modules and instances have an updateable :attr:`~object.__dict__` attribute; however, other objects may have write restrictions on their - :attr:`~object.__dict__` attributes (for example, classes use a + :attr:`!__dict__` attributes (for example, classes use a :class:`types.MappingProxyType` to prevent direct dictionary updates). Without an argument, :func:`vars` acts like :func:`locals`. diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 9d5c72802a2..3540a1e8f49 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -34,7 +34,7 @@ The :mod:`functools` module defines the following functions: Returns the same as ``lru_cache(maxsize=None)``, creating a thin wrapper around a dictionary lookup for the function arguments. Because it never needs to evict old values, this is smaller and faster than - :func:`lru_cache()` with a size limit. + :func:`lru_cache` with a size limit. For example:: @@ -218,7 +218,7 @@ The :mod:`functools` module defines the following functions: cache. See :ref:`faq-cache-method-calls` An `LRU (least recently used) cache - `_ + `_ works best when the most recent calls are the best predictors of upcoming calls (for example, the most popular articles on a news server tend to change each day). The cache's size limit assures that the cache does not @@ -492,6 +492,25 @@ The :mod:`functools` module defines the following functions: ... print(arg.real, arg.imag) ... + For code that dispatches on a collections type (e.g., ``list``), but wants + to typehint the items of the collection (e.g., ``list[int]``), the + dispatch type should be passed explicitly to the decorator itself with the + typehint going into the function definition:: + + >>> @fun.register(list) + ... def _(arg: list[int], verbose=False): + ... if verbose: + ... print("Enumerate this:") + ... for i, elem in enumerate(arg): + ... print(i, elem) + + .. note:: + + At runtime the function will dispatch on an instance of a list regardless + of the type contained within the list i.e. ``[1,2,3]`` will be + dispatched the same as ``["foo", "bar", "baz"]``. The annotation + provided in this example is for static type checkers only and has no + runtime impact. 
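The new note above, that dispatch happens on ``list`` regardless of the element annotation, can be observed directly; here is a small editorial sketch (not part of the patch) reusing the ``fun`` example from the surrounding text::

    from functools import singledispatch

    @singledispatch
    def fun(arg, verbose=False):
        if verbose:
            print('Let me just say,', end=' ')
        print(arg)

    @fun.register(list)
    def _(arg: list[int], verbose=False):
        if verbose:
            print('Enumerate this:')
        for i, elem in enumerate(arg):
            print(i, elem)

    fun([1, 2, 3], verbose=True)        # list handler
    fun(['foo', 'bar'], verbose=True)   # same handler, despite list[str]
    fun('hello', verbose=True)          # generic fallback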
To enable registering :term:`lambdas` and pre-existing functions, the :func:`register` attribute can also be used in a functional form:: @@ -646,10 +665,11 @@ The :mod:`functools` module defines the following functions: attributes of the wrapper function are updated with the corresponding attributes from the original function. The default values for these arguments are the module level constants ``WRAPPER_ASSIGNMENTS`` (which assigns to the wrapper - function's ``__module__``, ``__name__``, ``__qualname__``, ``__annotations__``, - ``__type_params__``, and ``__doc__``, the documentation string) - and ``WRAPPER_UPDATES`` (which - updates the wrapper function's ``__dict__``, i.e. the instance dictionary). + function's :attr:`~function.__module__`, :attr:`~function.__name__`, + :attr:`~function.__qualname__`, :attr:`~function.__annotations__`, + :attr:`~function.__type_params__`, and :attr:`~function.__doc__`, the + documentation string) and ``WRAPPER_UPDATES`` (which updates the wrapper + function's :attr:`~function.__dict__`, i.e. the instance dictionary). To allow access to the original function for introspection and other purposes (e.g. bypassing a caching decorator such as :func:`lru_cache`), this function @@ -670,7 +690,7 @@ The :mod:`functools` module defines the following functions: .. versionchanged:: 3.2 The ``__wrapped__`` attribute is now automatically added. - The ``__annotations__`` attribute is now copied by default. + The :attr:`~function.__annotations__` attribute is now copied by default. Missing attributes no longer trigger an :exc:`AttributeError`. .. versionchanged:: 3.4 @@ -679,7 +699,7 @@ The :mod:`functools` module defines the following functions: (see :issue:`17482`) .. versionchanged:: 3.12 - The ``__type_params__`` attribute is now copied by default. + The :attr:`~function.__type_params__` attribute is now copied by default. .. decorator:: wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES) @@ -741,9 +761,10 @@ have three read-only attributes: The keyword arguments that will be supplied when the :class:`partial` object is called. -:class:`partial` objects are like :class:`function` objects in that they are -callable, weak referenceable, and can have attributes. There are some important -differences. For instance, the :attr:`~definition.__name__` and :attr:`__doc__` attributes +:class:`partial` objects are like :ref:`function objects ` +in that they are callable, weak referenceable, and can have attributes. +There are some important differences. For instance, the +:attr:`~function.__name__` and :attr:`function.__doc__` attributes are not created automatically. Also, :class:`partial` objects defined in classes behave like static methods and do not transform into bound methods during instance attribute look-up. diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst index 790dfdfd00b..8ce850ba777 100644 --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -42,7 +42,7 @@ The :mod:`gc` module provides the following functions: With no arguments, run a full collection. The optional argument *generation* may be an integer specifying which generation to collect (from 0 to 2). A - :exc:`ValueError` is raised if the generation number is invalid. The sum of + :exc:`ValueError` is raised if the generation number is invalid. The sum of collected objects and uncollectable objects is returned. 
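Since the :func:`~functools.update_wrapper` hunk above enumerates the attributes copied by ``WRAPPER_ASSIGNMENTS``, a brief editorial sketch (not part of the patch; ``log_calls`` is a made-up decorator) shows what that copying preserves::

    import functools

    def log_calls(func):
        @functools.wraps(func)      # copies __name__, __doc__, __module__, ...
        def wrapper(*args, **kwargs):
            print('calling', func.__name__)
            return func(*args, **kwargs)
        return wrapper

    @log_calls
    def add(a, b):
        """Return a + b."""
        return a + b

    print(add(1, 2))        # calling add / 3
    print(add.__name__)     # add, not wrapper
    print(add.__doc__)      # Return a + b.
    print(add.__wrapped__)  # the original, undecorated function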
The free lists maintained for a number of built-in types are cleared diff --git a/Doc/library/getpass.rst b/Doc/library/getpass.rst index 9d67250033d..3b5296f9ec6 100644 --- a/Doc/library/getpass.rst +++ b/Doc/library/getpass.rst @@ -49,7 +49,7 @@ The :mod:`getpass` module provides two functions: systems which support the :mod:`pwd` module, otherwise, an :exc:`OSError` is raised. - In general, this function should be preferred over :func:`os.getlogin()`. + In general, this function should be preferred over :func:`os.getlogin`. .. versionchanged:: 3.13 Previously, various exceptions beyond just :exc:`OSError` were raised. diff --git a/Doc/library/grp.rst b/Doc/library/grp.rst index 30caea328ba..d1c7f22a209 100644 --- a/Doc/library/grp.rst +++ b/Doc/library/grp.rst @@ -10,7 +10,7 @@ This module provides access to the Unix group database. It is available on all Unix versions. -.. availability:: Unix, not WASI, not iOS. +.. availability:: Unix, not WASI, not Android, not iOS. Group database entries are reported as a tuple-like object, whose attributes correspond to the members of the ``group`` structure (Attribute field below, see diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index 5d24b77e13b..dffb167c747 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -655,7 +655,7 @@ on the hash function used in digital signatures. by the signer. (`NIST SP-800-106 "Randomized Hashing for Digital Signatures" - `_) + `_) In BLAKE2 the salt is processed as a one-time input to the hash function during initialization, rather than as an input to each compression function. @@ -809,8 +809,8 @@ Domain Dedication 1.0 Universal: .. _NIST-SP-800-132: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf .. _stackexchange pbkdf2 iterations question: https://security.stackexchange.com/questions/3959/recommended-of-iterations-when-using-pbkdf2-sha256/ .. _Attacks on cryptographic hash algorithms: https://en.wikipedia.org/wiki/Cryptographic_hash_function#Attacks_on_cryptographic_hash_algorithms -.. _the FIPS 180-4 standard: https://csrc.nist.gov/publications/detail/fips/180/4/final -.. _the FIPS 202 standard: https://csrc.nist.gov/publications/detail/fips/202/final +.. _the FIPS 180-4 standard: https://csrc.nist.gov/pubs/fips/180-4/upd1/final +.. _the FIPS 202 standard: https://csrc.nist.gov/pubs/fips/202/final .. _HACL\* project: https://github.com/hacl-star/hacl-star @@ -827,7 +827,7 @@ Domain Dedication 1.0 Universal: https://nvlpubs.nist.gov/nistpubs/fips/nist.fips.180-4.pdf The FIPS 180-4 publication on Secure Hash Algorithms. - https://csrc.nist.gov/publications/detail/fips/202/final + https://csrc.nist.gov/pubs/fips/202/final The FIPS 202 publication on the SHA-3 Standard. https://www.blake2.net/ diff --git a/Doc/library/http.cookiejar.rst b/Doc/library/http.cookiejar.rst index 31ac8bafb6a..23ddecf8738 100644 --- a/Doc/library/http.cookiejar.rst +++ b/Doc/library/http.cookiejar.rst @@ -137,7 +137,7 @@ The following classes are provided: The Netscape protocol with the bugs fixed. Uses :mailheader:`Set-Cookie2` in place of :mailheader:`Set-Cookie`. Not widely used. - http://kristol.org/cookie/errata.html + https://kristol.org/cookie/errata.html Unfinished errata to :rfc:`2965`. 
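The BLAKE2 salt behaviour mentioned in the hashlib hunk above (the salt is mixed in once, at initialization) can be demonstrated with a short editorial sketch (not part of the patch)::

    import hashlib

    msg = b'some message'
    h1 = hashlib.blake2b(msg, salt=b'salt one')
    h2 = hashlib.blake2b(msg, salt=b'salt two')

    # Different salts give unrelated digests for the same message.
    print(h1.hexdigest() == h2.hexdigest())   # False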
:rfc:`2964` - Use of HTTP State Management diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst index 3c80fa747d5..1197b575c00 100644 --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -263,7 +263,7 @@ provides three different variants: Adds a blank line (indicating the end of the HTTP headers in the response) - to the headers buffer and calls :meth:`flush_headers()`. + to the headers buffer and calls :meth:`flush_headers`. .. versionchanged:: 3.2 The buffered headers are written to the output stream. @@ -378,7 +378,7 @@ provides three different variants: If the request was mapped to a file, it is opened. Any :exc:`OSError` exception in opening the requested file is mapped to a ``404``, - ``'File not found'`` error. If there was a ``'If-Modified-Since'`` + ``'File not found'`` error. If there was an ``'If-Modified-Since'`` header in the request, and the file was not modified after this time, a ``304``, ``'Not Modified'`` response is sent. Otherwise, the content type is guessed by calling the :meth:`guess_type` method, which in turn diff --git a/Doc/library/importlib.resources.abc.rst b/Doc/library/importlib.resources.abc.rst index 5ea8044e1ec..54995ddbfbc 100644 --- a/Doc/library/importlib.resources.abc.rst +++ b/Doc/library/importlib.resources.abc.rst @@ -22,7 +22,7 @@ something like a data file that lives next to the ``__init__.py`` file of the package. The purpose of this class is to help abstract out the accessing of such data files so that it does not matter if - the package and its data file(s) are stored in a e.g. zip file + the package and its data file(s) are stored e.g. in a zip file versus on the file system. For any of methods of this class, a *resource* argument is diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index b58ef359378..1b798568646 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -657,7 +657,7 @@ ABC hierarchy:: something like a data file that lives next to the ``__init__.py`` file of the package. The purpose of this class is to help abstract out the accessing of such data files so that it does not matter if - the package and its data file(s) are stored in a e.g. zip file + the package and its data file(s) are stored e.g. in a zip file versus on the file system. For any of methods of this class, a *resource* argument is @@ -1166,10 +1166,9 @@ find and load modules. .. class:: ModuleSpec(name, loader, *, origin=None, loader_state=None, is_package=None) A specification for a module's import-system-related state. This is - typically exposed as the module's :attr:`__spec__` attribute. In the - descriptions below, the names in parentheses give the corresponding - attribute available directly on the module object, - e.g. ``module.__spec__.origin == module.__file__``. Note, however, that + typically exposed as the module's :attr:`__spec__` attribute. Many + of these attributes are also available directly on a module: for example, + ``module.__spec__.origin == module.__file__``. Note, however, that while the *values* are usually equivalent, they can differ since there is no synchronization between the two objects. For example, it is possible to update the module's :attr:`__file__` at runtime and this will not be automatically @@ -1179,66 +1178,60 @@ find and load modules. .. attribute:: name - (:attr:`__name__`) - - The module's fully qualified name. - The :term:`finder` should always set this attribute to a non-empty string. 
+ The module's fully qualified name + (see :attr:`__name__` attributes on modules). + The :term:`finder` should always set this attribute to a non-empty string. .. attribute:: loader - (:attr:`__loader__`) - - The :term:`loader` used to load the module. - The :term:`finder` should always set this attribute. + The :term:`loader` used to load the module + (see :attr:`__loader__` attributes on modules). + The :term:`finder` should always set this attribute. .. attribute:: origin - (:attr:`__file__`) - - The location the :term:`loader` should use to load the module. - For example, for modules loaded from a .py file this is the filename. - The :term:`finder` should always set this attribute to a meaningful value - for the :term:`loader` to use. In the uncommon case that there is not one - (like for namespace packages), it should be set to ``None``. + The location the :term:`loader` should use to load the module + (see :attr:`__file__` attributes on modules). + For example, for modules loaded from a .py file this is the filename. + The :term:`finder` should always set this attribute to a meaningful value + for the :term:`loader` to use. In the uncommon case that there is not one + (like for namespace packages), it should be set to ``None``. .. attribute:: submodule_search_locations - (:attr:`__path__`) - - The list of locations where the package's submodules will be found. - Most of the time this is a single directory. - The :term:`finder` should set this attribute to a list, even an empty one, to indicate - to the import system that the module is a package. It should be set to ``None`` for - non-package modules. It is set automatically later to a special object for - namespace packages. + The list of locations where the package's submodules will be found + (see :attr:`__path__` attributes on modules). + Most of the time this is a single directory. + The :term:`finder` should set this attribute to a list, even an empty one, to indicate + to the import system that the module is a package. It should be set to ``None`` for + non-package modules. It is set automatically later to a special object for + namespace packages. .. attribute:: loader_state - The :term:`finder` may set this attribute to an object containing additional, - module-specific data to use when loading the module. Otherwise it should be - set to ``None``. + The :term:`finder` may set this attribute to an object containing additional, + module-specific data to use when loading the module. Otherwise it should be + set to ``None``. .. attribute:: cached - (:attr:`__cached__`) - - The filename of a compiled version of the module's code. - The :term:`finder` should always set this attribute but it may be ``None`` - for modules that do not need compiled code stored. + The filename of a compiled version of the module's code + (see :attr:`__cached__` attributes on modules). + The :term:`finder` should always set this attribute but it may be ``None`` + for modules that do not need compiled code stored. .. attribute:: parent - (:attr:`__package__`) - - (Read-only) The fully qualified name of the package the module is in (or the - empty string for a top-level module). - If the module is a package then this is the same as :attr:`name`. + (Read-only) The fully qualified name of the package the module is in (or the + empty string for a top-level module). + See :attr:`__package__` attributes on modules. + If the module is a package then this is the same as :attr:`name`. .. 
attribute:: has_location - ``True`` if the spec's :attr:`origin` refers to a loadable location, - ``False`` otherwise. This value impacts how :attr:`origin` is interpreted - and how the module's :attr:`__file__` is populated. + ``True`` if the spec's :attr:`origin` refers to a loadable location, + ``False`` otherwise. This value impacts how :attr:`origin` is interpreted + and how the module's :attr:`__file__` is populated. .. class:: AppleFrameworkLoader(name, path) @@ -1252,7 +1245,7 @@ find and load modules. be only a single binary per framework, and there can be no executable binary material outside the Frameworks folder. - To accomodate this requirement, when running on iOS, extension module + To accommodate this requirement, when running on iOS, extension module binaries are *not* packaged as ``.so`` files on ``sys.path``, but as individual standalone frameworks. To discover those frameworks, this loader is be registered against the ``.fwork`` file extension, with a ``.fwork``
@@ -1584,20 +1577,34 @@ Note that if ``name`` is a submodule (contains a dot), Importing a source file directly '''''''''''''''''''''''''''''''' -To import a Python source file directly, use the following recipe:: +This recipe should be used with caution: it is an approximation of an import +statement where the file path is specified directly, rather than +:data:`sys.path` being searched. Alternatives should first be considered, +such as modifying :data:`sys.path` when a proper module is required, or using +:func:`runpy.run_path` when the global namespace resulting from running a Python +file is appropriate. - import importlib.util - import sys +To import a Python source file directly from a path, use the following recipe:: - # For illustrative purposes. - import tokenize - file_path = tokenize.__file__ - module_name = tokenize.__name__ + import importlib.util + import sys + + + def import_from_path(module_name, file_path): + spec = importlib.util.spec_from_file_location(module_name, file_path) + module = importlib.util.module_from_spec(spec) + sys.modules[module_name] = module + spec.loader.exec_module(module) + return module - spec = importlib.util.spec_from_file_location(module_name, file_path) - module = importlib.util.module_from_spec(spec) - sys.modules[module_name] = module - spec.loader.exec_module(module) + + # For illustrative purposes only (use of `json` is arbitrary). + import json + file_path = json.__file__ + module_name = json.__name__ + + # Similar outcome as `import json`. + json = import_from_path(module_name, file_path) Implementing lazy imports @@ -1623,7 +1630,6 @@ The example below shows how to implement lazy imports:: False - Setting up an importer ''''''''''''''''''''''
diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 7838eeed284..7ed39ae2fec 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -457,7 +457,7 @@ attributes (see :ref:`import-mod-attrs` for module attributes): .. versionchanged:: 3.8 Functions wrapped in :func:`functools.partial` now return ``True`` if the - wrapped function is a :term:`asynchronous generator` function. + wrapped function is an :term:`asynchronous generator` function. .. versionchanged:: 3.13 Functions wrapped in :func:`functools.partialmethod` now return ``True`` @@ -520,7 +520,7 @@ attributes (see :ref:`import-mod-attrs` for module attributes): has a :meth:`~object.__get__` method, but not a :meth:`~object.__set__` method or a :meth:`~object.__delete__` method. Beyond that, the set of attributes varies.
A :attr:`~definition.__name__` attribute is usually - sensible, and :attr:`!__doc__` often is. + sensible, and :attr:`~definition.__doc__` often is. Methods implemented via descriptors that also pass one of the other tests return ``False`` from the :func:`ismethoddescriptor` test, simply because the @@ -938,7 +938,7 @@ function. .. attribute:: Parameter.kind.description - Describes a enum value of :attr:`Parameter.kind`. + Describes an enum value of :attr:`Parameter.kind`. .. versionadded:: 3.8 @@ -1018,7 +1018,8 @@ function. .. attribute:: BoundArguments.kwargs A dict of keyword arguments values. Dynamically computed from the - :attr:`arguments` attribute. + :attr:`arguments` attribute. Arguments that can be passed positionally + are included in :attr:`args` instead. .. attribute:: BoundArguments.signature @@ -1236,7 +1237,7 @@ Classes and functions This function handles several details for you: * If ``eval_str`` is true, values of type ``str`` will - be un-stringized using :func:`eval()`. This is intended + be un-stringized using :func:`eval`. This is intended for use with stringized annotations (``from __future__ import annotations``). * If ``obj`` doesn't have an annotations dict, returns an @@ -1250,16 +1251,16 @@ Classes and functions * Always, always, always returns a freshly created dict. ``eval_str`` controls whether or not values of type ``str`` are replaced - with the result of calling :func:`eval()` on those values: + with the result of calling :func:`eval` on those values: - * If eval_str is true, :func:`eval()` is called on values of type ``str``. - (Note that ``get_annotations`` doesn't catch exceptions; if :func:`eval()` + * If eval_str is true, :func:`eval` is called on values of type ``str``. + (Note that ``get_annotations`` doesn't catch exceptions; if :func:`eval` raises an exception, it will unwind the stack past the ``get_annotations`` call.) * If eval_str is false (the default), values of type ``str`` are unchanged. - ``globals`` and ``locals`` are passed in to :func:`eval()`; see the documentation - for :func:`eval()` for more information. If ``globals`` or ``locals`` + ``globals`` and ``locals`` are passed in to :func:`eval`; see the documentation + for :func:`eval` for more information. If ``globals`` or ``locals`` is ``None``, this function may replace that value with a context-specific default, contingent on ``type(obj)``: diff --git a/Doc/library/intro.rst b/Doc/library/intro.rst index ffc8939d211..8f76044be48 100644 --- a/Doc/library/intro.rst +++ b/Doc/library/intro.rst @@ -58,7 +58,7 @@ Notes on availability operating system. * If not separately noted, all functions that claim "Availability: Unix" are - supported on macOS and iOS, both of which build on a Unix core. + supported on macOS, iOS and Android, all of which build on a Unix core. * If an availability note contains both a minimum Kernel version and a minimum libc version, then both conditions must hold. For example a feature with note @@ -120,43 +120,57 @@ DOM APIs as well as limited networking capabilities with JavaScript's .. _Pyodide: https://pyodide.org/ .. _PyScript: https://pyscript.net/ +.. _mobile-availability: .. _iOS-availability: -iOS ---- +Mobile platforms +---------------- -iOS is, in most respects, a POSIX operating system. File I/O, socket handling, +Android and iOS are, in most respects, POSIX operating systems. File I/O, socket handling, and threading all behave as they would on any POSIX operating system. 
However, -there are several major differences between iOS and other POSIX systems. - -* iOS can only use Python in "embedded" mode. There is no Python REPL, and no - ability to execute binaries that are part of the normal Python developer - experience, such as :program:`pip`. To add Python code to your iOS app, you must use - the :ref:`Python embedding API ` to add a Python interpreter to an - iOS app created with Xcode. See the :ref:`iOS usage guide ` for - more details. - -* An iOS app cannot use any form of subprocessing, background processing, or - inter-process communication. If an iOS app attempts to create a subprocess, - the process creating the subprocess will either lock up, or crash. An iOS app - has no visibility of other applications that are running, nor any ability to - communicate with other running applications, outside of the iOS-specific APIs - that exist for this purpose. - -* iOS apps have limited access to modify system resources (such as the system +there are several major differences: + +* Mobile platforms can only use Python in "embedded" mode. There is no Python + REPL, and no ability to use separate executables such as :program:`python` or + :program:`pip`. To add Python code to your mobile app, you must use + the :ref:`Python embedding API `. For more details, see + :ref:`using-android` and :ref:`using-ios`. + +* Subprocesses: + + * On Android, creating subprocesses is possible but `officially unsupported + `__. + In particular, Android does not support any part of the System V IPC API, + so :mod:`multiprocessing` is not available. + + * An iOS app cannot use any form of subprocessing, multiprocessing, or + inter-process communication. If an iOS app attempts to create a subprocess, + the process creating the subprocess will either lock up, or crash. An iOS app + has no visibility of other applications that are running, nor any ability to + communicate with other running applications, outside of the iOS-specific APIs + that exist for this purpose. + +* Mobile apps have limited access to modify system resources (such as the system clock). These resources will often be *readable*, but attempts to modify those resources will usually fail. -* iOS apps have a limited concept of console input and output. ``stdout`` and - ``stderr`` *exist*, and content written to ``stdout`` and ``stderr`` will be - visible in logs when running in Xcode, but this content *won't* be recorded - in the system log. If a user who has installed your app provides their app - logs as a diagnostic aid, they will not include any detail written to - ``stdout`` or ``stderr``. +* Console input and output: + + * On Android, the native ``stdout`` and ``stderr`` are not connected to + anything, so Python installs its own streams which redirect messages to the + system log. These can be seen under the tags ``python.stdout`` and + ``python.stderr`` respectively. + + * iOS apps have a limited concept of console output. ``stdout`` and + ``stderr`` *exist*, and content written to ``stdout`` and ``stderr`` will be + visible in logs when running in Xcode, but this content *won't* be recorded + in the system log. If a user who has installed your app provides their app + logs as a diagnostic aid, they will not include any detail written to + ``stdout`` or ``stderr``. - iOS apps have no concept of ``stdin`` at all. While iOS apps can have a - keyboard, this is a software feature, not something that is attached to - ``stdin``. + * Mobile apps have no usable ``stdin`` at all. 
While apps can display an on-screen + keyboard, this is a software feature, not something that is attached to + ``stdin``. - As a result, Python library that involve console manipulation (such as - :mod:`curses` and :mod:`readline`) are not available on iOS. + As a result, Python modules that involve console manipulation (such as + :mod:`curses` and :mod:`readline`) are not available on mobile platforms. diff --git a/Doc/library/io.rst b/Doc/library/io.rst index 748c49968f5..f793d7a7ef9 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -55,7 +55,7 @@ the backing store is natively made of bytes (such as in the case of a file), encoding and decoding of data is made transparently as well as optional translation of platform-specific newline characters. -The easiest way to create a text stream is with :meth:`open()`, optionally +The easiest way to create a text stream is with :meth:`open`, optionally specifying an encoding:: f = open("myfile.txt", "r", encoding="utf-8") @@ -77,7 +77,7 @@ objects. No encoding, decoding, or newline translation is performed. This category of streams can be used for all kinds of non-text data, and also when manual control over the handling of text data is desired. -The easiest way to create a binary stream is with :meth:`open()` with ``'b'`` in +The easiest way to create a binary stream is with :meth:`open` with ``'b'`` in the mode string:: f = open("myfile.jpg", "rb") @@ -950,7 +950,7 @@ Text I/O :class:`TextIOBase`. *encoding* gives the name of the encoding that the stream will be decoded or - encoded with. It defaults to :func:`locale.getencoding()`. + encoded with. It defaults to :func:`locale.getencoding`. ``encoding="locale"`` can be used to specify the current locale's encoding explicitly. See :ref:`io-text-encoding` for more information. @@ -1182,7 +1182,7 @@ re-enter a buffered object which it is already accessing, a :exc:`RuntimeError` is raised. Note this doesn't prohibit a different thread from entering the buffered object. -The above implicitly extends to text files, since the :func:`open()` function +The above implicitly extends to text files, since the :func:`open` function will wrap a buffered object inside a :class:`TextIOWrapper`. This includes -standard streams and therefore affects the built-in :func:`print()` function as +standard streams and therefore affects the built-in :func:`print` function as well. diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst index 0441a7d47b5..be5794588b1 100644 --- a/Doc/library/ipaddress.rst +++ b/Doc/library/ipaddress.rst @@ -1010,7 +1010,7 @@ The module also provides the following module level functions: doesn't make sense. There are some times however, where you may wish to have :mod:`ipaddress` sort these anyway. If you need to do this, you can use - this function as the *key* argument to :func:`sorted()`. + this function as the *key* argument to :func:`sorted`. *obj* is either a network or address object. diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 3dc3f60923a..43e665c3f0d 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -337,7 +337,7 @@ loops that truncate the stream. yield n n += step - When counting with floating point numbers, better accuracy can sometimes be + When counting with floating-point numbers, better accuracy can sometimes be achieved by substituting multiplicative code such as: ``(start + step * i for i in count())``. 
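A minimal sketch of the floating-point accuracy note above, assuming nothing beyond :func:`itertools.count` and :func:`itertools.islice`; the printed values are typical for IEEE-754 doubles::

    from itertools import count, islice

    # Repeated addition of the step accumulates rounding error.
    additive = list(islice(count(0, 0.1), 11))

    # The multiplicative form recomputes each value from start and step.
    multiplicative = list(islice((0 + 0.1 * i for i in count()), 11))

    print(additive[10])        # 0.9999999999999999 (drift after ten additions)
    print(multiplicative[10])  # 1.0

Both forms yield the same sequence conceptually; only the rounding behaviour differs.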
diff --git a/Doc/library/json.rst b/Doc/library/json.rst index 42cb1f850fe..bb7b1852e80 100644 --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -13,7 +13,7 @@ `JSON (JavaScript Object Notation) `_, specified by :rfc:`7159` (which obsoletes :rfc:`4627`) and by -`ECMA-404 `_, +`ECMA-404 `_, is a lightweight data interchange format inspired by `JavaScript `_ object literal syntax (although it is not a strict subset of JavaScript [#rfc-errata]_ ). @@ -241,28 +241,28 @@ Basic Usage *object_hook* is an optional function that will be called with the result of any object literal decoded (a :class:`dict`). The return value of - *object_hook* will be used instead of the :class:`dict`. This feature can be used - to implement custom decoders (e.g. `JSON-RPC `_ - class hinting). + *object_hook* will be used instead of the :class:`dict`. This feature can + be used to implement custom decoders (e.g. `JSON-RPC + `_ class hinting). *object_pairs_hook* is an optional function that will be called with the result of any object literal decoded with an ordered list of pairs. The return value of *object_pairs_hook* will be used instead of the - :class:`dict`. This feature can be used to implement custom decoders. - If *object_hook* is also defined, the *object_pairs_hook* takes priority. + :class:`dict`. This feature can be used to implement custom decoders. If + *object_hook* is also defined, the *object_pairs_hook* takes priority. .. versionchanged:: 3.1 Added support for *object_pairs_hook*. - *parse_float*, if specified, will be called with the string of every JSON - float to be decoded. By default, this is equivalent to ``float(num_str)``. - This can be used to use another datatype or parser for JSON floats - (e.g. :class:`decimal.Decimal`). + *parse_float* is an optional function that will be called with the string of + every JSON float to be decoded. By default, this is equivalent to + ``float(num_str)``. This can be used to use another datatype or parser for + JSON floats (e.g. :class:`decimal.Decimal`). - *parse_int*, if specified, will be called with the string of every JSON int - to be decoded. By default, this is equivalent to ``int(num_str)``. This can - be used to use another datatype or parser for JSON integers - (e.g. :class:`float`). + *parse_int* is an optional function that will be called with the string of + every JSON int to be decoded. By default, this is equivalent to + ``int(num_str)``. This can be used to use another datatype or parser for + JSON integers (e.g. :class:`float`). .. versionchanged:: 3.11 The default *parse_int* of :func:`int` now limits the maximum length of @@ -270,10 +270,9 @@ Basic Usage conversion length limitation ` to help avoid denial of service attacks. - *parse_constant*, if specified, will be called with one of the following - strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. - This can be used to raise an exception if invalid JSON numbers - are encountered. + *parse_constant* is an optional function that will be called with one of the + following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be + used to raise an exception if invalid JSON numbers are encountered. .. versionchanged:: 3.1 *parse_constant* doesn't get called on 'null', 'true', 'false' anymore. @@ -345,34 +344,33 @@ Encoders and Decoders It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as their corresponding ``float`` values, which is outside the JSON spec. 
- *object_hook*, if specified, will be called with the result of every JSON - object decoded and its return value will be used in place of the given - :class:`dict`. This can be used to provide custom deserializations (e.g. to - support `JSON-RPC `_ class hinting). + *object_hook* is an optional function that will be called with the result of + every JSON object decoded and its return value will be used in place of the + given :class:`dict`. This can be used to provide custom deserializations + (e.g. to support `JSON-RPC `_ class hinting). - *object_pairs_hook*, if specified will be called with the result of every - JSON object decoded with an ordered list of pairs. The return value of - *object_pairs_hook* will be used instead of the :class:`dict`. This - feature can be used to implement custom decoders. If *object_hook* is also - defined, the *object_pairs_hook* takes priority. + *object_pairs_hook* is an optional function that will be called with the + result of every JSON object decoded with an ordered list of pairs. The + return value of *object_pairs_hook* will be used instead of the + :class:`dict`. This feature can be used to implement custom decoders. If + *object_hook* is also defined, the *object_pairs_hook* takes priority. .. versionchanged:: 3.1 Added support for *object_pairs_hook*. - *parse_float*, if specified, will be called with the string of every JSON - float to be decoded. By default, this is equivalent to ``float(num_str)``. - This can be used to use another datatype or parser for JSON floats - (e.g. :class:`decimal.Decimal`). + *parse_float* is an optional function that will be called with the string of + every JSON float to be decoded. By default, this is equivalent to + ``float(num_str)``. This can be used to use another datatype or parser for + JSON floats (e.g. :class:`decimal.Decimal`). - *parse_int*, if specified, will be called with the string of every JSON int - to be decoded. By default, this is equivalent to ``int(num_str)``. This can - be used to use another datatype or parser for JSON integers - (e.g. :class:`float`). + *parse_int* is an optional function that will be called with the string of + every JSON int to be decoded. By default, this is equivalent to + ``int(num_str)``. This can be used to use another datatype or parser for + JSON integers (e.g. :class:`float`). - *parse_constant*, if specified, will be called with one of the following - strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. - This can be used to raise an exception if invalid JSON numbers - are encountered. + *parse_constant* is an optional function that will be called with one of the + following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This can be + used to raise an exception if invalid JSON numbers are encountered. If *strict* is false (``True`` is the default), then control characters will be allowed inside strings. Control characters in this context are @@ -559,7 +557,7 @@ Standard Compliance and Interoperability ---------------------------------------- The JSON format is specified by :rfc:`7159` and by -`ECMA-404 `_. +`ECMA-404 `_. This section details this module's level of compliance with the RFC. For simplicity, :class:`JSONEncoder` and :class:`JSONDecoder` subclasses, and parameters other than those explicitly mentioned, are not considered. 
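A minimal sketch of the decoder hooks documented above, assuming only the standard :mod:`json` and :mod:`decimal` modules (the sample document is arbitrary)::

    import json
    from decimal import Decimal

    doc = '{"price": 9.95, "quantity": 3}'

    # parse_float routes every JSON float through Decimal instead of float();
    # parse_int can be used the same way for integer literals.
    data = json.loads(doc, parse_float=Decimal)
    print(data)   # {'price': Decimal('9.95'), 'quantity': 3}

    # object_pairs_hook receives each JSON object as an ordered list of
    # (key, value) pairs, and its return value replaces the dict.
    pairs = json.loads(doc, object_pairs_hook=lambda items: items)
    print(pairs)  # [('price', 9.95), ('quantity', 3)]

Because *object_pairs_hook* takes priority over *object_hook*, passing both only invokes the former.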
diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst index 0a8cbd4f95f..0246f991570 100644 --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -424,7 +424,7 @@ The :mod:`locale` module defines the following exception and functions: .. function:: format_string(format, val, grouping=False, monetary=False) Formats a number *val* according to the current :const:`LC_NUMERIC` setting. - The format follows the conventions of the ``%`` operator. For floating point + The format follows the conventions of the ``%`` operator. For floating-point values, the decimal point is modified if appropriate. If *grouping* is ``True``, also takes the grouping into account. @@ -455,7 +455,7 @@ The :mod:`locale` module defines the following exception and functions: .. function:: str(float) - Formats a floating point number using the same format as the built-in function + Formats a floating-point number using the same format as the built-in function ``str(float)``, but takes the decimal point into account. diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst index dfbf0b1cf2f..317ca872824 100644 --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -69,7 +69,7 @@ in :mod:`logging` itself) and defining handlers which are declared either in dictConfigClass(config).configure() For example, a subclass of :class:`DictConfigurator` could call - ``DictConfigurator.__init__()`` in its own :meth:`__init__()`, then + ``DictConfigurator.__init__()`` in its own :meth:`__init__`, then set up custom prefixes which would be usable in the subsequent :meth:`configure` call. :attr:`dictConfigClass` would be bound to this new subclass, and then :func:`dictConfig` could be called exactly as @@ -753,9 +753,12 @@ The ``queue`` and ``listener`` keys are optional. If the ``queue`` key is present, the corresponding value can be one of the following: -* An actual instance of :class:`queue.Queue` or a subclass thereof. This is of course - only possible if you are constructing or modifying the configuration dictionary in - code. +* An object implementing the :class:`queue.Queue` public API. For instance, + this may be an actual instance of :class:`queue.Queue` or a subclass thereof, + or a proxy obtained by :meth:`multiprocessing.managers.SyncManager.Queue`. + + This is of course only possible if you are constructing or modifying + the configuration dictionary in code. * A string that resolves to a callable which, when called with no arguments, returns the :class:`queue.Queue` instance to use. That callable could be a diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index 0624423c950..8ab107d2a33 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -304,7 +304,8 @@ in a module, ``__name__`` is the module's name in the Python package namespace. parameter mirrors the equivalent one in the :mod:`warnings` module. The fourth keyword argument is *extra* which can be used to pass a - dictionary which is used to populate the __dict__ of the :class:`LogRecord` + dictionary which is used to populate the :attr:`~object.__dict__` of the + :class:`LogRecord` created for the logging event with user-defined attributes. These custom attributes can then be used as you like. For example, they could be incorporated into logged messages. For example:: @@ -352,10 +353,6 @@ in a module, ``__name__`` is the module's name in the Python package namespace. .. versionchanged:: 3.8 The *stacklevel* parameter was added. - .. 
versionchanged:: 3.13 - Remove the undocumented ``warn()`` method which was an alias to the - :meth:`warning` method. - .. method:: Logger.info(msg, *args, **kwargs) @@ -368,6 +365,10 @@ in a module, ``__name__`` is the module's name in the Python package namespace. Logs a message with level :const:`WARNING` on this logger. The arguments are interpreted as for :meth:`debug`. + .. note:: There is an obsolete method ``warn`` which is functionally + identical to ``warning``. As ``warn`` is deprecated, please do not use + it - use ``warning`` instead. + .. method:: Logger.error(msg, *args, **kwargs) Logs a message with level :const:`ERROR` on this logger. The arguments are @@ -1098,11 +1099,11 @@ information into logging calls. For a usage example, see the section on .. attribute:: manager - Delegates to the underlying :attr:`!manager`` on *logger*. + Delegates to the underlying :attr:`!manager` on *logger*. .. attribute:: _log - Delegates to the underlying :meth:`!_log`` method on *logger*. + Delegates to the underlying :meth:`!_log` method on *logger*. In addition to the above, :class:`LoggerAdapter` supports the following methods of :class:`Logger`: :meth:`~Logger.debug`, :meth:`~Logger.info`, @@ -1124,11 +1125,6 @@ information into logging calls. For a usage example, see the section on Attribute :attr:`!manager` and method :meth:`!_log` were added, which delegate to the underlying logger and allow adapters to be nested. - .. versionchanged:: 3.13 - - Remove the undocumented :meth:`!warn`` method which was an alias to the - :meth:`!warning` method. - .. versionchanged:: 3.13 The *merge_extra* argument was added. @@ -1224,10 +1220,6 @@ functions. identical to ``warning``. As ``warn`` is deprecated, please do not use it - use ``warning`` instead. - .. versionchanged:: 3.13 - Remove the undocumented ``warn()`` function which was an alias to the - :func:`warning` function. - .. function:: error(msg, *args, **kwargs) diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst index 40ea71cd342..abb32f9bf34 100644 --- a/Doc/library/mailbox.rst +++ b/Doc/library/mailbox.rst @@ -1387,7 +1387,7 @@ When an :class:`!MHMessage` instance is created based upon a .. method:: get_visible() - Return an :class:`Message` instance whose headers are the message's + Return a :class:`Message` instance whose headers are the message's visible headers and whose body is empty. diff --git a/Doc/library/marshal.rst b/Doc/library/marshal.rst index f9ba4d554b0..9e4606df0f7 100644 --- a/Doc/library/marshal.rst +++ b/Doc/library/marshal.rst @@ -42,8 +42,8 @@ supports a substantially wider range of objects than marshal. Not all Python object types are supported; in general, only objects whose value is independent from a particular invocation of Python can be written and read by -this module. The following types are supported: booleans, integers, floating -point numbers, complex numbers, strings, bytes, bytearrays, tuples, lists, sets, +this module. The following types are supported: booleans, integers, floating-point +numbers, complex numbers, strings, bytes, bytearrays, tuples, lists, sets, frozensets, dictionaries, and code objects (if *allow_code* is true), where it should be understood that tuples, lists, sets, frozensets and dictionaries are only supported as long as @@ -142,7 +142,7 @@ In addition, the following constants are defined: Indicates the format that the module uses. Version 0 is the historical format, version 1 shares interned strings and version 2 uses a binary format - for floating point numbers. 
+ for floating-point numbers. Version 3 adds support for object instancing and recursion. The current version is 4.
diff --git a/Doc/library/math.rst b/Doc/library/math.rst index 316144992d6..dd2ba419b5b 100644 --- a/Doc/library/math.rst +++ b/Doc/library/math.rst @@ -123,7 +123,7 @@ Number-theoretic and representation functions .. function:: fsum(iterable) - Return an accurate floating point sum of values in the iterable. Avoids + Return an accurate floating-point sum of values in the iterable. Avoids loss of precision by tracking multiple intermediate partial sums. The algorithm's accuracy depends on IEEE-754 arithmetic guarantees and the @@ -133,7 +133,7 @@ Number-theoretic and representation functions least significant bit. For further discussion and two alternative approaches, see the `ASPN cookbook - recipes for accurate floating point summation + recipes for accurate floating-point summation `_\. @@ -304,7 +304,7 @@ Number-theoretic and representation functions If the result of the remainder operation is zero, that zero will have the same sign as *x*. - On platforms using IEEE 754 binary floating-point, the result of this + On platforms using IEEE 754 binary floating point, the result of this operation is always exactly representable: no rounding error is introduced. .. versionadded:: 3.7
diff --git a/Doc/library/mimetypes.rst b/Doc/library/mimetypes.rst index 91e8c30f860..8ad4850584a 100644 --- a/Doc/library/mimetypes.rst +++ b/Doc/library/mimetypes.rst @@ -295,3 +295,13 @@ than one MIME-type database; it provides an interface similar to the one of the types, else to the list of non-standard types. .. versionadded:: 3.2 + + + .. method:: MimeTypes.add_type(type, ext, strict=True) + + Add a mapping from the MIME type *type* to the extension *ext*. When the + extension is already known, the new type will replace the old one. When the type + is already known, the extension will be added to the list of known extensions. + + When *strict* is ``True`` (the default), the mapping will be added to the + official MIME types, otherwise to the non-standard ones.
diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 49762491bae..80d6e4dae24 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -8,7 +8,7 @@ -------------- -.. include:: ../includes/wasm-ios-notavail.rst +.. include:: ../includes/wasm-mobile-notavail.rst Introduction ------------ @@ -254,6 +254,7 @@ processes: p.join() Queues are thread and process safe. + Any object put into a :mod:`~multiprocessing` queue will be serialized. **Pipes** @@ -281,6 +282,8 @@ processes: of corruption from processes using different ends of the pipe at the same time. + The :meth:`~Connection.send` method serializes the object and + :meth:`~Connection.recv` re-creates the object. Synchronization between processes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -502,7 +505,7 @@ The :mod:`multiprocessing` package mostly replicates the API of the The constructor should always be called with keyword arguments. *group* should always be ``None``; it exists solely for compatibility with :class:`threading.Thread`. *target* is the callable object to be invoked by - the :meth:`run()` method. It defaults to ``None``, meaning nothing is + the :meth:`run` method. It defaults to ``None``, meaning nothing is called. *name* is the process name (see :attr:`name` for more details). *args* is the argument tuple for the target invocation. *kwargs* is a dictionary of keyword arguments for the target invocation.
If provided, @@ -639,7 +642,7 @@ The :mod:`multiprocessing` package mostly replicates the API of the You can use this value if you want to wait on several events at once using :func:`multiprocessing.connection.wait`. Otherwise - calling :meth:`join()` is simpler. + calling :meth:`join` is simpler. On Windows, this is an OS handle usable with the ``WaitForSingleObject`` and ``WaitForMultipleObjects`` family of API calls. On POSIX, this is @@ -666,7 +669,7 @@ The :mod:`multiprocessing` package mostly replicates the API of the .. method:: kill() - Same as :meth:`terminate()` but using the ``SIGKILL`` signal on POSIX. + Same as :meth:`terminate` but using the ``SIGKILL`` signal on POSIX. .. versionadded:: 3.7 @@ -709,7 +712,7 @@ The :mod:`multiprocessing` package mostly replicates the API of the .. exception:: BufferTooShort - Exception raised by :meth:`Connection.recv_bytes_into()` when the supplied + Exception raised by :meth:`Connection.recv_bytes_into` when the supplied buffer object is too small for the message read. If ``e`` is an instance of :exc:`BufferTooShort` then ``e.args[0]`` will give @@ -745,6 +748,11 @@ If you use :class:`JoinableQueue` then you **must** call semaphore used to count the number of unfinished tasks may eventually overflow, raising an exception. +One difference from other Python queue implementations is that :mod:`multiprocessing` +queues serialize all objects that are put into them using :mod:`pickle`. +The object returned by the get method is a re-created object that does not share memory +with the original object. + Note that one can also create a shared queue by using a manager object -- see :ref:`multiprocessing-managers`. @@ -811,6 +819,8 @@ For an example of the usage of queues for interprocess communication see used for receiving messages and ``conn2`` can only be used for sending messages. + The :meth:`~multiprocessing.Connection.send` method serializes the object using + :mod:`pickle` and :meth:`~multiprocessing.Connection.recv` re-creates the object. .. class:: Queue([maxsize]) @@ -837,6 +847,8 @@ For an example of the usage of queues for interprocess communication see Return ``True`` if the queue is empty, ``False`` otherwise. Because of multithreading/multiprocessing semantics, this is not reliable. + May raise an :exc:`OSError` on closed queues. (not guaranteed) + .. method:: full() Return ``True`` if the queue is full, ``False`` otherwise. Because of @@ -940,6 +952,8 @@ For an example of the usage of queues for interprocess communication see Return ``True`` if the queue is empty, ``False`` otherwise. + Always raises an :exc:`OSError` if the SimpleQueue is closed. + .. method:: get() Remove and return an item from the queue. @@ -1459,17 +1473,6 @@ object -- see :ref:`multiprocessing-managers`. On macOS, ``sem_timedwait`` is unsupported, so calling ``acquire()`` with a timeout will emulate that function's behavior using a sleeping loop. -.. note:: - - If the SIGINT signal generated by :kbd:`Ctrl-C` arrives while the main thread is - blocked by a call to :meth:`BoundedSemaphore.acquire`, :meth:`Lock.acquire`, - :meth:`RLock.acquire`, :meth:`Semaphore.acquire`, :meth:`Condition.acquire` - or :meth:`Condition.wait` then the call will be immediately interrupted and - :exc:`KeyboardInterrupt` will be raised. - - This differs from the behaviour of :mod:`threading` where SIGINT will be - ignored while the equivalent blocking calls are in progress. - ..
note:: Some of this package's functionality requires a functioning shared semaphore @@ -2958,7 +2961,7 @@ Beware of replacing :data:`sys.stdin` with a "file like object" resulting in a bad file descriptor error, but introduces a potential danger to applications which replace :func:`sys.stdin` with a "file-like object" with output buffering. This danger is that if multiple processes call - :meth:`~io.IOBase.close()` on this file-like object, it could result in the same + :meth:`~io.IOBase.close` on this file-like object, it could result in the same data being flushed to the object multiple times, resulting in corruption. If you write a file-like object and implement your own caching, you can diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst index 3e96259f94d..74a49a8fb33 100644 --- a/Doc/library/optparse.rst +++ b/Doc/library/optparse.rst @@ -1352,7 +1352,7 @@ The whole point of creating and populating an OptionParser is to call its the list of arguments to process (default: ``sys.argv[1:]``) ``values`` - an :class:`Values` object to store option arguments in (default: a + a :class:`Values` object to store option arguments in (default: a new instance of :class:`Values`) -- if you give an existing object, the option defaults will not be initialized on it diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index b582321515d..ecbbc1d7605 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -81,7 +81,7 @@ the :mod:`glob` module.) Return the longest common sub-path of each pathname in the iterable *paths*. Raise :exc:`ValueError` if *paths* contain both absolute - and relative pathnames, the *paths* are on the different drives or + and relative pathnames, if *paths* are on different drives, or if *paths* is empty. Unlike :func:`commonprefix`, this returns a valid path. @@ -201,14 +201,14 @@ the :mod:`glob` module.) .. function:: getatime(path) - Return the time of last access of *path*. The return value is a floating point number giving + Return the time of last access of *path*. The return value is a floating-point number giving the number of seconds since the epoch (see the :mod:`time` module). Raise :exc:`OSError` if the file does not exist or is inaccessible. .. function:: getmtime(path) - Return the time of last modification of *path*. The return value is a floating point number + Return the time of last modification of *path*. The return value is a floating-point number giving the number of seconds since the epoch (see the :mod:`time` module). Raise :exc:`OSError` if the file does not exist or is inaccessible. @@ -389,7 +389,7 @@ the :mod:`glob` module.) that contains symbolic links. On Windows, it converts forward slashes to backward slashes. To normalize case, use :func:`normcase`. - .. note:: + .. note:: On POSIX systems, in accordance with `IEEE Std 1003.1 2013 Edition; 4.13 Pathname Resolution `_, if a pathname begins with exactly two slashes, the first component diff --git a/Doc/library/os.rst b/Doc/library/os.rst index b93b06d4e72..b93e01e9986 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -34,8 +34,8 @@ Notes on the availability of these functions: * On VxWorks, os.popen, os.fork, os.execv and os.spawn*p* are not supported. -* On WebAssembly platforms, and on iOS, large parts of the :mod:`os` module are - not available or behave differently. API related to processes (e.g. +* On WebAssembly platforms, Android and iOS, large parts of the :mod:`os` module are + not available or behave differently. APIs related to processes (e.g. 
:func:`~os.fork`, :func:`~os.execve`) and resources (e.g. :func:`~os.nice`) are not available. Others like :func:`~os.getuid` and :func:`~os.getpid` are emulated or stubs. WebAssembly platforms also lack support for signals (e.g. @@ -113,8 +113,8 @@ of the UTF-8 encoding: * Use UTF-8 as the :term:`filesystem encoding `. -* :func:`sys.getfilesystemencoding()` returns ``'utf-8'``. -* :func:`locale.getpreferredencoding()` returns ``'utf-8'`` (the *do_setlocale* +* :func:`sys.getfilesystemencoding` returns ``'utf-8'``. +* :func:`locale.getpreferredencoding` returns ``'utf-8'`` (the *do_setlocale* argument has no effect). * :data:`sys.stdin`, :data:`sys.stdout`, and :data:`sys.stderr` all use UTF-8 as their text encoding, with the ``surrogateescape`` @@ -133,8 +133,8 @@ level APIs also exhibit different default behaviours: * Command line arguments, environment variables and filenames are decoded to text using the UTF-8 encoding. -* :func:`os.fsdecode()` and :func:`os.fsencode()` use the UTF-8 encoding. -* :func:`open()`, :func:`io.open()`, and :func:`codecs.open()` use the UTF-8 +* :func:`os.fsdecode` and :func:`os.fsencode` use the UTF-8 encoding. +* :func:`open`, :func:`io.open`, and :func:`codecs.open` use the UTF-8 encoding by default. However, they still use the strict error handler by default so that attempting to open a binary file in text mode is likely to raise an exception rather than producing nonsense data. @@ -542,7 +542,7 @@ process and user. the groups of which the specified username is a member, plus the specified group id. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. versionadded:: 3.2 @@ -576,21 +576,21 @@ process and user. Set the current process's effective group id. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. function:: seteuid(euid, /) Set the current process's effective user id. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. function:: setgid(gid, /) Set the current process' group id. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. function:: setgroups(groups, /) @@ -684,14 +684,14 @@ process and user. Set the current process's real and effective group ids. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. function:: setresgid(rgid, egid, sgid, /) Set the current process's real, effective, and saved group ids. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. versionadded:: 3.2 @@ -700,7 +700,7 @@ process and user. Set the current process's real, effective, and saved user ids. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. versionadded:: 3.2 @@ -709,7 +709,7 @@ process and user. Set the current process's real and effective user ids. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. function:: getsid(pid, /) @@ -732,7 +732,7 @@ process and user. Set the current process's user id. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. placed in this section since it relates to errno.... a little weak @@ -785,7 +785,7 @@ process and user. ``socket.gethostbyaddr(socket.gethostname())``. On macOS, iOS and Android, this returns the *kernel* name and version (i.e., - ``'Darwin'`` on macOS and iOS; ``'Linux'`` on Android). :func:`platform.uname()` + ``'Darwin'`` on macOS and iOS; ``'Linux'`` on Android). 
:func:`platform.uname` can be used to get the user-facing operating system name and version on iOS and Android. @@ -1551,7 +1551,7 @@ or `the MSDN `_ on Windo .. function:: pwritev(fd, buffers, offset, flags=0, /) - Write the *buffers* contents to file descriptor *fd* at a offset *offset*, + Write the *buffers* contents to file descriptor *fd* at an offset *offset*, leaving the file offset unchanged. *buffers* must be a sequence of :term:`bytes-like objects `. Buffers are processed in array order. Entire contents of the first buffer is written before @@ -2178,7 +2178,7 @@ features: Change the root directory of the current process to *path*. - .. availability:: Unix, not WASI. + .. availability:: Unix, not WASI, not Android. .. versionchanged:: 3.6 Accepts a :term:`path-like object`. @@ -2830,7 +2830,7 @@ features: .. versionchanged:: 3.6 Added support for the :term:`context manager` protocol and the - :func:`~scandir.close()` method. If a :func:`scandir` iterator is neither + :func:`~scandir.close` method. If a :func:`scandir` iterator is neither exhausted nor explicitly closed a :exc:`ResourceWarning` will be emitted in its destructor. @@ -3775,7 +3775,7 @@ features: new file descriptor is :ref:`non-inheritable `. *initval* is the initial value of the event counter. The initial value - must be an 32 bit unsigned integer. Please note that the initial value is + must be a 32 bit unsigned integer. Please note that the initial value is limited to a 32 bit unsigned int although the event counter is an unsigned 64 bit integer with a maximum value of 2\ :sup:`64`\ -\ 2. @@ -3854,7 +3854,7 @@ features: .. data:: EFD_SEMAPHORE - Provide semaphore-like semantics for reads from a :func:`eventfd` file + Provide semaphore-like semantics for reads from an :func:`eventfd` file descriptor. On read the internal counter is decremented by one. .. availability:: Linux >= 2.6.30 @@ -3862,6 +3862,8 @@ features: .. versionadded:: 3.10 +.. _os-timerfd: + Timer File Descriptors ~~~~~~~~~~~~~~~~~~~~~~ @@ -4282,7 +4284,7 @@ to be ignored. .. audit-event:: os.exec path,args,env os.execl - .. availability:: Unix, Windows, not WASI, not iOS. + .. availability:: Unix, Windows, not WASI, not Android, not iOS. .. versionchanged:: 3.3 Added support for specifying *path* as an open file descriptor @@ -4485,7 +4487,7 @@ written in Python, such as a mail server's external command delivery program. for technical details of why we're surfacing this longstanding platform compatibility problem to developers. - .. availability:: POSIX, not WASI, not iOS. + .. availability:: POSIX, not WASI, not Android, not iOS. .. function:: forkpty() @@ -4512,7 +4514,7 @@ written in Python, such as a mail server's external command delivery program. threads, this now raises a :exc:`DeprecationWarning`. See the longer explanation on :func:`os.fork`. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: kill(pid, sig, /) @@ -4570,7 +4572,7 @@ written in Python, such as a mail server's external command delivery program. See the :manpage:`pidfd_open(2)` man page for more details. - .. availability:: Linux >= 5.3 + .. availability:: Linux >= 5.3, Android >= :func:`build-time ` API level 31 .. versionadded:: 3.9 .. data:: PIDFD_NONBLOCK @@ -4621,7 +4623,7 @@ written in Python, such as a mail server's external command delivery program. documentation for more powerful ways to manage and communicate with subprocesses. - .. availability:: not WASI, not iOS. + .. 
availability:: not WASI, not Android, not iOS. .. note:: The :ref:`Python UTF-8 Mode ` affects encodings used @@ -4729,7 +4731,7 @@ written in Python, such as a mail server's external command delivery program. ``os.POSIX_SPAWN_CLOSEFROM`` is available on platforms where :c:func:`!posix_spawn_file_actions_addclosefrom_np` exists. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: posix_spawnp(path, argv, env, *, file_actions=None, \ setpgroup=None, resetids=False, setsid=False, setsigmask=(), \ @@ -4745,7 +4747,7 @@ written in Python, such as a mail server's external command delivery program. .. versionadded:: 3.8 - .. availability:: POSIX, not WASI, not iOS. + .. availability:: POSIX, not WASI, not Android, not iOS. See :func:`posix_spawn` documentation. @@ -4778,7 +4780,7 @@ written in Python, such as a mail server's external command delivery program. There is no way to unregister a function. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. versionadded:: 3.7 @@ -4847,7 +4849,7 @@ written in Python, such as a mail server's external command delivery program. .. audit-event:: os.spawn mode,path,args,env os.spawnl - .. availability:: Unix, Windows, not WASI, not iOS. + .. availability:: Unix, Windows, not WASI, not Android, not iOS. :func:`spawnlp`, :func:`spawnlpe`, :func:`spawnvp` and :func:`spawnvpe` are not available on Windows. :func:`spawnle` and @@ -4971,7 +4973,7 @@ written in Python, such as a mail server's external command delivery program. .. audit-event:: os.system command os.system - .. availability:: Unix, Windows, not WASI, not iOS. + .. availability:: Unix, Windows, not WASI, not Android, not iOS. .. function:: times() @@ -5015,7 +5017,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exit code. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. seealso:: @@ -5049,7 +5051,7 @@ written in Python, such as a mail server's external command delivery program. Otherwise, if there are no matching children that could be waited for, :exc:`ChildProcessError` is raised. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. versionadded:: 3.3 @@ -5090,7 +5092,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exit code. - .. availability:: Unix, Windows, not WASI, not iOS. + .. availability:: Unix, Windows, not WASI, not Android, not iOS. .. versionchanged:: 3.5 If the system call is interrupted and the signal handler does not raise an @@ -5110,7 +5112,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exitcode. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: wait4(pid, options) @@ -5124,7 +5126,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exitcode. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. 
data:: P_PID @@ -5141,7 +5143,7 @@ written in Python, such as a mail server's external command delivery program. * :data:`!P_PIDFD` - wait for the child identified by the file descriptor *id* (a process file descriptor created with :func:`pidfd_open`). - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. note:: :data:`!P_PIDFD` is only available on Linux >= 5.4. @@ -5156,7 +5158,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitid` causes child processes to be reported if they have been continued from a job control stop since they were last reported. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. data:: WEXITED @@ -5167,7 +5169,7 @@ written in Python, such as a mail server's external command delivery program. The other ``wait*`` functions always report children that have terminated, so this option is not available for them. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. versionadded:: 3.3 @@ -5179,7 +5181,7 @@ written in Python, such as a mail server's external command delivery program. This option is not available for the other ``wait*`` functions. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. versionadded:: 3.3 @@ -5192,7 +5194,7 @@ written in Python, such as a mail server's external command delivery program. This option is not available for :func:`waitid`. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. data:: WNOHANG @@ -5201,7 +5203,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitid` to return right away if no child process status is available immediately. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. data:: WNOWAIT @@ -5211,7 +5213,7 @@ written in Python, such as a mail server's external command delivery program. This option is not available for the other ``wait*`` functions. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. data:: CLD_EXITED @@ -5224,7 +5226,7 @@ written in Python, such as a mail server's external command delivery program. These are the possible values for :attr:`!si_code` in the result returned by :func:`waitid`. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. versionadded:: 3.3 @@ -5259,7 +5261,7 @@ written in Python, such as a mail server's external command delivery program. :func:`WIFEXITED`, :func:`WEXITSTATUS`, :func:`WIFSIGNALED`, :func:`WTERMSIG`, :func:`WIFSTOPPED`, :func:`WSTOPSIG` functions. - .. availability:: Unix, Windows, not WASI, not iOS. + .. availability:: Unix, Windows, not WASI, not Android, not iOS. .. versionadded:: 3.9 @@ -5275,7 +5277,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFSIGNALED` is true. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: WIFCONTINUED(status) @@ -5286,7 +5288,7 @@ used to determine the disposition of a process. See :data:`WCONTINUED` option. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: WIFSTOPPED(status) @@ -5298,14 +5300,14 @@ used to determine the disposition of a process. 
done using :data:`WUNTRACED` option or when the process is being traced (see :manpage:`ptrace(2)`). - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: WIFSIGNALED(status) Return ``True`` if the process was terminated by a signal, otherwise return ``False``. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: WIFEXITED(status) @@ -5314,7 +5316,7 @@ used to determine the disposition of a process. by calling ``exit()`` or ``_exit()``, or by returning from ``main()``; otherwise return ``False``. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: WEXITSTATUS(status) @@ -5323,7 +5325,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFEXITED` is true. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: WSTOPSIG(status) @@ -5332,7 +5334,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFSTOPPED` is true. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. .. function:: WTERMSIG(status) @@ -5341,7 +5343,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFSIGNALED` is true. - .. availability:: Unix, not WASI, not iOS. + .. availability:: Unix, not WASI, not Android, not iOS. Interface to the scheduler diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 3b48d25f96d..ee8a9086d5e 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -21,6 +21,12 @@ inherit from pure paths but also provide I/O operations. .. image:: pathlib-inheritance.png :align: center :class: invert-in-dark-mode + :alt: Inheritance diagram showing the classes available in pathlib. The + most basic class is PurePath, which has three direct subclasses: + PurePosixPath, PureWindowsPath, and Path. Further to these four + classes, there are two classes that use multiple inheritance: + PosixPath subclasses PurePosixPath and Path, and WindowsPath + subclasses PureWindowsPath and Path. If you've never used this module before or just aren't sure which class is right for your task, :class:`Path` is most likely what you need. It instantiates @@ -172,8 +178,8 @@ we also call *flavours*: A subclass of :class:`PurePath`, this path flavour represents non-Windows filesystem paths:: - >>> PurePosixPath('/etc') - PurePosixPath('/etc') + >>> PurePosixPath('/etc/hosts') + PurePosixPath('/etc/hosts') *pathsegments* is specified similarly to :class:`PurePath`. @@ -182,8 +188,8 @@ we also call *flavours*: A subclass of :class:`PurePath`, this path flavour represents Windows filesystem paths, including `UNC paths`_:: - >>> PureWindowsPath('c:/Program Files/') - PureWindowsPath('c:/Program Files') + >>> PureWindowsPath('c:/', 'Users', 'Ximénez') + PureWindowsPath('c:/Users/Ximénez') >>> PureWindowsPath('//server/share/file') PureWindowsPath('//server/share/file') @@ -764,8 +770,8 @@ calls on path objects. There are three ways to instantiate concrete paths: A subclass of :class:`Path` and :class:`PurePosixPath`, this class represents concrete non-Windows filesystem paths:: - >>> PosixPath('/etc') - PosixPath('/etc') + >>> PosixPath('/etc/hosts') + PosixPath('/etc/hosts') *pathsegments* is specified similarly to :class:`PurePath`. 
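A minimal sketch of the pure/concrete split described above; which concrete class ``Path()`` returns depends on the platform the snippet runs on::

    from pathlib import Path, PurePosixPath, PureWindowsPath

    # Path() instantiates the concrete flavour for the running platform:
    # PosixPath on non-Windows systems, WindowsPath on Windows.
    p = Path('tests')
    print(type(p).__name__)

    # Pure paths never touch the filesystem, so either flavour can be
    # constructed and manipulated on any platform.
    print(PurePosixPath('/etc') / 'hosts')             # /etc/hosts
    print(PureWindowsPath('c:/', 'Users', 'Ximénez'))  # c:\Users\Ximénez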
@@ -779,8 +785,8 @@ calls on path objects. There are three ways to instantiate concrete paths: A subclass of :class:`Path` and :class:`PureWindowsPath`, this class represents concrete Windows filesystem paths:: - >>> WindowsPath('c:/Program Files/') - WindowsPath('c:/Program Files') + >>> WindowsPath('c:/', 'Users', 'Ximénez') + WindowsPath('c:/Users/Ximénez') *pathsegments* is specified similarly to :class:`PurePath`. @@ -872,22 +878,123 @@ conforming to :rfc:`8089`. it strictly impure. +Expanding and resolving paths +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. classmethod:: Path.home() + + Return a new path object representing the user's home directory (as + returned by :func:`os.path.expanduser` with ``~`` construct). If the home + directory can't be resolved, :exc:`RuntimeError` is raised. + + :: + + >>> Path.home() + PosixPath('/home/antoine') + + .. versionadded:: 3.5 + + +.. method:: Path.expanduser() + + Return a new path with expanded ``~`` and ``~user`` constructs, + as returned by :meth:`os.path.expanduser`. If a home directory can't be + resolved, :exc:`RuntimeError` is raised. + + :: + + >>> p = PosixPath('~/films/Monty Python') + >>> p.expanduser() + PosixPath('/home/eric/films/Monty Python') + + .. versionadded:: 3.5 + + +.. classmethod:: Path.cwd() + + Return a new path object representing the current directory (as returned + by :func:`os.getcwd`):: + + >>> Path.cwd() + PosixPath('/home/antoine/pathlib') + + +.. method:: Path.absolute() + + Make the path absolute, without normalization or resolving symlinks. + Returns a new path object:: + + >>> p = Path('tests') + >>> p + PosixPath('tests') + >>> p.absolute() + PosixPath('/home/antoine/pathlib/tests') + + +.. method:: Path.resolve(strict=False) + + Make the path absolute, resolving any symlinks. A new path object is + returned:: + + >>> p = Path() + >>> p + PosixPath('.') + >>> p.resolve() + PosixPath('/home/antoine/pathlib') + + "``..``" components are also eliminated (this is the only method to do so):: + + >>> p = Path('docs/../setup.py') + >>> p.resolve() + PosixPath('/home/antoine/pathlib/setup.py') + + If a path doesn't exist or a symlink loop is encountered, and *strict* is + ``True``, :exc:`OSError` is raised. If *strict* is ``False``, the path is + resolved as far as possible and any remainder is appended without checking + whether it exists. + + .. versionchanged:: 3.6 + The *strict* parameter was added (pre-3.6 behavior is strict). + + .. versionchanged:: 3.13 + Symlink loops are treated like other errors: :exc:`OSError` is raised in + strict mode, and no exception is raised in non-strict mode. In previous + versions, :exc:`RuntimeError` is raised no matter the value of *strict*. + + +.. method:: Path.readlink() + + Return the path to which the symbolic link points (as returned by + :func:`os.readlink`):: + + >>> p = Path('mylink') + >>> p.symlink_to('setup.py') + >>> p.readlink() + PosixPath('setup.py') + + .. versionadded:: 3.9 + + .. versionchanged:: 3.13 + Raises :exc:`UnsupportedOperation` if :func:`os.readlink` is not + available. In previous versions, :exc:`NotImplementedError` was raised. + + Querying file type and status ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. 
versionchanged:: 3.8 - :meth:`~Path.exists()`, :meth:`~Path.is_dir()`, :meth:`~Path.is_file()`, - :meth:`~Path.is_mount()`, :meth:`~Path.is_symlink()`, - :meth:`~Path.is_block_device()`, :meth:`~Path.is_char_device()`, - :meth:`~Path.is_fifo()`, :meth:`~Path.is_socket()` now return ``False`` + :meth:`~Path.exists`, :meth:`~Path.is_dir`, :meth:`~Path.is_file`, + :meth:`~Path.is_mount`, :meth:`~Path.is_symlink`, + :meth:`~Path.is_block_device`, :meth:`~Path.is_char_device`, + :meth:`~Path.is_fifo`, :meth:`~Path.is_socket` now return ``False`` instead of raising an exception for paths that contain characters unrepresentable at the OS level. .. method:: Path.stat(*, follow_symlinks=True) - Return a :class:`os.stat_result` object containing information about this path, like :func:`os.stat`. + Return an :class:`os.stat_result` object containing information about this path, like :func:`os.stat`. The result is looked up at each call to this method. This method normally follows symlinks; to stat a symlink add the argument @@ -1260,7 +1367,7 @@ Reading directories This can be used to prune the search, or to impose a specific order of visiting, or even to inform :meth:`Path.walk` about directories the caller creates or renames before it resumes :meth:`Path.walk` again. Modifying *dirnames* when - *top_down* is false has no effect on the behavior of :meth:`Path.walk()` since the + *top_down* is false has no effect on the behavior of :meth:`Path.walk` since the directories in *dirnames* have already been generated by the time *dirnames* is yielded to the caller. @@ -1537,107 +1644,6 @@ Permissions and ownership symbolic link's mode is changed rather than its target's. -Other methods -^^^^^^^^^^^^^ - -.. classmethod:: Path.cwd() - - Return a new path object representing the current directory (as returned - by :func:`os.getcwd`):: - - >>> Path.cwd() - PosixPath('/home/antoine/pathlib') - - -.. classmethod:: Path.home() - - Return a new path object representing the user's home directory (as - returned by :func:`os.path.expanduser` with ``~`` construct). If the home - directory can't be resolved, :exc:`RuntimeError` is raised. - - :: - - >>> Path.home() - PosixPath('/home/antoine') - - .. versionadded:: 3.5 - - -.. method:: Path.expanduser() - - Return a new path with expanded ``~`` and ``~user`` constructs, - as returned by :meth:`os.path.expanduser`. If a home directory can't be - resolved, :exc:`RuntimeError` is raised. - - :: - - >>> p = PosixPath('~/films/Monty Python') - >>> p.expanduser() - PosixPath('/home/eric/films/Monty Python') - - .. versionadded:: 3.5 - - -.. method:: Path.readlink() - - Return the path to which the symbolic link points (as returned by - :func:`os.readlink`):: - - >>> p = Path('mylink') - >>> p.symlink_to('setup.py') - >>> p.readlink() - PosixPath('setup.py') - - .. versionadded:: 3.9 - - .. versionchanged:: 3.13 - Raises :exc:`UnsupportedOperation` if :func:`os.readlink` is not - available. In previous versions, :exc:`NotImplementedError` was raised. - - -.. method:: Path.absolute() - - Make the path absolute, without normalization or resolving symlinks. - Returns a new path object:: - - >>> p = Path('tests') - >>> p - PosixPath('tests') - >>> p.absolute() - PosixPath('/home/antoine/pathlib/tests') - - -.. method:: Path.resolve(strict=False) - - Make the path absolute, resolving any symlinks. 
A new path object is - returned:: - - >>> p = Path() - >>> p - PosixPath('.') - >>> p.resolve() - PosixPath('/home/antoine/pathlib') - - "``..``" components are also eliminated (this is the only method to do so):: - - >>> p = Path('docs/../setup.py') - >>> p.resolve() - PosixPath('/home/antoine/pathlib/setup.py') - - If a path doesn't exist or a symlink loop is encountered, and *strict* is - ``True``, :exc:`OSError` is raised. If *strict* is ``False``, the path is - resolved as far as possible and any remainder is appended without checking - whether it exists. - - .. versionchanged:: 3.6 - The *strict* parameter was added (pre-3.6 behavior is strict). - - .. versionchanged:: 3.13 - Symlink loops are treated like other errors: :exc:`OSError` is raised in - strict mode, and no exception is raised in non-strict mode. In previous - versions, :exc:`RuntimeError` is raised no matter the value of *strict*. - - .. _pathlib-pattern-language: Pattern language @@ -1759,39 +1765,54 @@ Corresponding tools Below is a table mapping various :mod:`os` functions to their corresponding :class:`PurePath`/:class:`Path` equivalent. -==================================== ============================== -:mod:`os` and :mod:`os.path` :mod:`pathlib` -==================================== ============================== -:func:`os.path.abspath` :meth:`Path.absolute` -:func:`os.path.realpath` :meth:`Path.resolve` -:func:`os.chmod` :meth:`Path.chmod` -:func:`os.mkdir` :meth:`Path.mkdir` -:func:`os.makedirs` :meth:`Path.mkdir` -:func:`os.rename` :meth:`Path.rename` -:func:`os.replace` :meth:`Path.replace` -:func:`os.rmdir` :meth:`Path.rmdir` -:func:`os.remove`, :func:`os.unlink` :meth:`Path.unlink` -:func:`os.getcwd` :func:`Path.cwd` -:func:`os.path.exists` :meth:`Path.exists` -:func:`os.path.expanduser` :meth:`Path.expanduser` and - :meth:`Path.home` -:func:`os.listdir` :meth:`Path.iterdir` -:func:`os.walk` :meth:`Path.walk` -:func:`os.path.isdir` :meth:`Path.is_dir` -:func:`os.path.isfile` :meth:`Path.is_file` -:func:`os.path.islink` :meth:`Path.is_symlink` -:func:`os.link` :meth:`Path.hardlink_to` -:func:`os.symlink` :meth:`Path.symlink_to` -:func:`os.readlink` :meth:`Path.readlink` -:func:`os.path.relpath` :meth:`PurePath.relative_to` -:func:`os.stat` :meth:`Path.stat`, - :meth:`Path.owner`, - :meth:`Path.group` -:func:`os.path.isabs` :meth:`PurePath.is_absolute` -:func:`os.path.join` :func:`PurePath.joinpath` -:func:`os.path.basename` :attr:`PurePath.name` -:func:`os.path.dirname` :attr:`PurePath.parent` -:func:`os.path.samefile` :meth:`Path.samefile` -:func:`os.path.splitext` :attr:`PurePath.stem` and - :attr:`PurePath.suffix` -==================================== ============================== +===================================== ============================================== +:mod:`os` and :mod:`os.path` :mod:`pathlib` +===================================== ============================================== +:func:`os.path.dirname` :attr:`PurePath.parent` +:func:`os.path.basename` :attr:`PurePath.name` +:func:`os.path.splitext` :attr:`PurePath.stem`, :attr:`PurePath.suffix` +:func:`os.path.join` :meth:`PurePath.joinpath` +:func:`os.path.isabs` :meth:`PurePath.is_absolute` +:func:`os.path.relpath` :meth:`PurePath.relative_to` [1]_ +:func:`os.path.expanduser` :meth:`Path.expanduser` [2]_ +:func:`os.path.realpath` :meth:`Path.resolve` +:func:`os.path.abspath` :meth:`Path.absolute` [3]_ +:func:`os.path.exists` :meth:`Path.exists` +:func:`os.path.isfile` :meth:`Path.is_file` +:func:`os.path.isdir` :meth:`Path.is_dir` 
+:func:`os.path.islink` :meth:`Path.is_symlink` +:func:`os.path.isjunction` :meth:`Path.is_junction` +:func:`os.path.ismount` :meth:`Path.is_mount` +:func:`os.path.samefile` :meth:`Path.samefile` +:func:`os.getcwd` :meth:`Path.cwd` +:func:`os.stat` :meth:`Path.stat` +:func:`os.lstat` :meth:`Path.lstat` +:func:`os.listdir` :meth:`Path.iterdir` +:func:`os.walk` :meth:`Path.walk` [4]_ +:func:`os.mkdir`, :func:`os.makedirs` :meth:`Path.mkdir` +:func:`os.link` :meth:`Path.hardlink_to` +:func:`os.symlink` :meth:`Path.symlink_to` +:func:`os.readlink` :meth:`Path.readlink` +:func:`os.rename` :meth:`Path.rename` +:func:`os.replace` :meth:`Path.replace` +:func:`os.remove`, :func:`os.unlink` :meth:`Path.unlink` +:func:`os.rmdir` :meth:`Path.rmdir` +:func:`os.chmod` :meth:`Path.chmod` +:func:`os.lchmod` :meth:`Path.lchmod` +===================================== ============================================== + +.. rubric:: Footnotes + +.. [1] :func:`os.path.relpath` calls :func:`~os.path.abspath` to make paths + absolute and remove "``..``" parts, whereas :meth:`PurePath.relative_to` + is a lexical operation that raises :exc:`ValueError` when its inputs' + anchors differ (e.g. if one path is absolute and the other relative.) +.. [2] :func:`os.path.expanduser` returns the path unchanged if the home + directory can't be resolved, whereas :meth:`Path.expanduser` raises + :exc:`RuntimeError`. +.. [3] :func:`os.path.abspath` removes "``..``" components without resolving + symlinks, which may change the meaning of the path, whereas + :meth:`Path.absolute` leaves any "``..``" components in the path. +.. [4] :func:`os.walk` always follows symlinks when categorizing paths into + *dirnames* and *filenames*, whereas :meth:`Path.walk` categorizes all + symlinks into *filenames* when *follow_symlinks* is false (the default.) diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index cd649620394..9478d7b7d55 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -49,7 +49,7 @@ You can then step through the code following this statement, and continue running without the debugger using the :pdbcmd:`continue` command. .. versionchanged:: 3.7 - The built-in :func:`breakpoint()`, when called with defaults, can be used + The built-in :func:`breakpoint`, when called with defaults, can be used instead of ``import pdb; pdb.set_trace()``. :: diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst index 5d4ff34ba02..f095cc84173 100644 --- a/Doc/library/pkgutil.rst +++ b/Doc/library/pkgutil.rst @@ -34,9 +34,9 @@ support. *name* argument. This feature is similar to :file:`\*.pth` files (see the :mod:`site` module for more information), except that it doesn't special-case lines starting with ``import``. A :file:`\*.pkg` file is trusted at face - value: apart from checking for duplicates, all entries found in a - :file:`\*.pkg` file are added to the path, regardless of whether they exist - on the filesystem. (This is a feature.) + value: apart from skipping blank lines and ignoring comments, all entries + found in a :file:`\*.pkg` file are added to the path, regardless of whether + they exist on the filesystem (this is a feature). If the input path is not a list (as is the case for frozen packages) it is returned unchanged. 
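As a quick illustration of the pathlib methods reorganised above, here is a small sketch; the printed results assume a POSIX system with a resolvable home directory, and the file names are purely illustrative::

    from pathlib import Path

    p = Path("docs/../setup.py")
    print(p.absolute())                      # anchored to the cwd, ".." is kept
    print(p.resolve())                       # ".." eliminated, symlinks followed
    print(Path("~/projects").expanduser())   # e.g. /home/user/projects
    print(Path.cwd(), Path.home())

``absolute()`` does not resolve symlinks or remove ``..`` components, whereas ``resolve()`` does, which is why only the latter can safely normalize a path.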
The input path is not modified; an extended copy is diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index f082393ef93..1beb3b9eb89 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -150,7 +150,7 @@ Cross Platform On iOS and Android, this returns the user-facing OS name (i.e, ``'iOS``, ``'iPadOS'`` or ``'Android'``). To obtain the kernel name (``'Darwin'`` or - ``'Linux'``), use :func:`os.uname()`. + ``'Linux'``), use :func:`os.uname`. .. function:: system_alias(system, release, version) @@ -165,7 +165,7 @@ Cross Platform returned if the value cannot be determined. On iOS and Android, this is the user-facing OS version. To obtain the - Darwin or Linux kernel version, use :func:`os.uname()`. + Darwin or Linux kernel version, use :func:`os.uname`. .. function:: uname() diff --git a/Doc/library/plistlib.rst b/Doc/library/plistlib.rst index 78b3c2697bd..2906ebe7822 100644 --- a/Doc/library/plistlib.rst +++ b/Doc/library/plistlib.rst @@ -117,7 +117,7 @@ This module defines the following functions: when a key of a dictionary is not a string, otherwise such keys are skipped. When *aware_datetime* is true and any field with type ``datetime.datetime`` - is set as a :ref:`aware object `, it will convert to + is set as an :ref:`aware object `, it will convert to UTC timezone before writing it. A :exc:`TypeError` will be raised if the object is of an unsupported type or diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst index 9721da7220d..3334833eba6 100644 --- a/Doc/library/profile.rst +++ b/Doc/library/profile.rst @@ -682,7 +682,7 @@ you are using :class:`profile.Profile` or :class:`cProfile.Profile`, that you choose (see :ref:`profile-calibration`). For most machines, a timer that returns a lone integer value will provide the best results in terms of low overhead during profiling. (:func:`os.times` is *pretty* bad, as it - returns a tuple of floating point values). If you want to substitute a + returns a tuple of floating-point values). If you want to substitute a better timer in the cleanest fashion, derive a class and hardwire a replacement dispatch method that best handles your timer call, along with the appropriate calibration constant. @@ -699,7 +699,7 @@ you are using :class:`profile.Profile` or :class:`cProfile.Profile`, As the :class:`cProfile.Profile` class cannot be calibrated, custom timer functions should be used with care and should be as fast as possible. For the best results with a custom timer, it might be necessary to hard-code it - in the C source of the internal :mod:`_lsprof` module. + in the C source of the internal :mod:`!_lsprof` module. Python 3.3 adds several new functions in :mod:`time` that can be used to make precise measurements of process or wall-clock time. For example, see diff --git a/Doc/library/pydoc.rst b/Doc/library/pydoc.rst index f7ca1e04569..70e9c604eba 100644 --- a/Doc/library/pydoc.rst +++ b/Doc/library/pydoc.rst @@ -21,7 +21,7 @@ modules. The documentation can be presented as pages of text on the console, served to a web browser, or saved to HTML files. For modules, classes, functions and methods, the displayed documentation is -derived from the docstring (i.e. the :attr:`!__doc__` attribute) of the object, +derived from the docstring (i.e. the :attr:`~definition.__doc__` attribute) of the object, and recursively of its documentable members. 
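A tiny sketch of the distinction the platform notes above draw between the user-facing OS name and the kernel name; the output naturally varies by system::

    import os
    import platform

    print(platform.system(), platform.release())   # e.g. "Linux 6.8.0" or "iOS 17.5"
    if hasattr(os, "uname"):                       # POSIX only
        u = os.uname()
        print(u.sysname, u.release)                # kernel name and version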
If there is no docstring, :mod:`!pydoc` tries to obtain a description from the block of comment lines just above the definition of the class, function or method in the source file, or at diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 755d1c8908c..ef0cfb0e76c 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -200,8 +200,8 @@ Functions for sequences For a given seed, the :func:`choices` function with equal weighting typically produces a different sequence than repeated calls to - :func:`choice`. The algorithm used by :func:`choices` uses floating - point arithmetic for internal consistency and speed. The algorithm used + :func:`choice`. The algorithm used by :func:`choices` uses floating-point + arithmetic for internal consistency and speed. The algorithm used by :func:`choice` defaults to integer arithmetic with repeated selections to avoid small biases from round-off error. @@ -298,12 +298,12 @@ be found in any statistics text. .. function:: random() - Return the next random floating point number in the range ``0.0 <= X < 1.0`` + Return the next random floating-point number in the range ``0.0 <= X < 1.0`` .. function:: uniform(a, b) - Return a random floating point number *N* such that ``a <= N <= b`` for + Return a random floating-point number *N* such that ``a <= N <= b`` for ``a <= b`` and ``b <= N <= a`` for ``b < a``. The end-point value ``b`` may or may not be included in the range @@ -313,7 +313,7 @@ be found in any statistics text. .. function:: triangular(low, high, mode) - Return a random floating point number *N* such that ``low <= N <= high`` and + Return a random floating-point number *N* such that ``low <= N <= high`` and with the specified *mode* between those bounds. The *low* and *high* bounds default to zero and one. The *mode* argument defaults to the midpoint between the bounds, giving a symmetric distribution. @@ -741,7 +741,7 @@ The following options are accepted: .. option:: -f --float - Print a random floating point number between 1 and N inclusive, + Print a random floating-point number between 0 and N inclusive, using :meth:`uniform`. If no options are given, the output depends on the input: diff --git a/Doc/library/re.rst b/Doc/library/re.rst index cc979fe66f7..9db6f1da3be 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -572,6 +572,12 @@ character ``'$'``. Word boundaries are determined by the current locale if the :py:const:`~re.LOCALE` flag is used. + .. note:: + + Note that ``\B`` does not match an empty string, which differs from + RE implementations in other programming languages such as Perl. + This behavior is kept for compatibility reasons. + .. index:: single: \d; in regular expressions ``\d`` @@ -600,10 +606,9 @@ character ``'$'``. ``\s`` For Unicode (str) patterns: - Matches Unicode whitespace characters (which includes - ``[ \t\n\r\f\v]``, and also many other characters, for example the - non-breaking spaces mandated by typography rules in many - languages). + Matches Unicode whitespace characters (as defined by :py:meth:`str.isspace`). + This includes ``[ \t\n\r\f\v]``, and also many other characters, for example the + non-breaking spaces mandated by typography rules in many languages. Matches ``[ \t\n\r\f\v]`` if the :py:const:`~re.ASCII` flag is used. 
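The Unicode behaviour of ``\s`` described above can be demonstrated in a couple of lines; the no-break space is simply a convenient non-ASCII whitespace character::

    import re

    text = "one\u00A0two"                      # U+00A0, NO-BREAK SPACE
    print(re.findall(r"\s", text))             # ['\xa0'] -- str.isspace() is true for it
    print(re.findall(r"\s", text, re.ASCII))   # []       -- ASCII mode matches only [ \t\n\r\f\v]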
diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst index 5658b93c81d..4a042056632 100644 --- a/Doc/library/readline.rst +++ b/Doc/library/readline.rst @@ -24,7 +24,7 @@ in the GNU Readline manual for information about the format and allowable constructs of that file, and the capabilities of the Readline library in general. -.. include:: ../includes/wasm-ios-notavail.rst +.. include:: ../includes/wasm-mobile-notavail.rst .. note:: @@ -45,6 +45,10 @@ Readline library in general. python:bind -v python:bind ^I rl_complete + Also note that different libraries may use different history file formats. + When switching the underlying library, existing history files may become + unusable. + .. data:: backend The name of the underlying Readline library being used, either diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst index dd80b1e6670..0515d205bbc 100644 --- a/Doc/library/resource.rst +++ b/Doc/library/resource.rst @@ -305,7 +305,7 @@ These functions are used to retrieve resource usage information: elements. The fields :attr:`ru_utime` and :attr:`ru_stime` of the return value are - floating point values representing the amount of time spent executing in user + floating-point values representing the amount of time spent executing in user mode and the amount of time spent executing in system mode, respectively. The remaining values are integers. Consult the :manpage:`getrusage(2)` man page for detailed information about these values. A brief summary is presented here: diff --git a/Doc/library/secrets.rst b/Doc/library/secrets.rst index 1401a925103..75dafc54d40 100644 --- a/Doc/library/secrets.rst +++ b/Doc/library/secrets.rst @@ -52,7 +52,7 @@ randomness that your operating system provides. .. function:: randbits(k) - Return an int with *k* random bits. + Return a non-negative int with *k* random bits. Generating tokens diff --git a/Doc/library/select.rst b/Doc/library/select.rst index 06ebaf0201e..f23a249f44b 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -129,7 +129,7 @@ The module defines the following: Empty iterables are allowed, but acceptance of three empty iterables is platform-dependent. (It is known to work on Unix but not on Windows.) The - optional *timeout* argument specifies a time-out as a floating point number + optional *timeout* argument specifies a time-out as a floating-point number in seconds. When the *timeout* argument is omitted the function blocks until at least one file descriptor is ready. A time-out value of zero specifies a poll and never blocks. diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index fd32479195e..af7e35f5fb2 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -449,9 +449,10 @@ Directory and files operations *mode* is a permission mask passed to :func:`os.access`, by default determining if the file exists and is executable. - *path* is a "``PATH`` string" specifying the lookup directory list. When no - *path* is specified, the results of :func:`os.environ` are used, returning - either the "PATH" value or a fallback of :data:`os.defpath`. + *path* is a "``PATH`` string" specifying the directories to look in, + delimited by :data:`os.pathsep`. When no *path* is specified, the + :envvar:`PATH` environment variable is read from :data:`os.environ`, + falling back to :data:`os.defpath` if it is not set. On Windows, the current directory is prepended to the *path* if *mode* does not include ``os.X_OK``. 
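The ``shutil.which`` lookup rules clarified above are easy to try out; the directories below are only an example of assembling an ``os.pathsep``-delimited *path* string::

    import os
    import shutil

    print(shutil.which("python3"))              # searches os.environ["PATH"], or os.defpath
    custom = os.pathsep.join(["/usr/local/bin", "/usr/bin"])
    print(shutil.which("python3", path=custom))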
When the *mode* does include ``os.X_OK``, the @@ -460,9 +461,9 @@ Directory and files operations consulting the current working directory for executables: set the environment variable ``NoDefaultCurrentDirectoryInExePath``. - Also on Windows, the ``PATHEXT`` variable is used to resolve commands - that may not already include an extension. For example, if you call - ``shutil.which("python")``, :func:`which` will search ``PATHEXT`` + Also on Windows, the :envvar:`PATHEXT` environment variable is used to + resolve commands that may not already include an extension. For example, + if you call ``shutil.which("python")``, :func:`which` will search ``PATHEXT`` to know that it should look for ``python.exe`` within the *path* directories. For example, on Windows:: diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst index 48c6841c648..17fcb2b3707 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -411,7 +411,7 @@ The :mod:`signal` module defines the following functions: See the :manpage:`pidfd_send_signal(2)` man page for more information. - .. availability:: Linux >= 5.1 + .. availability:: Linux >= 5.1, Android >= :func:`build-time ` API level 31 .. versionadded:: 3.9 @@ -425,7 +425,7 @@ The :mod:`signal` module defines the following functions: signal to a particular Python thread would be to force a running system call to fail with :exc:`InterruptedError`. - Use :func:`threading.get_ident()` or the :attr:`~threading.Thread.ident` + Use :func:`threading.get_ident` or the :attr:`~threading.Thread.ident` attribute of :class:`threading.Thread` objects to get a suitable value for *thread_id*. diff --git a/Doc/library/site.rst b/Doc/library/site.rst index 1c420419568..4508091f679 100644 --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -15,8 +15,9 @@ import can be suppressed using the interpreter's :option:`-S` option. .. index:: triple: module; search; path -Importing this module will append site-specific paths to the module search path -and add a few builtins, unless :option:`-S` was used. In that case, this module +Importing this module normally appends site-specific paths to the module search path +and adds :ref:`callables `, including :func:`help` to the built-in +namespace. However, Python startup option :option:`-S` blocks this and this module can be safely imported with no automatic modifications to the module search path or additions to the builtins. To explicitly trigger the usual site-specific additions, call the :func:`main` function. @@ -32,7 +33,10 @@ It starts by constructing up to four directories from a head and a tail part. For the head part, it uses ``sys.prefix`` and ``sys.exec_prefix``; empty heads are skipped. For the tail part, it uses the empty string and then :file:`lib/site-packages` (on Windows) or -:file:`lib/python{X.Y}/site-packages` (on Unix and macOS). For each +:file:`lib/python{X.Y[t]}/site-packages` (on Unix and macOS). (The +optional suffix "t" indicates the :term:`free threading` build, and is +appended if ``"t"`` is present in the :attr:`sys.abiflags` constant.) +For each of the distinct head-tail combinations, it sees if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly added path for configuration files. @@ -40,6 +44,11 @@ added path for configuration files. .. versionchanged:: 3.5 Support for the "site-python" directory has been removed. +.. 
versionchanged:: 3.13 + On Unix, :term:`Free threading ` Python installations are + identified by the "t" suffix in the version-specific directory name, such as + :file:`lib/python3.13t/`. + If a file named "pyvenv.cfg" exists one directory above sys.executable, sys.prefix and sys.exec_prefix are set to that directory and it is also checked for site-packages (sys.base_prefix and @@ -188,11 +197,12 @@ Module contents Path to the user site-packages for the running Python. Can be ``None`` if :func:`getusersitepackages` hasn't been called yet. Default value is - :file:`~/.local/lib/python{X.Y}/site-packages` for UNIX and non-framework + :file:`~/.local/lib/python{X.Y}[t]/site-packages` for UNIX and non-framework macOS builds, :file:`~/Library/Python/{X.Y}/lib/python/site-packages` for macOS framework builds, and :file:`{%APPDATA%}\\Python\\Python{XY}\\site-packages` - on Windows. This directory is a site directory, which means that - :file:`.pth` files in it will be processed. + on Windows. The optional "t" indicates the free-threaded build. This + directory is a site directory, which means that :file:`.pth` files in it + will be processed. .. data:: USER_BASE diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 2df0257d1f2..dc1baa28e1c 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -700,6 +700,13 @@ Constants .. versionadded:: 3.12 +.. data:: SHUT_RD + SHUT_WR + SHUT_RDWR + + These constants are used by the :meth:`~socket.socket.shutdown` method of socket objects. + + .. availability:: not WASI. Functions ^^^^^^^^^ @@ -729,7 +736,7 @@ The following functions all create :ref:`socket objects `. of :meth:`socket.getpeername` but not the actual OS resource. Unlike :func:`socket.fromfd`, *fileno* will return the same socket and not a duplicate. This may help close a detached socket using - :meth:`socket.close()`. + :meth:`socket.close`. The newly created socket is :ref:`non-inheritable `. @@ -1265,7 +1272,7 @@ The :mod:`socket` module also offers various network-related services: .. audit-event:: socket.sethostname name socket.sethostname - .. availability:: Unix. + .. availability:: Unix, not Android. .. versionadded:: 3.3 @@ -1408,7 +1415,7 @@ to sockets. .. method:: socket.close() Mark the socket closed. The underlying system resource (e.g. a file - descriptor) is also closed when all file objects from :meth:`makefile()` + descriptor) is also closed when all file objects from :meth:`makefile` are closed. Once that happens, all future operations on the socket object will fail. The remote end will receive no more data (after queued data is flushed). @@ -1423,10 +1430,10 @@ to sockets. .. note:: - :meth:`close()` releases the resource associated with a connection but + :meth:`close` releases the resource associated with a connection but does not necessarily close the connection immediately. If you want - to close the connection in a timely fashion, call :meth:`shutdown()` - before :meth:`close()`. + to close the connection in a timely fashion, call :meth:`shutdown` + before :meth:`close`. .. method:: socket.connect(address) @@ -1922,7 +1929,7 @@ to sockets. .. method:: socket.settimeout(value) Set a timeout on blocking socket operations. The *value* argument can be a - nonnegative floating point number expressing seconds, or ``None``. + nonnegative floating-point number expressing seconds, or ``None``. 
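A compact sketch of the shutdown-before-close advice and the ``settimeout`` semantics described in the socket changes above, using a throwaway loopback connection (port 0 lets the OS pick a free port)::

    import socket

    with socket.create_server(("127.0.0.1", 0)) as srv:
        client = socket.create_connection(srv.getsockname(), timeout=5.0)
        client.settimeout(2.5)                # nonnegative float, or None to block forever
        client.shutdown(socket.SHUT_RDWR)     # end the conversation promptly...
        client.close()                        # ...then release the resource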
If a non-zero value is given, subsequent socket operations will raise a :exc:`timeout` exception if the timeout period *value* has elapsed before the operation has completed. If zero is given, the socket is put in @@ -2035,7 +2042,7 @@ can be changed by calling :func:`setdefaulttimeout`. in non-blocking mode. Also, the blocking and timeout modes are shared between file descriptors and socket objects that refer to the same network endpoint. This implementation detail can have visible consequences if e.g. you decide - to use the :meth:`~socket.fileno()` of a socket. + to use the :meth:`~socket.fileno` of a socket. Timeouts and the ``connect`` method ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 6da8798ddfe..91250c45410 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -127,7 +127,7 @@ and call :meth:`res.fetchone() ` to fetch the resulting row: We can see that the table has been created, as the query returns a :class:`tuple` containing the table's name. If we query ``sqlite_master`` for a non-existent table ``spam``, -:meth:`!res.fetchone()` will return ``None``: +:meth:`!res.fetchone` will return ``None``: .. doctest:: @@ -525,21 +525,20 @@ Module constants The mappings from SQLite threading modes to DB-API 2.0 threadsafety levels are as follows: - +------------------+-----------------+----------------------+-------------------------------+ - | SQLite threading | `threadsafety`_ | `SQLITE_THREADSAFE`_ | DB-API 2.0 meaning | - | mode | | | | - +==================+=================+======================+===============================+ - | single-thread | 0 | 0 | Threads may not share the | - | | | | module | - +------------------+-----------------+----------------------+-------------------------------+ - | multi-thread | 1 | 2 | Threads may share the module, | - | | | | but not connections | - +------------------+-----------------+----------------------+-------------------------------+ - | serialized | 3 | 1 | Threads may share the module, | - | | | | connections and cursors | - +------------------+-----------------+----------------------+-------------------------------+ - - .. _threadsafety: https://peps.python.org/pep-0249/#threadsafety + +------------------+----------------------+----------------------+-------------------------------+ + | SQLite threading | :pep:`threadsafety | `SQLITE_THREADSAFE`_ | DB-API 2.0 meaning | + | mode | <0249#threadsafety>` | | | + +==================+======================+======================+===============================+ + | single-thread | 0 | 0 | Threads may not share the | + | | | | module | + +------------------+----------------------+----------------------+-------------------------------+ + | multi-thread | 1 | 2 | Threads may share the module, | + | | | | but not connections | + +------------------+----------------------+----------------------+-------------------------------+ + | serialized | 3 | 1 | Threads may share the module, | + | | | | connections and cursors | + +------------------+----------------------+----------------------+-------------------------------+ + .. _SQLITE_THREADSAFE: https://sqlite.org/compile.html#threadsafe .. 
versionchanged:: 3.11 diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index dc72f67c636..b7fb1fc07d1 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -1049,25 +1049,25 @@ SSL Sockets SSL sockets provide the following methods of :ref:`socket-objects`: - - :meth:`~socket.socket.accept()` - - :meth:`~socket.socket.bind()` - - :meth:`~socket.socket.close()` - - :meth:`~socket.socket.connect()` - - :meth:`~socket.socket.detach()` - - :meth:`~socket.socket.fileno()` - - :meth:`~socket.socket.getpeername()`, :meth:`~socket.socket.getsockname()` - - :meth:`~socket.socket.getsockopt()`, :meth:`~socket.socket.setsockopt()` - - :meth:`~socket.socket.gettimeout()`, :meth:`~socket.socket.settimeout()`, - :meth:`~socket.socket.setblocking()` - - :meth:`~socket.socket.listen()` - - :meth:`~socket.socket.makefile()` - - :meth:`~socket.socket.recv()`, :meth:`~socket.socket.recv_into()` + - :meth:`~socket.socket.accept` + - :meth:`~socket.socket.bind` + - :meth:`~socket.socket.close` + - :meth:`~socket.socket.connect` + - :meth:`~socket.socket.detach` + - :meth:`~socket.socket.fileno` + - :meth:`~socket.socket.getpeername`, :meth:`~socket.socket.getsockname` + - :meth:`~socket.socket.getsockopt`, :meth:`~socket.socket.setsockopt` + - :meth:`~socket.socket.gettimeout`, :meth:`~socket.socket.settimeout`, + :meth:`~socket.socket.setblocking` + - :meth:`~socket.socket.listen` + - :meth:`~socket.socket.makefile` + - :meth:`~socket.socket.recv`, :meth:`~socket.socket.recv_into` (but passing a non-zero ``flags`` argument is not allowed) - - :meth:`~socket.socket.send()`, :meth:`~socket.socket.sendall()` (with + - :meth:`~socket.socket.send`, :meth:`~socket.socket.sendall` (with the same limitation) - - :meth:`~socket.socket.sendfile()` (but :mod:`os.sendfile` will be used - for plain-text sockets only, else :meth:`~socket.socket.send()` will be used) - - :meth:`~socket.socket.shutdown()` + - :meth:`~socket.socket.sendfile` (but :mod:`os.sendfile` will be used + for plain-text sockets only, else :meth:`~socket.socket.send` will be used) + - :meth:`~socket.socket.shutdown` However, since the SSL (and TLS) protocol has its own framing atop of TCP, the SSL sockets abstraction can, in certain respects, diverge from @@ -1566,7 +1566,7 @@ to speed up repeated connections from the same clients. The *capath* string, if present, is the path to a directory containing several CA certificates in PEM format, following an `OpenSSL specific layout - `_. + `_. The *cadata* object, if present, is either an ASCII string of one or more PEM-encoded certificates or a :term:`bytes-like object` of DER-encoded @@ -1641,7 +1641,7 @@ to speed up repeated connections from the same clients. Set the available ciphers for sockets created with this context. It should be a string in the `OpenSSL cipher list format - `_. + `_. If no cipher can be selected (because compile-time options or other configuration forbids use of all the specified ciphers), an :class:`SSLError` will be raised. @@ -1742,7 +1742,7 @@ to speed up repeated connections from the same clients. IDN-encoded internationalized domain name, the *server_name_callback* receives a decoded U-label (``"pythön.org"``). - If there is an decoding error on the server name, the TLS connection will + If there is a decoding error on the server name, the TLS connection will terminate with an :const:`ALERT_DESCRIPTION_INTERNAL_ERROR` fatal TLS alert message to the client. @@ -1874,7 +1874,7 @@ to speed up repeated connections from the same clients. .. 
method:: SSLContext.session_stats() Get statistics about the SSL sessions created or managed by this context. - A dictionary is returned which maps the names of each `piece of information `_ to their + A dictionary is returned which maps the names of each `piece of information `_ to their numeric values. For example, here is the total number of hits and misses in the session cache since the context was created:: @@ -2017,7 +2017,7 @@ to speed up repeated connections from the same clients. .. attribute:: SSLContext.security_level An integer representing the `security level - `_ + `_ for the context. This attribute is read-only. .. versionadded:: 3.10 @@ -2710,7 +2710,7 @@ Verifying certificates When calling the :class:`SSLContext` constructor directly, :const:`CERT_NONE` is the default. Since it does not authenticate the other -peer, it can be insecure, especially in client mode where most of time you +peer, it can be insecure, especially in client mode where most of the time you would like to ensure the authenticity of the server you're talking to. Therefore, when in client mode, it is highly recommended to use :const:`CERT_REQUIRED`. However, it is in itself not sufficient; you also @@ -2759,7 +2759,7 @@ enabled when negotiating a SSL session is possible through the :meth:`SSLContext.set_ciphers` method. Starting from Python 3.2.3, the ssl module disables certain weak ciphers by default, but you may want to further restrict the cipher choice. Be sure to read OpenSSL's documentation -about the `cipher list format `_. +about the `cipher list format `_. If you want to check which ciphers are enabled by a given cipher list, use :meth:`SSLContext.get_ciphers` or the ``openssl ciphers`` command on your system. diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index 8453135d2e1..614f5b905a4 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -73,7 +73,7 @@ or sample. ======================= =============================================================== :func:`mean` Arithmetic mean ("average") of data. -:func:`fmean` Fast, floating point arithmetic mean, with optional weighting. +:func:`fmean` Fast, floating-point arithmetic mean, with optional weighting. :func:`geometric_mean` Geometric mean of data. :func:`harmonic_mean` Harmonic mean of data. :func:`kde` Estimate the probability density distribution of the data. @@ -485,6 +485,12 @@ However, for reading convenience, most of the examples show sorted sequences. >>> mode(["red", "blue", "blue", "red", "green", "red", "red"]) 'red' + Only hashable inputs are supported. To handle type :class:`set`, + consider casting to :class:`frozenset`. To handle type :class:`list`, + consider casting to :class:`tuple`. For mixed or nested inputs, consider + using this slower quadratic algorithm that only depends on equality tests: + ``max(data, key=data.count)``. + .. versionchanged:: 3.8 Now handles multimodal datasets by returning the first mode encountered. 
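The hashability guidance added to ``statistics.mode`` above, in runnable form; the sample data is arbitrary::

    from statistics import mode

    print(mode(["red", "blue", "blue", "red", "green", "red", "red"]))  # red

    data = [[1, 2], [1, 2], [3]]             # lists are unhashable
    print(mode(map(tuple, data)))            # (1, 2) -- cast to tuples first
    print(max(data, key=data.count))         # [1, 2] -- quadratic fallback, no casting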
Formerly, it raised :exc:`StatisticsError` when more than one mode was diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 79687b94f0a..8b9d29545e3 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -209,18 +209,18 @@ Numeric Types --- :class:`int`, :class:`float`, :class:`complex` pair: object; numeric pair: object; Boolean pair: object; integer - pair: object; floating point + pair: object; floating-point pair: object; complex number pair: C; language -There are three distinct numeric types: :dfn:`integers`, :dfn:`floating -point numbers`, and :dfn:`complex numbers`. In addition, Booleans are a -subtype of integers. Integers have unlimited precision. Floating point +There are three distinct numeric types: :dfn:`integers`, :dfn:`floating-point +numbers`, and :dfn:`complex numbers`. In addition, Booleans are a +subtype of integers. Integers have unlimited precision. Floating-point numbers are usually implemented using :c:expr:`double` in C; information -about the precision and internal representation of floating point +about the precision and internal representation of floating-point numbers for the machine on which your program is running is available in :data:`sys.float_info`. Complex numbers have a real and imaginary -part, which are each a floating point number. To extract these parts +part, which are each a floating-point number. To extract these parts from a complex number *z*, use ``z.real`` and ``z.imag``. (The standard library includes the additional numeric types :mod:`fractions.Fraction`, for rationals, and :mod:`decimal.Decimal`, for floating-point numbers with @@ -229,7 +229,7 @@ user-definable precision.) .. index:: pair: numeric; literals pair: integer; literals - pair: floating point; literals + pair: floating-point; literals pair: complex number; literals pair: hexadecimal; literals pair: octal; literals @@ -238,7 +238,7 @@ user-definable precision.) Numbers are created by numeric literals or as the result of built-in functions and operators. Unadorned integer literals (including hex, octal and binary numbers) yield integers. Numeric literals containing a decimal point or an -exponent sign yield floating point numbers. Appending ``'j'`` or ``'J'`` to a +exponent sign yield floating-point numbers. Appending ``'j'`` or ``'J'`` to a numeric literal yields an imaginary number (a complex number with a zero real part) which you can add to an integer or float to get a complex number with real and imaginary parts. @@ -832,7 +832,7 @@ over ``&``, ``|`` and ``^``. .. deprecated:: 3.12 The use of the bitwise inversion operator ``~`` is deprecated and will - raise an error in Python 3.14. + raise an error in Python 3.16. :class:`bool` is a subclass of :class:`int` (see :ref:`typesnumeric`). In many numeric contexts, ``False`` and ``True`` behave like the integers 0 and 1, respectively. @@ -1209,8 +1209,9 @@ accepts integers that meet the value restriction ``0 <= x <= 255``). 
| ``s.pop()`` or ``s.pop(i)`` | retrieves the item at *i* and | \(2) | | | also removes it from *s* | | +------------------------------+--------------------------------+---------------------+ -| ``s.remove(x)`` | remove the first item from *s* | \(3) | -| | where ``s[i]`` is equal to *x* | | +| ``s.remove(x)`` | removes the first item from | \(3) | +| | *s* where ``s[i]`` is equal to | | +| | *x* | | +------------------------------+--------------------------------+---------------------+ | ``s.reverse()`` | reverses the items of *s* in | \(4) | | | place | | @@ -1497,8 +1498,8 @@ objects that compare equal might have different :attr:`~range.start`, .. seealso:: * The `linspace recipe `_ - shows how to implement a lazy version of range suitable for floating - point applications. + shows how to implement a lazy version of range suitable for floating-point + applications. .. index:: single: string; text sequence type @@ -2095,8 +2096,9 @@ expression support in the :mod:`re` module). If *sep* is given, consecutive delimiters are not grouped together and are deemed to delimit empty strings (for example, ``'1,,2'.split(',')`` returns ``['1', '', '2']``). The *sep* argument may consist of multiple characters - (for example, ``'1<>2<>3'.split('<>')`` returns ``['1', '2', '3']``). - Splitting an empty string with a specified separator returns ``['']``. + as a single delimiter (to split with multiple delimiters, use + :func:`re.split`). Splitting an empty string with a specified separator + returns ``['']``. For example:: @@ -2106,6 +2108,8 @@ expression support in the :mod:`re` module). ['1', '2,3'] >>> '1,2,,3,'.split(',') ['1', '2', '', '3', ''] + >>> '1<>2<>3<4'.split('<>') + ['1', '2', '3<4'] If *sep* is not specified or is ``None``, a different splitting algorithm is applied: runs of consecutive whitespace are regarded as a single separator, @@ -2439,19 +2443,19 @@ The conversion types are: +------------+-----------------------------------------------------+-------+ | ``'X'`` | Signed hexadecimal (uppercase). | \(2) | +------------+-----------------------------------------------------+-------+ -| ``'e'`` | Floating point exponential format (lowercase). | \(3) | +| ``'e'`` | Floating-point exponential format (lowercase). | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'E'`` | Floating point exponential format (uppercase). | \(3) | +| ``'E'`` | Floating-point exponential format (uppercase). | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'f'`` | Floating point decimal format. | \(3) | +| ``'f'`` | Floating-point decimal format. | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'F'`` | Floating point decimal format. | \(3) | +| ``'F'`` | Floating-point decimal format. | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'g'`` | Floating point format. Uses lowercase exponential | \(4) | +| ``'g'`` | Floating-point format. Uses lowercase exponential | \(4) | | | format if exponent is less than -4 or not less than | | | | precision, decimal format otherwise. | | +------------+-----------------------------------------------------+-------+ -| ``'G'`` | Floating point format. Uses uppercase exponential | \(4) | +| ``'G'`` | Floating-point format. Uses uppercase exponential | \(4) | | | format if exponent is less than -4 or not less than | | | | precision, decimal format otherwise. 
| | +------------+-----------------------------------------------------+-------+ @@ -3149,10 +3153,9 @@ produce new objects. If *sep* is given, consecutive delimiters are not grouped together and are deemed to delimit empty subsequences (for example, ``b'1,,2'.split(b',')`` returns ``[b'1', b'', b'2']``). The *sep* argument may consist of a - multibyte sequence (for example, ``b'1<>2<>3'.split(b'<>')`` returns - ``[b'1', b'2', b'3']``). Splitting an empty sequence with a specified - separator returns ``[b'']`` or ``[bytearray(b'')]`` depending on the type - of object being split. The *sep* argument may be any + multibyte sequence as a single delimiter. Splitting an empty sequence with + a specified separator returns ``[b'']`` or ``[bytearray(b'')]`` depending + on the type of object being split. The *sep* argument may be any :term:`bytes-like object`. For example:: @@ -3163,6 +3166,8 @@ produce new objects. [b'1', b'2,3'] >>> b'1,2,,3,'.split(b',') [b'1', b'2', b'', b'3', b''] + >>> b'1<>2<>3<4'.split(b'<>') + [b'1', b'2', b'3<4'] If *sep* is not specified or is ``None``, a different splitting algorithm is applied: runs of consecutive ASCII whitespace are regarded as a single @@ -3436,7 +3441,7 @@ place, and instead produce new objects. ``b'abcdefghijklmnopqrstuvwxyz'``. Uppercase ASCII characters are those byte values in the sequence ``b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'``. - Unlike :func:`str.swapcase()`, it is always the case that + Unlike :func:`str.swapcase`, it is always the case that ``bin.swapcase().swapcase() == bin`` for the binary versions. Case conversions are symmetrical in ASCII, even though that is not generally true for arbitrary Unicode code points. @@ -3657,19 +3662,19 @@ The conversion types are: +------------+-----------------------------------------------------+-------+ | ``'X'`` | Signed hexadecimal (uppercase). | \(2) | +------------+-----------------------------------------------------+-------+ -| ``'e'`` | Floating point exponential format (lowercase). | \(3) | +| ``'e'`` | Floating-point exponential format (lowercase). | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'E'`` | Floating point exponential format (uppercase). | \(3) | +| ``'E'`` | Floating-point exponential format (uppercase). | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'f'`` | Floating point decimal format. | \(3) | +| ``'f'`` | Floating-point decimal format. | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'F'`` | Floating point decimal format. | \(3) | +| ``'F'`` | Floating-point decimal format. | \(3) | +------------+-----------------------------------------------------+-------+ -| ``'g'`` | Floating point format. Uses lowercase exponential | \(4) | +| ``'g'`` | Floating-point format. Uses lowercase exponential | \(4) | | | format if exponent is less than -4 or not less than | | | | precision, decimal format otherwise. | | +------------+-----------------------------------------------------+-------+ -| ``'G'`` | Floating point format. Uses uppercase exponential | \(4) | +| ``'G'`` | Floating-point format. Uses uppercase exponential | \(4) | | | format if exponent is less than -4 or not less than | | | | precision, decimal format otherwise. | | +------------+-----------------------------------------------------+-------+ @@ -3891,7 +3896,7 @@ copying. 
>>> a == b False - Note that, as with floating point numbers, ``v is w`` does *not* imply + Note that, as with floating-point numbers, ``v is w`` does *not* imply ``v == w`` for memoryview objects. .. versionchanged:: 3.3 @@ -3982,7 +3987,7 @@ copying. dangling resources) as soon as possible. After this method has been called, any further operation on the view - raises a :class:`ValueError` (except :meth:`release()` itself which can + raises a :class:`ValueError` (except :meth:`release` itself which can be called multiple times):: >>> m = memoryview(b'abc') @@ -4565,7 +4570,7 @@ can be used interchangeably to index the same dictionary entry. Return a shallow copy of the dictionary. - .. classmethod:: fromkeys(iterable, value=None) + .. classmethod:: fromkeys(iterable, value=None, /) Create a new dictionary with keys from *iterable* and values set to *value*. @@ -5481,22 +5486,6 @@ types, where they are relevant. Some of these are not reported by the :func:`dir` built-in function. -.. attribute:: object.__dict__ - - A dictionary or other mapping object used to store an object's (writable) - attributes. - - -.. attribute:: instance.__class__ - - The class to which a class instance belongs. - - -.. attribute:: class.__bases__ - - The tuple of base classes of a class object. - - .. attribute:: definition.__name__ The name of the class, function, method, descriptor, or @@ -5511,43 +5500,24 @@ types, where they are relevant. Some of these are not reported by the .. versionadded:: 3.3 -.. attribute:: definition.__type_params__ - - The :ref:`type parameters ` of generic classes, functions, - and :ref:`type aliases `. - - .. versionadded:: 3.12 - - -.. attribute:: class.__mro__ +.. attribute:: definition.__module__ - This attribute is a tuple of classes that are considered when looking for - base classes during method resolution. + The name of the module in which a class or function was defined. -.. method:: class.mro() +.. attribute:: definition.__doc__ - This method can be overridden by a metaclass to customize the method - resolution order for its instances. It is called at class instantiation, and - its result is stored in :attr:`~class.__mro__`. + The documentation string of a class or function, or ``None`` if undefined. -.. method:: class.__subclasses__ - - Each class keeps a list of weak references to its immediate subclasses. This - method returns a list of all those references still alive. The list is in - definition order. Example:: - - >>> int.__subclasses__() - [, , , ] - +.. attribute:: definition.__type_params__ -.. attribute:: class.__static_attributes__ + The :ref:`type parameters ` of generic classes, functions, + and :ref:`type aliases `. For classes and functions that + are not generic, this will be an empty tuple. - A tuple containing names of attributes of this class which are accessed - through ``self.X`` from any function in its body. + .. versionadded:: 3.12 - .. versionadded:: 3.13 .. _int_max_str_digits: diff --git a/Doc/library/string.rst b/Doc/library/string.rst index c3c0d732cf1..57a1f920523 100644 --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -418,7 +418,7 @@ instead. .. index:: single: _ (underscore); in string formatting The ``'_'`` option signals the use of an underscore for a thousands -separator for floating point presentation types and for integer +separator for floating-point presentation types and for integer presentation type ``'d'``. 
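The ``'_'`` separator and the floating-point presentation types discussed above, shown with :func:`format`; the numbers are arbitrary::

    print(format(1234567, "_d"))       # 1_234_567
    print(format(0xFFFFFFFF, "_x"))    # ffff_ffff  (grouped every 4 digits)
    print(format(12345.6789, ",.2f"))  # 12,345.68
    print(format(0.00001234, "g"))     # 1.234e-05  (switches to exponential form)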
For integer presentation types ``'b'``, ``'o'``, ``'x'``, and ``'X'``, underscores will be inserted every 4 digits. For other presentation types, specifying this option is an @@ -491,9 +491,9 @@ The available integer presentation types are: +---------+----------------------------------------------------------+ In addition to the above presentation types, integers can be formatted -with the floating point presentation types listed below (except +with the floating-point presentation types listed below (except ``'n'`` and ``None``). When doing so, :func:`float` is used to convert the -integer to a floating point number before formatting. +integer to a floating-point number before formatting. The available presentation types for :class:`float` and :class:`~decimal.Decimal` values are: @@ -574,11 +574,13 @@ The available presentation types for :class:`float` and | ``'%'`` | Percentage. Multiplies the number by 100 and displays | | | in fixed (``'f'``) format, followed by a percent sign. | +---------+----------------------------------------------------------+ - | None | For :class:`float` this is the same as ``'g'``, except | + | None | For :class:`float` this is like the ``'g'`` type, except | | | that when fixed-point notation is used to format the | | | result, it always includes at least one digit past the | - | | decimal point. The precision used is as large as needed | - | | to represent the given value faithfully. | + | | decimal point, and switches to the scientific notation | + | | when ``exp >= p - 1``. When the precision is not | + | | specified, the latter will be as large as needed to | + | | represent the given value faithfully. | | | | | | For :class:`~decimal.Decimal`, this is the same as | | | either ``'g'`` or ``'G'`` depending on the value of | diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst index a2c293443e2..4769affdf1d 100644 --- a/Doc/library/struct.rst +++ b/Doc/library/struct.rst @@ -279,9 +279,9 @@ Notes: (1) .. index:: single: ? (question mark); in struct format strings - The ``'?'`` conversion code corresponds to the :c:expr:`_Bool` type defined by - C99. If this type is not available, it is simulated using a :c:expr:`char`. In - standard mode, it is always represented by one byte. + The ``'?'`` conversion code corresponds to the :c:expr:`_Bool` type + defined by C standards since C99. In standard mode, it is + represented by one byte. (2) When attempting to pack a non-integer using any of the integer conversion diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 3a2178e2b7b..59c98e867ff 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -25,7 +25,7 @@ modules and functions can be found in the following sections. :pep:`324` -- PEP proposing the subprocess module -.. include:: ../includes/wasm-ios-notavail.rst +.. include:: ../includes/wasm-mobile-notavail.rst Using the :mod:`subprocess` Module ---------------------------------- @@ -608,7 +608,7 @@ functions. If *group* is not ``None``, the setregid() system call will be made in the child process prior to the execution of the subprocess. If the provided - value is a string, it will be looked up via :func:`grp.getgrnam()` and + value is a string, it will be looked up via :func:`grp.getgrnam` and the value in ``gr_gid`` will be used. If the value is an integer, it will be passed verbatim. (POSIX only) @@ -618,7 +618,7 @@ functions. 
If *extra_groups* is not ``None``, the setgroups() system call will be made in the child process prior to the execution of the subprocess. Strings provided in *extra_groups* will be looked up via - :func:`grp.getgrnam()` and the values in ``gr_gid`` will be used. + :func:`grp.getgrnam` and the values in ``gr_gid`` will be used. Integer values will be passed verbatim. (POSIX only) .. availability:: POSIX @@ -626,7 +626,7 @@ functions. If *user* is not ``None``, the setreuid() system call will be made in the child process prior to the execution of the subprocess. If the provided - value is a string, it will be looked up via :func:`pwd.getpwnam()` and + value is a string, it will be looked up via :func:`pwd.getpwnam` and the value in ``pw_uid`` will be used. If the value is an integer, it will be passed verbatim. (POSIX only) @@ -1126,7 +1126,7 @@ The :mod:`subprocess` module exposes the following constants. .. data:: NORMAL_PRIORITY_CLASS A :class:`Popen` ``creationflags`` parameter to specify that a new process - will have an normal priority. (default) + will have a normal priority. (default) .. versionadded:: 3.7 diff --git a/Doc/library/sys.monitoring.rst b/Doc/library/sys.monitoring.rst index 0fa06da5220..5f08deba332 100644 --- a/Doc/library/sys.monitoring.rst +++ b/Doc/library/sys.monitoring.rst @@ -258,7 +258,7 @@ Per code object events Events can also be controlled on a per code object basis. The functions defined below which accept a :class:`types.CodeType` should be prepared to accept a look-alike object from functions which are not defined -in Python (see :ref:`monitoring`). +in Python (see :ref:`c-api-monitoring`). .. function:: get_local_events(tool_id: int, code: CodeType, /) -> int diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index ed809d04167..1e1f65851fd 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -735,11 +735,11 @@ always available. regardless of their size. This function is mainly useful for tracking and debugging memory leaks. Because of the interpreter's internal caches, the result can vary from call to call; you may have to call - :func:`_clear_internal_caches()` and :func:`gc.collect()` to get more + :func:`_clear_internal_caches` and :func:`gc.collect` to get more predictable results. If a Python build or implementation cannot reasonably compute this - information, :func:`getallocatedblocks()` is allowed to return 0 instead. + information, :func:`getallocatedblocks` is allowed to return 0 instead. .. versionadded:: 3.4 diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index 9556da808f8..3921908b7c7 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -305,7 +305,7 @@ Installation path functions mix with those by the other. End users should not use this function, but :func:`get_default_scheme` and - :func:`get_preferred_scheme()` instead. + :func:`get_preferred_scheme` instead. .. versionadded:: 3.10 @@ -376,7 +376,7 @@ Other functions This is used mainly to distinguish platform-specific build directories and platform-specific built distributions. Typically includes the OS name and - version and the architecture (as supplied by 'os.uname()'), although the + version and the architecture (as supplied by :func:`os.uname`), although the exact information included depends on the OS; e.g., on Linux, the kernel version isn't particularly important. 
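The platform string mentioned in the sysconfig change above is a one-liner to inspect; the exact value depends on the OS and architecture::

    import sysconfig

    print(sysconfig.get_platform())          # e.g. "linux-x86_64" or "macosx-14.0-arm64"
    print(sysconfig.get_python_version())    # e.g. "3.13"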
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index 5b624f35331..08a2951bc42 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -617,7 +617,7 @@ be finalized; only the internally used file object will be closed. See the it is best practice to only do so in top-level applications or :mod:`site configuration `. To set a global default this way, a filter function needs to be wrapped in - :func:`staticmethod()` to prevent injection of a ``self`` argument. + :func:`staticmethod` to prevent injection of a ``self`` argument. .. method:: TarFile.add(name, arcname=None, recursive=True, *, filter=None) diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 2a61f0aaef2..04d28aee0f8 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -946,7 +946,7 @@ The :mod:`test.support` module defines the following functions: other modules, possibly a C backend (like ``csv`` and its ``_csv``). The *extra* argument can be a set of names that wouldn't otherwise be automatically - detected as "public", like objects without a proper ``__module__`` + detected as "public", like objects without a proper :attr:`~definition.__module__` attribute. If provided, it will be added to the automatically detected ones. The *not_exported* argument can be a set of names that must not be treated @@ -1701,7 +1701,7 @@ The :mod:`test.support.warnings_helper` module provides support for warnings tes .. function:: check_warnings(*filters, quiet=True) - A convenience wrapper for :func:`warnings.catch_warnings()` that makes it + A convenience wrapper for :func:`warnings.catch_warnings` that makes it easier to test that a warning was correctly raised. It is approximately equivalent to calling ``warnings.catch_warnings(record=True)`` with :meth:`warnings.simplefilter` set to ``always`` and with the option to diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 7b259e22dc7..cb82fea3776 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -412,7 +412,7 @@ since it is impossible to detect the termination of alien threads. timeout occurs. When the *timeout* argument is present and not ``None``, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). As :meth:`~Thread.join` always returns ``None``, you must call :meth:`~Thread.is_alive` after :meth:`~Thread.join` to decide whether a timeout happened -- if the thread is still alive, the @@ -794,7 +794,7 @@ item to the buffer only needs to wake up one consumer thread. occurs. Once awakened or timed out, it re-acquires the lock and returns. When the *timeout* argument is present and not ``None``, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). When the underlying lock is an :class:`RLock`, it is not released using @@ -1018,10 +1018,10 @@ method. The :meth:`~Event.wait` method blocks until the flag is true. has not expired. The return value represents the reason that this blocking method returned; ``True`` if returning because the internal flag is set to true, or ``False`` if a timeout is given and - the the internal flag did not become true within the given wait time. + the internal flag did not become true within the given wait time. 
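      A small sketch of the return-value behaviour just described (the timeout
      values are arbitrary and chosen only for illustration)::

         >>> import threading
         >>> event = threading.Event()
         >>> event.wait(timeout=0.01)   # flag never set: call times out
         False
         >>> event.set()
         >>> event.wait(timeout=0.01)   # flag already set: returns immediately
         True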
When the timeout argument is present and not ``None``, it should be a - floating point number specifying a timeout for the operation in seconds, + floating-point number specifying a timeout for the operation in seconds, or fractions thereof. .. versionchanged:: 3.1 diff --git a/Doc/library/time.rst b/Doc/library/time.rst index 4d7661715aa..a0bf13fc0a3 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -69,7 +69,7 @@ An explanation of some terminology and conventions is in order. systems, the clock "ticks" only 50 or 100 times a second. * On the other hand, the precision of :func:`.time` and :func:`sleep` is better - than their Unix equivalents: times are expressed as floating point numbers, + than their Unix equivalents: times are expressed as floating-point numbers, :func:`.time` returns the most accurate time available (using Unix :c:func:`!gettimeofday` where available), and :func:`sleep` will accept a time with a nonzero fraction (Unix :c:func:`!select` is used to implement this, where @@ -193,7 +193,7 @@ Functions Use :func:`clock_settime_ns` to avoid the precision loss caused by the :class:`float` type. - .. availability:: Unix. + .. availability:: Unix, not Android, not iOS. .. versionadded:: 3.3 @@ -202,7 +202,7 @@ Functions Similar to :func:`clock_settime` but set time with nanoseconds. - .. availability:: Unix. + .. availability:: Unix, not Android, not iOS. .. versionadded:: 3.7 @@ -273,7 +273,7 @@ Functions This is the inverse function of :func:`localtime`. Its argument is the :class:`struct_time` or full 9-tuple (since the dst flag is needed; use ``-1`` as the dst flag if it is unknown) which expresses the time in *local* time, not - UTC. It returns a floating point number, for compatibility with :func:`.time`. + UTC. It returns a floating-point number, for compatibility with :func:`.time`. If the input value cannot be represented as a valid time, either :exc:`OverflowError` or :exc:`ValueError` will be raised (which depends on whether the invalid value is caught by Python or the underlying C libraries). @@ -327,7 +327,7 @@ Functions .. impl-detail:: - On CPython, use the same clock than :func:`time.monotonic()` and is a + On CPython, use the same clock than :func:`time.monotonic` and is a monotonic clock, i.e. a clock that cannot go backwards. Use :func:`perf_counter_ns` to avoid the precision loss caused by the @@ -339,7 +339,7 @@ Functions On Windows, the function is now system-wide. .. versionchanged:: 3.13 - Use the same clock than :func:`time.monotonic()`. + Use the same clock than :func:`time.monotonic`. .. function:: perf_counter_ns() -> int @@ -376,7 +376,7 @@ Functions .. function:: sleep(secs) Suspend execution of the calling thread for the given number of seconds. - The argument may be a floating point number to indicate a more precise sleep + The argument may be a floating-point number to indicate a more precise sleep time. If the sleep is interrupted by a signal and no exception is raised by the @@ -665,13 +665,13 @@ Functions .. function:: time() -> float - Return the time in seconds since the epoch_ as a floating point + Return the time in seconds since the epoch_ as a floating-point number. The handling of `leap seconds`_ is platform dependent. On Windows and most Unix systems, the leap seconds are not counted towards the time in seconds since the epoch_. This is commonly referred to as `Unix time `_. 
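   A quick illustration (the value shown is invented; real output is simply
   the current time on the machine running the example)::

      >>> import time
      >>> time.time()
      1718000000.123456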
- Note that even though the time is always returned as a floating point + Note that even though the time is always returned as a floating-point number, not all systems provide time with a better precision than 1 second. While this function normally returns non-decreasing values, it can return a lower value than a previous call if the system clock has been set back diff --git a/Doc/library/tkinter.rst b/Doc/library/tkinter.rst index f40790c1175..f284988daf2 100644 --- a/Doc/library/tkinter.rst +++ b/Doc/library/tkinter.rst @@ -58,7 +58,7 @@ details that are unchanged. * `Modern Tkinter for Busy Python Developers `_ By Mark Roseman. (ISBN 978-1999149567) - * `Python GUI programming with Tkinter `_ + * `Python GUI programming with Tkinter `_ By Alan D. Moore. (ISBN 978-1788835886) * `Programming Python `_ @@ -983,10 +983,10 @@ option (other options are available as well). Added the :class:`!PhotoImage` method :meth:`!copy_replace` to copy a region from one image to other image, possibly with pixel zooming and/or subsampling. - Add *from_coords* parameter to :class:`!PhotoImage` methods :meth:`!copy()`, - :meth:`!zoom()` and :meth:`!subsample()`. + Add *from_coords* parameter to :class:`!PhotoImage` methods :meth:`!copy`, + :meth:`!zoom` and :meth:`!subsample`. Add *zoom* and *subsample* parameters to :class:`!PhotoImage` method - :meth:`!copy()`. + :meth:`!copy`. The image object can then be used wherever an ``image`` option is supported by some widget (e.g. labels, buttons, menus). In these cases, Tk will not keep a diff --git a/Doc/library/token.rst b/Doc/library/token.rst index 919ff590b72..0cc9dddd91e 100644 --- a/Doc/library/token.rst +++ b/Doc/library/token.rst @@ -75,7 +75,7 @@ the :mod:`tokenize` module. :noindex: Token value indicating that a type comment was recognized. Such - tokens are only produced when :func:`ast.parse()` is invoked with + tokens are only produced when :func:`ast.parse` is invoked with ``type_comments=True``. diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst index b523ad93b35..521a7a17fb3 100644 --- a/Doc/library/tomllib.rst +++ b/Doc/library/tomllib.rst @@ -13,7 +13,7 @@ -------------- -This module provides an interface for parsing TOML (Tom's Obvious Minimal +This module provides an interface for parsing TOML 1.0.0 (Tom's Obvious Minimal Language, `https://toml.io `_). This module does not support writing TOML. diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst index bfd2c3efc4b..401e12be45f 100644 --- a/Doc/library/traceback.rst +++ b/Doc/library/traceback.rst @@ -42,6 +42,14 @@ The module defines the following functions: :term:`file ` or :term:`file-like object` to receive the output. + .. note:: + + The meaning of the *limit* parameter is different than the meaning + of :const:`sys.tracebacklimit`. A negative *limit* value corresponds to + a positive value of :const:`!sys.tracebacklimit`, whereas the behaviour of + a positive *limit* value cannot be achieved with + :const:`!sys.tracebacklimit`. + .. versionchanged:: 3.5 Added negative *limit* support. diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 116868c24be..3c3c760c206 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -91,8 +91,8 @@ Dynamic Type Creation For classes that have an ``__orig_bases__`` attribute, this function returns the value of ``cls.__orig_bases__``. - For classes without the ``__orig_bases__`` attribute, ``cls.__bases__`` is - returned. 
+ For classes without the ``__orig_bases__`` attribute, + :attr:`cls.__bases__ ` is returned. Examples:: @@ -392,7 +392,7 @@ Standard names are defined for the following types: In addition, when a class is defined with a :attr:`~object.__slots__` attribute, then for each slot, an instance of :class:`!MemberDescriptorType` will be added as an attribute - on the class. This allows the slot to appear in the class's :attr:`~object.__dict__`. + on the class. This allows the slot to appear in the class's :attr:`~type.__dict__`. .. impl-detail:: diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 53fe6ddf9b2..40df38c5503 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -208,7 +208,7 @@ Annotating callable objects =========================== Functions -- or other :term:`callable` objects -- can be annotated using -:class:`collections.abc.Callable` or :data:`typing.Callable`. +:class:`collections.abc.Callable` or deprecated :data:`typing.Callable`. ``Callable[[int], str]`` signifies a function that takes a single parameter of type :class:`int` and returns a :class:`str`. @@ -401,7 +401,7 @@ The type of class objects ========================= A variable annotated with ``C`` may accept a value of type ``C``. In -contrast, a variable annotated with ``type[C]`` (or +contrast, a variable annotated with ``type[C]`` (or deprecated :class:`typing.Type[C] `) may accept values that are classes themselves -- specifically, it will accept the *class object* of ``C``. For example:: @@ -441,6 +441,87 @@ For example:: ``type[Any]`` is equivalent to :class:`type`, which is the root of Python's :ref:`metaclass hierarchy `. + +.. _annotating-generators-and-coroutines: + +Annotating generators and coroutines +==================================== + +A generator can be annotated using the generic type +:class:`Generator[YieldType, SendType, ReturnType] `. +For example:: + + def echo_round() -> Generator[int, float, str]: + sent = yield 0 + while sent >= 0: + sent = yield round(sent) + return 'Done' + +Note that unlike many other generic classes in the standard library, +the ``SendType`` of :class:`~collections.abc.Generator` behaves +contravariantly, not covariantly or invariantly. + +The ``SendType`` and ``ReturnType`` parameters default to :const:`!None`:: + + def infinite_stream(start: int) -> Generator[int]: + while True: + yield start + start += 1 + +It is also possible to set these types explicitly:: + + def infinite_stream(start: int) -> Generator[int, None, None]: + while True: + yield start + start += 1 + +Simple generators that only ever yield values can also be annotated +as having a return type of either +:class:`Iterable[YieldType] ` +or :class:`Iterator[YieldType] `:: + + def infinite_stream(start: int) -> Iterator[int]: + while True: + yield start + start += 1 + +Async generators are handled in a similar fashion, but don't +expect a ``ReturnType`` type argument +(:class:`AsyncGenerator[YieldType, SendType] `). 
+The ``SendType`` argument defaults to :const:`!None`, so the following definitions +are equivalent:: + + async def infinite_stream(start: int) -> AsyncGenerator[int]: + while True: + yield start + start = await increment(start) + + async def infinite_stream(start: int) -> AsyncGenerator[int, None]: + while True: + yield start + start = await increment(start) + +As in the synchronous case, +:class:`AsyncIterable[YieldType] ` +and :class:`AsyncIterator[YieldType] ` are +available as well:: + + async def infinite_stream(start: int) -> AsyncIterator[int]: + while True: + yield start + start = await increment(start) + +Coroutines can be annotated using +:class:`Coroutine[YieldType, SendType, ReturnType] `. +Generic arguments correspond to those of :class:`~collections.abc.Generator`, +for example:: + + from collections.abc import Coroutine + c: Coroutine[list[str], str, int] # Some coroutine defined elsewhere + x = c.send('hi') # Inferred type of 'x' is list[str] + async def bar() -> None: + y = await c # Inferred type of 'y' is int + .. _user-defined-generics: User-defined generic types @@ -1235,7 +1316,7 @@ These can be used as types in annotations. They all support subscription using year: int def mutate_movie(m: Movie) -> None: - m["year"] = 1992 # allowed + m["year"] = 1999 # allowed m["title"] = "The Matrix" # typechecker error There is no runtime checking for this property. @@ -1377,6 +1458,23 @@ These can be used as types in annotations. They all support subscription using >>> X.__metadata__ ('very', 'important', 'metadata') + * At runtime, if you want to retrieve the original + type wrapped by ``Annotated``, use the :attr:`!__origin__` attribute: + + .. doctest:: + + >>> from typing import Annotated, get_origin + >>> Password = Annotated[str, "secret"] + >>> Password.__origin__ + + + Note that using :func:`get_origin` will return ``Annotated`` itself: + + .. doctest:: + + >>> get_origin(Password) + typing.Annotated + .. seealso:: :pep:`593` - Flexible function and variable annotations @@ -2175,7 +2273,9 @@ types. Backward-compatible usage:: - # For creating a generic NamedTuple on Python 3.11 or lower + # For creating a generic NamedTuple on Python 3.11 + T = TypeVar("T") + class Group(NamedTuple, Generic[T]): key: T group: list[T] @@ -2709,7 +2809,7 @@ Functions and decorators .. seealso:: `Unreachable Code and Exhaustiveness Checking - `__ has more + `__ has more information about exhaustiveness checking with static typing. .. versionadded:: 3.11 @@ -3095,7 +3195,8 @@ Introspection helpers empty dictionary is returned. * If *obj* is a class ``C``, the function returns a dictionary that merges annotations from ``C``'s base classes with those on ``C`` directly. This - is done by traversing ``C.__mro__`` and iteratively combining + is done by traversing :attr:`C.__mro__ ` and iteratively + combining ``__annotations__`` dictionaries. Annotations on classes appearing earlier in the :term:`method resolution order` always take precedence over annotations on classes appearing later in the method resolution order. @@ -3141,6 +3242,7 @@ Introspection helpers assert get_origin(str) is None assert get_origin(Dict[str, int]) is dict assert get_origin(Union[int, str]) is Union + assert get_origin(Annotated[str, "metadata"]) is Annotated P = ParamSpec('P') assert get_origin(P.args) is P assert get_origin(P.kwargs) is P @@ -3318,14 +3420,9 @@ Aliases to built-in types Deprecated alias to :class:`dict`. 
Note that to annotate arguments, it is preferred - to use an abstract collection type such as :class:`Mapping` + to use an abstract collection type such as :class:`~collections.abc.Mapping` rather than to use :class:`dict` or :class:`!typing.Dict`. - This type can be used as follows:: - - def count_words(text: str) -> Dict[str, int]: - ... - .. deprecated:: 3.9 :class:`builtins.dict ` now supports subscripting (``[]``). See :pep:`585` and :ref:`types-genericalias`. @@ -3335,16 +3432,9 @@ Aliases to built-in types Deprecated alias to :class:`list`. Note that to annotate arguments, it is preferred - to use an abstract collection type such as :class:`Sequence` or - :class:`Iterable` rather than to use :class:`list` or :class:`!typing.List`. - - This type may be used as follows:: - - def vec2[T: (int, float)](x: T, y: T) -> List[T]: - return [x, y] - - def keep_positives[T: (int, float)](vector: Sequence[T]) -> List[T]: - return [item for item in vector if item > 0] + to use an abstract collection type such as + :class:`~collections.abc.Sequence` or :class:`~collections.abc.Iterable` + rather than to use :class:`list` or :class:`!typing.List`. .. deprecated:: 3.9 :class:`builtins.list ` now supports subscripting (``[]``). @@ -3355,8 +3445,8 @@ Aliases to built-in types Deprecated alias to :class:`builtins.set `. Note that to annotate arguments, it is preferred - to use an abstract collection type such as :class:`AbstractSet` - rather than to use :class:`set` or :class:`!typing.Set`. + to use an abstract collection type such as :class:`collections.abc.Set` + rather than to use :class:`set` or :class:`typing.Set`. .. deprecated:: 3.9 :class:`builtins.set ` now supports subscripting (``[]``). @@ -3552,11 +3642,6 @@ Aliases to container ABCs in :mod:`collections.abc` Deprecated alias to :class:`collections.abc.Mapping`. - This type can be used as follows:: - - def get_position_in_index(word_list: Mapping[str, int], word: str) -> int: - return word_list[word] - .. deprecated:: 3.9 :class:`collections.abc.Mapping` now supports subscripting (``[]``). See :pep:`585` and :ref:`types-genericalias`. @@ -3620,14 +3705,9 @@ Aliases to asynchronous ABCs in :mod:`collections.abc` Deprecated alias to :class:`collections.abc.Coroutine`. - The variance and order of type variables - correspond to those of :class:`Generator`, for example:: - - from collections.abc import Coroutine - c: Coroutine[list[str], str, int] # Some coroutine defined elsewhere - x = c.send('hi') # Inferred type of 'x' is list[str] - async def bar() -> None: - y = await c # Inferred type of 'y' is int + See :ref:`annotating-generators-and-coroutines` + for details on using :class:`collections.abc.Coroutine` + and ``typing.Coroutine`` in type annotations. .. versionadded:: 3.5.3 @@ -3639,40 +3719,9 @@ Aliases to asynchronous ABCs in :mod:`collections.abc` Deprecated alias to :class:`collections.abc.AsyncGenerator`. - An async generator can be annotated by the generic type - ``AsyncGenerator[YieldType, SendType]``. For example:: - - async def echo_round() -> AsyncGenerator[int, float]: - sent = yield 0 - while sent >= 0.0: - rounded = await round(sent) - sent = yield rounded - - Unlike normal generators, async generators cannot return a value, so there - is no ``ReturnType`` type parameter. As with :class:`Generator`, the - ``SendType`` behaves contravariantly. 
- - The ``SendType`` defaults to :const:`!None`:: - - async def infinite_stream(start: int) -> AsyncGenerator[int]: - while True: - yield start - start = await increment(start) - - It is also possible to set this type explicitly:: - - async def infinite_stream(start: int) -> AsyncGenerator[int, None]: - while True: - yield start - start = await increment(start) - - Alternatively, annotate your generator as having a return type of - either ``AsyncIterable[YieldType]`` or ``AsyncIterator[YieldType]``:: - - async def infinite_stream(start: int) -> AsyncIterator[int]: - while True: - yield start - start = await increment(start) + See :ref:`annotating-generators-and-coroutines` + for details on using :class:`collections.abc.AsyncGenerator` + and ``typing.AsyncGenerator`` in type annotations. .. versionadded:: 3.6.1 @@ -3754,40 +3803,9 @@ Aliases to other ABCs in :mod:`collections.abc` Deprecated alias to :class:`collections.abc.Generator`. - A generator can be annotated by the generic type - ``Generator[YieldType, SendType, ReturnType]``. For example:: - - def echo_round() -> Generator[int, float, str]: - sent = yield 0 - while sent >= 0: - sent = yield round(sent) - return 'Done' - - Note that unlike many other generics in the typing module, the ``SendType`` - of :class:`Generator` behaves contravariantly, not covariantly or - invariantly. - - The ``SendType`` and ``ReturnType`` parameters default to :const:`!None`:: - - def infinite_stream(start: int) -> Generator[int]: - while True: - yield start - start += 1 - - It is also possible to set these types explicitly:: - - def infinite_stream(start: int) -> Generator[int, None, None]: - while True: - yield start - start += 1 - - Alternatively, annotate your generator as having a return type of - either ``Iterable[YieldType]`` or ``Iterator[YieldType]``:: - - def infinite_stream(start: int) -> Iterator[int]: - while True: - yield start - start += 1 + See :ref:`annotating-generators-and-coroutines` + for details on using :class:`collections.abc.Generator` + and ``typing.Generator`` in type annotations. .. deprecated:: 3.9 :class:`collections.abc.Generator` now supports subscripting (``[]``). diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index d8ba24c3146..55ebf1acdb7 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -239,7 +239,7 @@ the *new_callable* argument to :func:`patch`. Accessing any attribute not in this list will raise an :exc:`AttributeError`. If *spec* is an object (rather than a list of strings) then - :attr:`~instance.__class__` returns the class of the spec object. This + :attr:`~object.__class__` returns the class of the spec object. This allows mocks to pass :func:`isinstance` tests. * *spec_set*: A stricter variant of *spec*. If used, attempting to *set* @@ -860,6 +860,20 @@ object:: 3 >>> p.assert_called_once_with() +.. caution:: + + If an :exc:`AttributeError` is raised by :class:`PropertyMock`, + it will be interpreted as a missing descriptor and + :meth:`~object.__getattr__` will be called on the parent mock:: + + >>> m = MagicMock() + >>> no_attribute = PropertyMock(side_effect=AttributeError) + >>> type(m).my_property = no_attribute + >>> m.my_property + + + See :meth:`~object.__getattr__` for details. + .. 
class:: AsyncMock(spec=None, side_effect=None, return_value=DEFAULT, wraps=None, name=None, spec_set=None, unsafe=False, **kwargs) diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index eb42210e096..c49aba69b12 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -2316,8 +2316,8 @@ Loading and running tests (see :ref:`Warning control `), otherwise it will be set to ``'default'``. - Calling ``main`` actually returns an instance of the ``TestProgram`` class. - This stores the result of the tests run as the ``result`` attribute. + Calling ``main`` returns an object with the ``result`` attribute that contains + the result of the tests run as a :class:`unittest.TestResult`. .. versionchanged:: 3.1 The *exit* parameter was added. @@ -2529,7 +2529,7 @@ Signal Handling .. versionadded:: 3.2 The :option:`-c/--catch ` command-line option to unittest, -along with the ``catchbreak`` parameter to :func:`unittest.main()`, provide +along with the ``catchbreak`` parameter to :func:`unittest.main`, provide more friendly handling of control-C during a test run. With catch break behavior enabled control-C will allow the currently running test to complete, and the test run will then end and report all the results so far. A second diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst index 27909b763e9..fb5353e1895 100644 --- a/Doc/library/urllib.parse.rst +++ b/Doc/library/urllib.parse.rst @@ -22,11 +22,19 @@ to an absolute URL given a "base URL." The module has been designed to match the internet RFC on Relative Uniform Resource Locators. It supports the following URL schemes: ``file``, ``ftp``, -``gopher``, ``hdl``, ``http``, ``https``, ``imap``, ``mailto``, ``mms``, +``gopher``, ``hdl``, ``http``, ``https``, ``imap``, ``itms-services``, ``mailto``, ``mms``, ``news``, ``nntp``, ``prospero``, ``rsync``, ``rtsp``, ``rtsps``, ``rtspu``, ``sftp``, ``shttp``, ``sip``, ``sips``, ``snews``, ``svn``, ``svn+ssh``, ``telnet``, ``wais``, ``ws``, ``wss``. +.. impl-detail:: + + The inclusion of the ``itms-services`` URL scheme can prevent an app from + passing Apple's App Store review process for the macOS and iOS App Stores. + Handling for the ``itms-services`` scheme is always removed on iOS; on + macOS, it *may* be removed if CPython has been built with the + :option:`--with-app-store-compliance` option. + The :mod:`urllib.parse` module defines functions that fall into two broad categories: URL parsing and URL quoting. These are covered in detail in the following sections. diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 754405e0fbe..ce82552a3ae 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -241,7 +241,7 @@ The following classes are provided: *method* should be a string that indicates the HTTP request method that will be used (e.g. ``'HEAD'``). If provided, its value is stored in the - :attr:`~Request.method` attribute and is used by :meth:`get_method()`. + :attr:`~Request.method` attribute and is used by :meth:`get_method`. The default is ``'GET'`` if *data* is ``None`` or ``'POST'`` otherwise. Subclasses may indicate a different default method by setting the :attr:`~Request.method` attribute in the class itself. @@ -1092,7 +1092,7 @@ FileHandler Objects .. versionchanged:: 3.2 This method is applicable only for local hostnames. When a remote - hostname is given, an :exc:`~urllib.error.URLError` is raised. + hostname is given, a :exc:`~urllib.error.URLError` is raised. .. 
_data-handler-objects: @@ -1107,7 +1107,7 @@ DataHandler Objects ignores white spaces in base64 encoded data URLs so the URL may be wrapped in whatever source file it comes from. But even though some browsers don't mind about a missing padding at the end of a base64 encoded data URL, this - implementation will raise an :exc:`ValueError` in that case. + implementation will raise a :exc:`ValueError` in that case. .. _ftp-handler-objects: diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index fff1075c247..e2c77963ff3 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -37,14 +37,14 @@ A virtual environment is (amongst other things): are by default isolated from software in other virtual environments and Python interpreters and libraries installed in the operating system. -* Contained in a directory, conventionally either named ``venv`` or ``.venv`` in +* Contained in a directory, conventionally named ``.venv`` or ``venv`` in the project directory, or under a container directory for lots of virtual environments, such as ``~/.virtualenvs``. * Not checked into source control systems such as Git. * Considered as disposable -- it should be simple to delete and recreate it from - scratch. You don't place any project code in the environment + scratch. You don't place any project code in the environment. * Not considered as movable or copyable -- you just recreate the same environment in the target location. @@ -56,12 +56,132 @@ See :pep:`405` for more background on Python virtual environments. `Python Packaging User Guide: Creating and using virtual environments `__ -.. include:: ../includes/wasm-ios-notavail.rst +.. include:: ../includes/wasm-mobile-notavail.rst Creating virtual environments ----------------------------- -.. include:: /using/venv-create.inc +:ref:`Virtual environments ` are created by executing the ``venv`` +module: + +.. code-block:: shell + + python -m venv /path/to/new/virtual/environment + +This creates the target directory (including parent directories as needed) +and places a :file:`pyvenv.cfg` file in it with a ``home`` key +pointing to the Python installation from which the command was run. +It also creates a :file:`bin` (or :file:`Scripts` on Windows) subdirectory +containing a copy or symlink of the Python executable +(as appropriate for the platform or arguments used at environment creation time). +It also creates a :file:`lib/pythonX.Y/site-packages` subdirectory +(on Windows, this is :file:`Lib\site-packages`). +If an existing directory is specified, it will be re-used. + +.. versionchanged:: 3.5 + The use of ``venv`` is now recommended for creating virtual environments. + +.. deprecated-removed:: 3.6 3.8 + :program:`pyvenv` was the recommended tool for creating virtual environments + for Python 3.3 and 3.4, and replaced in 3.5 by executing ``venv`` directly. + +.. highlight:: none + +On Windows, invoke the ``venv`` command as follows: + +.. code-block:: ps1con + + PS> python -m venv C:\path\to\new\virtual\environment + +The command, if run with ``-h``, will show the available options:: + + usage: venv [-h] [--system-site-packages] [--symlinks | --copies] [--clear] + [--upgrade] [--without-pip] [--prompt PROMPT] [--upgrade-deps] + [--without-scm-ignore-files] + ENV_DIR [ENV_DIR ...] + + Creates virtual Python environments in one or more target directories. + + positional arguments: + ENV_DIR A directory to create the environment in. 
+ + options: + -h, --help show this help message and exit + --system-site-packages + Give the virtual environment access to the system + site-packages dir. + --symlinks Try to use symlinks rather than copies, when + symlinks are not the default for the platform. + --copies Try to use copies rather than symlinks, even when + symlinks are the default for the platform. + --clear Delete the contents of the environment directory + if it already exists, before environment creation. + --upgrade Upgrade the environment directory to use this + version of Python, assuming Python has been + upgraded in-place. + --without-pip Skips installing or upgrading pip in the virtual + environment (pip is bootstrapped by default) + --prompt PROMPT Provides an alternative prompt prefix for this + environment. + --upgrade-deps Upgrade core dependencies (pip) to the latest + version in PyPI + --without-scm-ignore-files + Skips adding SCM ignore files to the environment + directory (Git is supported by default). + + Once an environment has been created, you may wish to activate it, e.g. by + sourcing an activate script in its bin directory. + + +.. versionchanged:: 3.4 + Installs pip by default, added the ``--without-pip`` and ``--copies`` + options. + +.. versionchanged:: 3.4 + In earlier versions, if the target directory already existed, an error was + raised, unless the ``--clear`` or ``--upgrade`` option was provided. + +.. versionchanged:: 3.9 + Add ``--upgrade-deps`` option to upgrade pip + setuptools to the latest on PyPI. + +.. versionchanged:: 3.12 + + ``setuptools`` is no longer a core venv dependency. + +.. versionchanged:: 3.13 + + Added the ``--without-scm-ignore-files`` option. +.. versionchanged:: 3.13 + ``venv`` now creates a :file:`.gitignore` file for Git by default. + +.. note:: + While symlinks are supported on Windows, they are not recommended. Of + particular note is that double-clicking ``python.exe`` in File Explorer + will resolve the symlink eagerly and ignore the virtual environment. + +.. note:: + On Microsoft Windows, it may be required to enable the ``Activate.ps1`` + script by setting the execution policy for the user. You can do this by + issuing the following PowerShell command: + + .. code-block:: powershell + + PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + + See `About Execution Policies + `_ + for more information. + +The created :file:`pyvenv.cfg` file also includes the +``include-system-site-packages`` key, set to ``true`` if ``venv`` is +run with the ``--system-site-packages`` option, ``false`` otherwise. + +Unless the ``--without-pip`` option is given, :mod:`ensurepip` will be +invoked to bootstrap ``pip`` into the virtual environment. + +Multiple paths can be given to ``venv``, in which case an identical virtual +environment will be created, according to the given options, at each provided +path. .. _venv-explanation: @@ -117,7 +237,7 @@ should be runnable without activating it. In order to achieve this, scripts installed into virtual environments have a "shebang" line which points to the environment's Python interpreter, -i.e. :samp:`#!/{}/bin/python`. +:samp:`#!/{}/bin/python`. This means that the script will run with that interpreter regardless of the value of :envvar:`PATH`. On Windows, "shebang" line processing is supported if you have the :ref:`launcher` installed. Thus, double-clicking an installed @@ -168,31 +288,31 @@ creation according to their needs, the :class:`EnvBuilder` class. 
The :class:`EnvBuilder` class accepts the following keyword arguments on instantiation: - * ``system_site_packages`` -- a Boolean value indicating that the system Python + * *system_site_packages* -- a boolean value indicating that the system Python site-packages should be available to the environment (defaults to ``False``). - * ``clear`` -- a Boolean value which, if true, will delete the contents of + * *clear* -- a boolean value which, if true, will delete the contents of any existing target directory, before creating the environment. - * ``symlinks`` -- a Boolean value indicating whether to attempt to symlink the + * *symlinks* -- a boolean value indicating whether to attempt to symlink the Python binary rather than copying. - * ``upgrade`` -- a Boolean value which, if true, will upgrade an existing + * *upgrade* -- a boolean value which, if true, will upgrade an existing environment with the running Python - for use when that Python has been upgraded in-place (defaults to ``False``). - * ``with_pip`` -- a Boolean value which, if true, ensures pip is + * *with_pip* -- a boolean value which, if true, ensures pip is installed in the virtual environment. This uses :mod:`ensurepip` with the ``--default-pip`` option. - * ``prompt`` -- a String to be used after virtual environment is activated + * *prompt* -- a string to be used after virtual environment is activated (defaults to ``None`` which means directory name of the environment would be used). If the special string ``"."`` is provided, the basename of the current directory is used as the prompt. - * ``upgrade_deps`` -- Update the base venv modules to the latest on PyPI + * *upgrade_deps* -- Update the base venv modules to the latest on PyPI - * ``scm_ignore_files`` -- Create ignore files based for the specified source + * *scm_ignore_files* -- Create ignore files based for the specified source control managers (SCM) in the iterable. Support is defined by having a method named ``create_{scm}_ignore_file``. The only value supported by default is ``"git"`` via :meth:`create_git_ignore_file`. @@ -210,10 +330,7 @@ creation according to their needs, the :class:`EnvBuilder` class. .. versionchanged:: 3.13 Added the ``scm_ignore_files`` parameter - Creators of third-party virtual environment tools will be free to use the - provided :class:`EnvBuilder` class as a base class. - - The returned env-builder is an object which has a method, ``create``: + :class:`EnvBuilder` may be used as a base class. .. method:: create(env_dir) @@ -313,14 +430,14 @@ creation according to their needs, the :class:`EnvBuilder` class. .. method:: upgrade_dependencies(context) - Upgrades the core venv dependency packages (currently ``pip``) + Upgrades the core venv dependency packages (currently :pypi:`pip`) in the environment. This is done by shelling out to the ``pip`` executable in the environment. .. versionadded:: 3.9 .. versionchanged:: 3.12 - ``setuptools`` is no longer a core venv dependency. + :pypi:`setuptools` is no longer a core venv dependency. .. method:: post_setup(context) @@ -328,25 +445,15 @@ creation according to their needs, the :class:`EnvBuilder` class. implementations to pre-install packages in the virtual environment or perform other post-creation steps. - .. versionchanged:: 3.7.2 - Windows now uses redirector scripts for ``python[w].exe`` instead of - copying the actual binaries. In 3.7.2 only :meth:`setup_python` does - nothing unless running from a build in the source tree. - - .. 
versionchanged:: 3.7.3 - Windows copies the redirector scripts as part of :meth:`setup_python` - instead of :meth:`setup_scripts`. This was not the case in 3.7.2. - When using symlinks, the original executables will be linked. - - In addition, :class:`EnvBuilder` provides this utility method that can be - called from :meth:`setup_scripts` or :meth:`post_setup` in subclasses to - assist in installing custom scripts into the virtual environment. - .. method:: install_scripts(context, path) + This method can be + called from :meth:`setup_scripts` or :meth:`post_setup` in subclasses to + assist in installing custom scripts into the virtual environment. + *path* is the path to a directory that should contain subdirectories - "common", "posix", "nt", each containing scripts destined for the bin - directory in the environment. The contents of "common" and the + ``common``, ``posix``, ``nt``; each containing scripts destined for the + ``bin`` directory in the environment. The contents of ``common`` and the directory corresponding to :data:`os.name` are copied after some text replacement of placeholders: @@ -371,10 +478,20 @@ creation according to their needs, the :class:`EnvBuilder` class. .. method:: create_git_ignore_file(context) Creates a ``.gitignore`` file within the virtual environment that causes - the entire directory to be ignored by the ``git`` source control manager. + the entire directory to be ignored by the Git source control manager. .. versionadded:: 3.13 + .. versionchanged:: 3.7.2 + Windows now uses redirector scripts for ``python[w].exe`` instead of + copying the actual binaries. In 3.7.2 only :meth:`setup_python` does + nothing unless running from a build in the source tree. + + .. versionchanged:: 3.7.3 + Windows copies the redirector scripts as part of :meth:`setup_python` + instead of :meth:`setup_scripts`. This was not the case in 3.7.2. + When using symlinks, the original executables will be linked. + There is also a module-level convenience function: .. function:: create(env_dir, system_site_packages=False, clear=False, \ @@ -387,16 +504,16 @@ There is also a module-level convenience function: .. versionadded:: 3.3 .. versionchanged:: 3.4 - Added the ``with_pip`` parameter + Added the *with_pip* parameter .. versionchanged:: 3.6 - Added the ``prompt`` parameter + Added the *prompt* parameter .. versionchanged:: 3.9 - Added the ``upgrade_deps`` parameter + Added the *upgrade_deps* parameter .. versionchanged:: 3.13 - Added the ``scm_ignore_files`` parameter + Added the *scm_ignore_files* parameter An example of extending ``EnvBuilder`` -------------------------------------- diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst index c66e65abee4..83163cc6882 100644 --- a/Doc/library/warnings.rst +++ b/Doc/library/warnings.rst @@ -595,6 +595,9 @@ Available Context Managers passed to :func:`simplefilter` as if it were called immediately on entering the context. + See :ref:`warning-filter` for the meaning of the *category* and *lineno* + parameters. + .. note:: The :class:`catch_warnings` manager works by replacing and diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst index 89664693cc8..36c2bde87fb 100644 --- a/Doc/library/wave.rst +++ b/Doc/library/wave.rst @@ -46,8 +46,8 @@ The :mod:`wave` module defines the following function and exception: the file object. The :func:`.open` function may be used in a :keyword:`with` statement. When - the :keyword:`!with` block completes, the :meth:`Wave_read.close()` or - :meth:`Wave_write.close()` method is called. 
+ the :keyword:`!with` block completes, the :meth:`Wave_read.close` or + :meth:`Wave_write.close` method is called. .. versionchanged:: 3.4 Added support for unseekable files. diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index d6e062df945..2a25ed045c6 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -197,7 +197,7 @@ See :ref:`__slots__ documentation ` for details. >>> del k1 # d = {k2: 2} .. versionchanged:: 3.9 - Added support for ``|`` and ``|=`` operators, specified in :pep:`584`. + Added support for ``|`` and ``|=`` operators, as specified in :pep:`584`. :class:`WeakKeyDictionary` objects have an additional method that exposes the internal references directly. The references are not guaranteed to diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst index 334f21f01c1..2d19c514ce4 100644 --- a/Doc/library/webbrowser.rst +++ b/Doc/library/webbrowser.rst @@ -51,7 +51,7 @@ The options are, naturally, mutually exclusive. Usage example:: python -m webbrowser -t "https://www.python.org" -.. include:: ../includes/wasm-notavail.rst +.. availability:: not WASI, not Android. The following exception is defined: @@ -72,6 +72,8 @@ The following functions are defined: (note that under many window managers this will occur regardless of the setting of this variable). + Returns ``True`` if a browser was successfully launched, ``False`` otherwise. + Note that on some platforms, trying to open a filename using this function, may work and start the operating system's associated program. However, this is neither supported nor portable. @@ -84,11 +86,16 @@ The following functions are defined: Open *url* in a new window of the default browser, if possible, otherwise, open *url* in the only browser window. + Returns ``True`` if a browser was successfully launched, ``False`` otherwise. + + .. function:: open_new_tab(url) Open *url* in a new page ("tab") of the default browser, if possible, otherwise equivalent to :func:`open_new`. + Returns ``True`` if a browser was successfully launched, ``False`` otherwise. + .. function:: get(using=None) diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst index e46730f1716..8d4c5eb6600 100644 --- a/Doc/library/wsgiref.rst +++ b/Doc/library/wsgiref.rst @@ -783,8 +783,8 @@ in :pep:`3333`. .. class:: StartResponse() - A :class:`typing.Protocol` describing `start_response() - `_ + A :class:`typing.Protocol` describing :pep:`start_response() + <3333#the-start-response-callable>` callables (:pep:`3333`). .. data:: WSGIEnvironment @@ -797,18 +797,18 @@ in :pep:`3333`. .. class:: InputStream() - A :class:`typing.Protocol` describing a `WSGI Input Stream - `_. + A :class:`typing.Protocol` describing a :pep:`WSGI Input Stream + <3333#input-and-error-streams>`. .. class:: ErrorStream() - A :class:`typing.Protocol` describing a `WSGI Error Stream - `_. + A :class:`typing.Protocol` describing a :pep:`WSGI Error Stream + <3333#input-and-error-streams>`. .. class:: FileWrapper() - A :class:`typing.Protocol` describing a `file wrapper - `_. + A :class:`typing.Protocol` describing a :pep:`file wrapper + <3333#optional-platform-specific-file-handling>`. See :class:`wsgiref.util.FileWrapper` for a concrete implementation of this protocol. diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 4c1e7bd7e67..1daf6628013 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -874,6 +874,7 @@ Element Objects .. 
module:: xml.etree.ElementTree :noindex: + :no-index: .. class:: Element(tag, attrib={}, **extra) @@ -970,7 +971,7 @@ Element Objects .. method:: extend(subelements) - Appends *subelements* from a sequence object with zero or more elements. + Appends *subelements* from an iterable of elements. Raises :exc:`TypeError` if a subelement is not an :class:`Element`. .. versionadded:: 3.2 @@ -1374,7 +1375,7 @@ XMLParser Objects .. versionchanged:: 3.8 Parameters are now :ref:`keyword-only `. - The *html* argument no longer supported. + The *html* argument is no longer supported. .. method:: close() diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst index 614fb19d1f5..c57f433e6ef 100644 --- a/Doc/library/xmlrpc.client.rst +++ b/Doc/library/xmlrpc.client.rst @@ -165,7 +165,7 @@ between conformable Python objects and XML on the wire. A good description of XML-RPC operation and client software in several languages. Contains pretty much everything an XML-RPC client developer needs to know. - `XML-RPC Introspection `_ + `XML-RPC Introspection `_ Describes the XML-RPC protocol extension for introspection. `XML-RPC Specification `_ diff --git a/Doc/library/zipapp.rst b/Doc/library/zipapp.rst index cf561b454e9..cdaba07ab46 100644 --- a/Doc/library/zipapp.rst +++ b/Doc/library/zipapp.rst @@ -332,7 +332,7 @@ Formally, the Python zip application format is therefore: interpreter name, and then a newline (``b'\n'``) character. The interpreter name can be anything acceptable to the OS "shebang" processing, or the Python launcher on Windows. The interpreter should be encoded in UTF-8 on Windows, - and in :func:`sys.getfilesystemencoding()` on POSIX. + and in :func:`sys.getfilesystemencoding` on POSIX. 2. Standard zipfile data, as generated by the :mod:`zipfile` module. The zipfile content *must* include a file called ``__main__.py`` (which must be in the "root" of the zipfile - i.e., it cannot be in a subdirectory). The diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index 8181b975951..c7e43ef7739 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -841,7 +841,7 @@ A literal pattern corresponds to most : | "None" : | "True" : | "False" - : | `signed_number`: NUMBER | "-" NUMBER + signed_number: ["-"] NUMBER The rule ``strings`` and the token ``NUMBER`` are defined in the :doc:`standard Python grammar <./grammar>`. Triple-quoted strings are @@ -1217,9 +1217,10 @@ A function definition defines a user-defined function object (see section : | `parameter_list_no_posonly` parameter_list_no_posonly: `defparameter` ("," `defparameter`)* ["," [`parameter_list_starargs`]] : | `parameter_list_starargs` - parameter_list_starargs: "*" [`parameter`] ("," `defparameter`)* ["," ["**" `parameter` [","]]] + parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," ["**" `parameter` [","]]] : | "**" `parameter` [","] parameter: `identifier` [":" `expression`] + star_parameter: `identifier` [":" ["*"] `expression`] defparameter: `parameter` ["=" `expression`] funcname: `identifier` @@ -1326,7 +1327,8 @@ and may only be passed by positional arguments. Parameters may have an :term:`annotation ` of the form "``: expression``" following the parameter name. Any parameter may have an annotation, even those of the form -``*identifier`` or ``**identifier``. Functions may have "return" annotation of +``*identifier`` or ``**identifier``. 
(As a special case, parameters of the form +``*identifier`` may have an annotation "``: *expression``".) Functions may have "return" annotation of the form "``-> expression``" after the parameter list. These annotations can be any valid Python expression. The presence of annotations does not change the semantics of a function. The annotation values are available as values of @@ -1337,6 +1339,10 @@ enables postponed evaluation. Otherwise, they are evaluated when the function definition is executed. In this case annotations may be evaluated in a different order than they appear in the source code. +.. versionchanged:: 3.11 + Parameters of the form "``*identifier``" may have an annotation + "``: *expression``". See :pep:`646`. + .. index:: pair: lambda; expression It is also possible to create anonymous functions (functions not bound to a @@ -1422,7 +1428,7 @@ dictionary. The class name is bound to this class object in the original local namespace. The order in which attributes are defined in the class body is preserved -in the new class's ``__dict__``. Note that this is reliable only right +in the new class's :attr:`~type.__dict__`. Note that this is reliable only right after the class is created and only for classes that were defined using the definition syntax. @@ -1453,8 +1459,8 @@ decorators. The result is then bound to the class name. A list of :ref:`type parameters ` may be given in square brackets immediately after the class's name. This indicates to static type checkers that the class is generic. At runtime, -the type parameters can be retrieved from the class's ``__type_params__`` -attribute. See :ref:`generic-classes` for more. +the type parameters can be retrieved from the class's +:attr:`~type.__type_params__` attribute. See :ref:`generic-classes` for more. .. versionchanged:: 3.12 Type parameter lists are new in Python 3.12. @@ -1667,8 +1673,8 @@ with more precision. The scope of type parameters is modeled with a special function (technically, an :ref:`annotation scope `) that wraps the creation of the generic object. -Generic functions, classes, and type aliases have a :attr:`!__type_params__` -attribute listing their type parameters. +Generic functions, classes, and type aliases have a +:attr:`~definition.__type_params__` attribute listing their type parameters. Type parameters come in three kinds: @@ -1892,5 +1898,5 @@ like ``TYPE_PARAMS_OF_ListOrSet`` are not actually bound at runtime. therefore the function's :term:`docstring`. .. [#] A string literal appearing as the first statement in the class body is - transformed into the namespace's ``__doc__`` item and therefore the class's - :term:`docstring`. + transformed into the namespace's :attr:`~type.__doc__` item and therefore + the class's :term:`docstring`. diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 527eac6f62c..57d9b286c6c 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -106,12 +106,16 @@ that mutable object is changed. Types affect almost all aspects of object behavior. Even the importance of object identity is affected in some sense: for immutable types, operations that compute new values may actually return a reference to any existing object with -the same type and value, while for mutable objects this is not allowed. 
E.g., -after ``a = 1; b = 1``, ``a`` and ``b`` may or may not refer to the same object -with the value one, depending on the implementation, but after ``c = []; d = -[]``, ``c`` and ``d`` are guaranteed to refer to two different, unique, newly -created empty lists. (Note that ``c = d = []`` assigns the same object to both -``c`` and ``d``.) +the same type and value, while for mutable objects this is not allowed. +For example, after ``a = 1; b = 1``, *a* and *b* may or may not refer to +the same object with the value one, depending on the implementation. +This is because :class:`int` is an immutable type, so the reference to ``1`` +can be reused. This behaviour depends on the implementation used, so should +not be relied upon, but is something to be aware of when making use of object +identity tests. +However, after ``c = []; d = []``, *c* and *d* are guaranteed to refer to two +different, unique, newly created empty lists. (Note that ``e = f = []`` assigns +the *same* object to both *e* and *f*.) .. _types: @@ -215,7 +219,7 @@ properties: * A sign is shown only when the number is negative. -Python distinguishes between integers, floating point numbers, and complex +Python distinguishes between integers, floating-point numbers, and complex numbers: @@ -259,18 +263,18 @@ Booleans (:class:`bool`) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. index:: - pair: object; floating point - pair: floating point; number + pair: object; floating-point + pair: floating-point; number pair: C; language pair: Java; language -These represent machine-level double precision floating point numbers. You are +These represent machine-level double precision floating-point numbers. You are at the mercy of the underlying machine architecture (and C or Java implementation) for the accepted range and handling of overflow. Python does not -support single-precision floating point numbers; the savings in processor and +support single-precision floating-point numbers; the savings in processor and memory usage that are usually the reason for using these are dwarfed by the overhead of using objects in Python, so there is no reason to complicate the -language with two kinds of floating point numbers. +language with two kinds of floating-point numbers. :class:`numbers.Complex` (:class:`complex`) @@ -281,7 +285,7 @@ language with two kinds of floating point numbers. pair: complex; number These represent complex numbers as a pair of machine-level double precision -floating point numbers. The same caveats apply as for floating point numbers. +floating-point numbers. The same caveats apply as for floating-point numbers. The real and imaginary parts of a complex number ``z`` can be retrieved through the read-only attributes ``z.real`` and ``z.imag``. @@ -373,7 +377,7 @@ Bytes A bytes object is an immutable array. The items are 8-bit bytes, represented by integers in the range 0 <= x < 256. Bytes literals - (like ``b'abc'``) and the built-in :func:`bytes()` constructor + (like ``b'abc'``) and the built-in :func:`bytes` constructor can be used to create bytes objects. Also, bytes objects can be decoded to strings via the :meth:`~bytes.decode` method. @@ -492,7 +496,7 @@ in the same order they were added sequentially over the dictionary. Replacing an existing key does not change the order, however removing a key and re-inserting it will add it to the end instead of keeping its old place. 
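A short doctest-style sketch of the ordering behaviour described above (the
keys and values are made up for the example)::

   >>> d = {'one': 1, 'two': 2, 'three': 3}
   >>> d['one'] = 10        # replacing an existing key keeps its position
   >>> list(d)
   ['one', 'two', 'three']
   >>> del d['two']
   >>> d['two'] = 2         # re-inserting a removed key appends it at the end
   >>> list(d)
   ['one', 'three', 'two']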
-Dictionaries are mutable; they can be created by the ``{...}`` notation (see +Dictionaries are mutable; they can be created by the ``{}`` notation (see section :ref:`dict`). .. index:: @@ -587,7 +591,6 @@ Most of these attributes check the type of the assigned value: * - .. attribute:: function.__doc__ - The function's documentation string, or ``None`` if unavailable. - Not inherited by subclasses. * - .. attribute:: function.__name__ - The function's name. @@ -727,14 +730,7 @@ When an instance method object is derived from a :class:`classmethod` object, th itself, so that calling either ``x.f(1)`` or ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is the underlying function. -Note that the transformation from :ref:`function object ` -to instance method -object happens each time the attribute is retrieved from the instance. In -some cases, a fruitful optimization is to assign the attribute to a local -variable and call that local variable. Also notice that this -transformation only happens for user-defined functions; other callable -objects (and all non-callable objects) are retrieved without -transformation. It is also important to note that user-defined functions +It is important to note that user-defined functions which are attributes of a class instance are not converted to bound methods; this *only* happens when the function is an attribute of the class. @@ -835,6 +831,7 @@ this case, the special read-only attribute :attr:`!__self__` is set to the objec denoted by *alist*. (The attribute has the same semantics as it does with :attr:`other instance methods `.) +.. _classes: Classes ^^^^^^^ @@ -920,6 +917,8 @@ namespace as a dictionary object. or keep the module around while using its dictionary directly. +.. _class-attrs-and-methods: + Custom classes -------------- @@ -962,6 +961,9 @@ of a base class. A class object can be called (see above) to yield a class instance (see below). +Special attributes +^^^^^^^^^^^^^^^^^^ + .. index:: single: __name__ (class attribute) single: __module__ (class attribute) @@ -973,42 +975,100 @@ A class object can be called (see above) to yield a class instance (see below). single: __static_attributes__ (class attribute) single: __firstlineno__ (class attribute) -Special attributes: +.. list-table:: + :header-rows: 1 - :attr:`~definition.__name__` - The class name. + * - Attribute + - Meaning - :attr:`__module__` - The name of the module in which the class was defined. + * - .. attribute:: type.__name__ + - The class's name. + See also: :attr:`__name__ attributes `. - :attr:`~object.__dict__` - The dictionary containing the class's namespace. + * - .. attribute:: type.__qualname__ + - The class's :term:`qualified name`. + See also: :attr:`__qualname__ attributes `. - :attr:`~class.__bases__` - A tuple containing the base classes, in the order of - their occurrence in the base class list. + * - .. attribute:: type.__module__ + - The name of the module in which the class was defined. - :attr:`__doc__` - The class's documentation string, or ``None`` if undefined. + * - .. attribute:: type.__dict__ + - A :class:`mapping proxy ` + providing a read-only view of the class's namespace. + See also: :attr:`__dict__ attributes `. - :attr:`__annotations__` - A dictionary containing - :term:`variable annotations ` - collected during class body execution. For best practices on - working with :attr:`__annotations__`, please see - :ref:`annotations-howto`. + * - .. attribute:: type.__bases__ + - A :class:`tuple` containing the class's bases. 
+ In most cases, for a class defined as ``class X(A, B, C)``, + ``X.__bases__`` will be exactly equal to ``(A, B, C)``. + + * - .. attribute:: type.__doc__ + - The class's documentation string, or ``None`` if undefined. + Not inherited by subclasses. + + * - .. attribute:: type.__annotations__ + - A dictionary containing + :term:`variable annotations ` + collected during class body execution. For best practices on working + with :attr:`!__annotations__`, please see :ref:`annotations-howto`. + + .. caution:: + + Accessing the :attr:`!__annotations__` attribute of a class + object directly may yield incorrect results in the presence of + metaclasses. In addition, the attribute may not exist for + some classes. Use :func:`inspect.get_annotations` to + retrieve class annotations safely. - :attr:`__type_params__` - A tuple containing the :ref:`type parameters ` of - a :ref:`generic class `. + * - .. attribute:: type.__type_params__ + - A :class:`tuple` containing the :ref:`type parameters ` of + a :ref:`generic class `. + + .. versionadded:: 3.12 + + * - .. attribute:: type.__static_attributes__ + - A :class:`tuple` containing names of attributes of this class which are + assigned through ``self.X`` from any function in its body. + + .. versionadded:: 3.13 + + * - .. attribute:: type.__firstlineno__ + - The line number of the first line of the class definition, + including decorators. + Setting the :attr:`__module__` attribute removes the + :attr:`!__firstlineno__` item from the type's dictionary. + + .. versionadded:: 3.13 + + * - .. attribute:: type.__mro__ + - The :class:`tuple` of classes that are considered when looking for + base classes during method resolution. + + +Special methods +^^^^^^^^^^^^^^^ + +In addition to the special attributes described above, all Python classes also +have the following two methods available: + +.. method:: type.mro + + This method can be overridden by a metaclass to customize the method + resolution order for its instances. It is called at class instantiation, + and its result is stored in :attr:`~type.__mro__`. + +.. method:: type.__subclasses__ - :attr:`~class.__static_attributes__` - A tuple containing names of attributes of this class which are accessed - through ``self.X`` from any function in its body. + Each class keeps a list of weak references to its immediate subclasses. This + method returns a list of all those references still alive. The list is in + definition order. Example: - :attr:`__firstlineno__` - The line number of the first line of the class definition, including decorators. + .. doctest:: + >>> class A: pass + >>> class B(A): pass + >>> A.__subclasses__() + [] Class instances --------------- @@ -1048,12 +1108,22 @@ dictionary directly. Class instances can pretend to be numbers, sequences, or mappings if they have methods with certain special names. See section :ref:`specialnames`. +Special attributes +^^^^^^^^^^^^^^^^^^ + .. index:: single: __dict__ (instance attribute) single: __class__ (instance attribute) -Special attributes: :attr:`~object.__dict__` is the attribute dictionary; -:attr:`~instance.__class__` is the instance's class. +.. attribute:: object.__class__ + + The class to which a class instance belongs. + +.. attribute:: object.__dict__ + + A dictionary or other mapping object used to store an object's (writable) + attributes. Not all instances have a :attr:`!__dict__` attribute; see the + section on :ref:`slots` for more details. 
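The class and instance attributes listed above can be observed interactively; a minimal sketch, with class and attribute names chosen only for illustration::

   >>> class A: pass
   >>> class B(A): pass
   >>> B.__mro__ == (B, A, object)       # method resolution order
   True
   >>> A.__subclasses__() == [B]         # weakly tracked immediate subclasses
   True
   >>> b = B()
   >>> b.__class__ is B                  # the instance's class
   True
   >>> b.attr = 1
   >>> b.__dict__                        # writable attributes of the instance
   {'attr': 1}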
I/O objects (also known as file objects) @@ -2283,9 +2353,9 @@ Notes on using *__slots__*: * The action of a *__slots__* declaration is not limited to the class where it is defined. *__slots__* declared in parents are available in - child classes. However, child subclasses will get a :attr:`~object.__dict__` and - *__weakref__* unless they also define *__slots__* (which should only - contain names of any *additional* slots). + child classes. However, instances of a child subclass will get a + :attr:`~object.__dict__` and *__weakref__* unless the subclass also defines + *__slots__* (which should only contain names of any *additional* slots). * If a class defines a slot also defined in a base class, the instance variable defined by the base class slot is inaccessible (except by retrieving its @@ -2304,7 +2374,7 @@ Notes on using *__slots__*: to provide per-attribute docstrings that will be recognised by :func:`inspect.getdoc` and displayed in the output of :func:`help`. -* :attr:`~instance.__class__` assignment works only if both classes have the +* :attr:`~object.__class__` assignment works only if both classes have the same *__slots__*. * :ref:`Multiple inheritance ` with multiple slotted parent @@ -2570,7 +2640,7 @@ in the local namespace as the defined class. When a new class is created by ``type.__new__``, the object provided as the namespace parameter is copied to a new ordered mapping and the original object is discarded. The new copy is wrapped in a read-only proxy, which -becomes the :attr:`~object.__dict__` attribute of the class object. +becomes the :attr:`~type.__dict__` attribute of the class object. .. seealso:: @@ -2598,14 +2668,14 @@ order to allow the addition of Abstract Base Classes (ABCs) as "virtual base classes" to any class or type (including built-in types), including other ABCs. -.. method:: class.__instancecheck__(self, instance) +.. method:: type.__instancecheck__(self, instance) Return true if *instance* should be considered a (direct or indirect) instance of *class*. If defined, called to implement ``isinstance(instance, class)``. -.. method:: class.__subclasscheck__(self, subclass) +.. method:: type.__subclasscheck__(self, subclass) Return true if *subclass* should be considered a (direct or indirect) subclass of *class*. If defined, called to implement ``issubclass(subclass, @@ -2621,8 +2691,8 @@ case the instance is itself a class. :pep:`3119` - Introducing Abstract Base Classes Includes the specification for customizing :func:`isinstance` and - :func:`issubclass` behavior through :meth:`~class.__instancecheck__` and - :meth:`~class.__subclasscheck__`, with motivation for this functionality + :func:`issubclass` behavior through :meth:`~type.__instancecheck__` and + :meth:`~type.__subclasscheck__`, with motivation for this functionality in the context of adding Abstract Base Classes (see the :mod:`abc` module) to the language. diff --git a/Doc/reference/executionmodel.rst b/Doc/reference/executionmodel.rst index f24e1537af3..c6d98fc2e70 100644 --- a/Doc/reference/executionmodel.rst +++ b/Doc/reference/executionmodel.rst @@ -225,8 +225,8 @@ Annotation scopes differ from function scopes in the following ways: statements in inner scopes. This includes only type parameters, as no other syntactic elements that can appear within annotation scopes can introduce new names. * While annotation scopes have an internal name, that name is not reflected in the - :term:`__qualname__ ` of objects defined within the scope. 
- Instead, the :attr:`!__qualname__` + :term:`qualified name` of objects defined within the scope. + Instead, the :attr:`~definition.__qualname__` of such objects is as if the object were defined in the enclosing scope. .. versionadded:: 3.12 diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index 00b57effd3e..ab72ad49d04 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -33,7 +33,7 @@ implementation for built-in types works as follows: * If either argument is a complex number, the other is converted to complex; -* otherwise, if either argument is a floating point number, the other is +* otherwise, if either argument is a floating-point number, the other is converted to floating point; * otherwise, both must be integers and no conversion is necessary. @@ -83,18 +83,47 @@ exception. pair: name; mangling pair: private; names -**Private name mangling:** When an identifier that textually occurs in a class -definition begins with two or more underscore characters and does not end in two -or more underscores, it is considered a :dfn:`private name` of that class. -Private names are transformed to a longer form before code is generated for -them. The transformation inserts the class name, with leading underscores -removed and a single underscore inserted, in front of the name. For example, -the identifier ``__spam`` occurring in a class named ``Ham`` will be transformed -to ``_Ham__spam``. This transformation is independent of the syntactical -context in which the identifier is used. If the transformed name is extremely -long (longer than 255 characters), implementation defined truncation may happen. -If the class name consists only of underscores, no transformation is done. +Private name mangling +^^^^^^^^^^^^^^^^^^^^^ +When an identifier that textually occurs in a class definition begins with two +or more underscore characters and does not end in two or more underscores, it +is considered a :dfn:`private name` of that class. + +.. seealso:: + + The :ref:`class specifications `. + +More precisely, private names are transformed to a longer form before code is +generated for them. If the transformed name is longer than 255 characters, +implementation-defined truncation may happen. + +The transformation is independent of the syntactical context in which the +identifier is used but only the following private identifiers are mangled: + +- Any name used as the name of a variable that is assigned or read or any + name of an attribute being accessed. + + The :attr:`~definition.__name__` attribute of nested functions, classes, and + type aliases is however not mangled. + +- The name of imported modules, e.g., ``__spam`` in ``import __spam``. + If the module is part of a package (i.e., its name contains a dot), + the name is *not* mangled, e.g., the ``__foo`` in ``import __foo.bar`` + is not mangled. + +- The name of an imported member, e.g., ``__f`` in ``from spam import __f``. + +The transformation rule is defined as follows: + +- The class name, with leading underscores removed and a single leading + underscore inserted, is inserted in front of the identifier, e.g., the + identifier ``__spam`` occurring in a class named ``Foo``, ``_Foo`` or + ``__Foo`` is transformed to ``_Foo__spam``. + +- If the class name consists only of underscores, the transformation is the + identity, e.g., the identifier ``__spam`` occurring in a class named ``_`` + or ``__`` is left as is. .. 
_atom-literals: @@ -110,8 +139,8 @@ Python supports string and bytes literals and various numeric literals: : | `integer` | `floatnumber` | `imagnumber` Evaluation of a literal yields an object of the given type (string, bytes, -integer, floating point number, complex number) with the given value. The value -may be approximated in the case of floating point and imaginary (complex) +integer, floating-point number, complex number) with the given value. The value +may be approximated in the case of floating-point and imaginary (complex) literals. See section :ref:`literals` for details. .. index:: @@ -218,10 +247,12 @@ A comprehension in an :keyword:`!async def` function may consist of either a :keyword:`!for` or :keyword:`!async for` clause following the leading expression, may contain additional :keyword:`!for` or :keyword:`!async for` clauses, and may also use :keyword:`await` expressions. -If a comprehension contains either :keyword:`!async for` clauses or -:keyword:`!await` expressions or other asynchronous comprehensions it is called -an :dfn:`asynchronous comprehension`. An asynchronous comprehension may -suspend the execution of the coroutine function in which it appears. + +If a comprehension contains :keyword:`!async for` clauses, or if it contains +:keyword:`!await` expressions or other asynchronous comprehensions anywhere except +the iterable expression in the leftmost :keyword:`!for` clause, it is called an +:dfn:`asynchronous comprehension`. An asynchronous comprehension may suspend the +execution of the coroutine function in which it appears. See also :pep:`530`. .. versionadded:: 3.6 @@ -253,7 +284,7 @@ A list display is a possibly empty series of expressions enclosed in square brackets: .. productionlist:: python-grammar - list_display: "[" [`starred_list` | `comprehension`] "]" + list_display: "[" [`flexible_expression_list` | `comprehension`] "]" A list display yields a new list object, the contents being specified by either a list of expressions or a comprehension. When a comma-separated list of @@ -278,7 +309,7 @@ A set display is denoted by curly braces and distinguishable from dictionary displays by the lack of colons separating keys and values: .. productionlist:: python-grammar - set_display: "{" (`starred_list` | `comprehension`) "}" + set_display: "{" (`flexible_expression_list` | `comprehension`) "}" A set display yields a new mutable set object, the contents being specified by either a sequence of expressions or a comprehension. When a comma-separated @@ -423,7 +454,7 @@ Yield expressions .. productionlist:: python-grammar yield_atom: "(" `yield_expression` ")" yield_from: "yield" "from" `expression` - yield_expression: "yield" `expression_list` | `yield_from` + yield_expression: "yield" `yield_list` | `yield_from` The yield expression is used when defining a :term:`generator` function or an :term:`asynchronous generator` function and @@ -454,9 +485,9 @@ When a generator function is called, it returns an iterator known as a generator. That generator then controls the execution of the generator function. The execution starts when one of the generator's methods is called. At that time, the execution proceeds to the first yield expression, where it is -suspended again, returning the value of :token:`~python-grammar:expression_list` +suspended again, returning the value of :token:`~python-grammar:yield_list` to the generator's caller, -or ``None`` if :token:`~python-grammar:expression_list` is omitted. +or ``None`` if :token:`~python-grammar:yield_list` is omitted. 
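As a sketch of the suspension behaviour described above (the generator below is purely illustrative), each ``next()`` or ``send()`` call runs the generator up to the next yield expression and returns the value of its yield list to the caller::

   >>> def echo():
   ...     while True:
   ...         received = yield 'ready'
   ...         print('got', received)
   >>> gen = echo()
   >>> next(gen)        # runs to the first yield; its value is returned
   'ready'
   >>> gen.send(42)     # 42 becomes the value of the suspended yield expression
   got 42
   'ready'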
By suspended, we mean that all local state is retained, including the current bindings of local variables, the instruction pointer, the internal evaluation stack, and the state of any exception handling. @@ -545,7 +576,7 @@ is already executing raises a :exc:`ValueError` exception. :meth:`~generator.__next__` method, the current yield expression always evaluates to :const:`None`. The execution then continues to the next yield expression, where the generator is suspended again, and the value of the - :token:`~python-grammar:expression_list` is returned to :meth:`__next__`'s + :token:`~python-grammar:yield_list` is returned to :meth:`__next__`'s caller. If the generator exits without yielding another value, a :exc:`StopIteration` exception is raised. @@ -664,7 +695,7 @@ how a generator object would be used in a :keyword:`for` statement. Calling one of the asynchronous generator's methods returns an :term:`awaitable` object, and the execution starts when this object is awaited on. At that time, the execution proceeds to the first yield expression, where it is suspended -again, returning the value of :token:`~python-grammar:expression_list` to the +again, returning the value of :token:`~python-grammar:yield_list` to the awaiting coroutine. As with a generator, suspension means that all local state is retained, including the current bindings of local variables, the instruction pointer, the internal evaluation stack, and the state of any exception handling. @@ -728,7 +759,7 @@ which are used to control the execution of a generator function. asynchronous generator function is resumed with an :meth:`~agen.__anext__` method, the current yield expression always evaluates to :const:`None` in the returned awaitable, which when run will continue to the next yield - expression. The value of the :token:`~python-grammar:expression_list` of the + expression. The value of the :token:`~python-grammar:yield_list` of the yield expression is the value of the :exc:`StopIteration` exception raised by the completing coroutine. If the asynchronous generator exits without yielding another value, the awaitable instead raises a @@ -741,7 +772,7 @@ which are used to control the execution of a generator function. .. coroutinemethod:: agen.asend(value) Returns an awaitable which when run resumes the execution of the - asynchronous generator. As with the :meth:`~generator.send()` method for a + asynchronous generator. As with the :meth:`~generator.send` method for a generator, this "sends" a value into the asynchronous generator function, and the *value* argument becomes the result of the current yield expression. The awaitable returned by the :meth:`asend` method will return the next @@ -861,7 +892,7 @@ will generally select an element from the container. The subscription of a :ref:`GenericAlias ` object. .. productionlist:: python-grammar - subscription: `primary` "[" `expression_list` "]" + subscription: `primary` "[" `flexible_expression_list` "]" When an object is subscripted, the interpreter will evaluate the primary and the expression list. @@ -873,9 +904,13 @@ primary is subscripted, the evaluated result of the expression list will be passed to one of these methods. For more details on when ``__class_getitem__`` is called instead of ``__getitem__``, see :ref:`classgetitem-versus-getitem`. -If the expression list contains at least one comma, it will evaluate to a -:class:`tuple` containing the items of the expression list. Otherwise, the -expression list will evaluate to the value of the list's sole member. 
+If the expression list contains at least one comma, or if any of the expressions +are starred, the expression list will evaluate to a :class:`tuple` containing +the items of the expression list. Otherwise, the expression list will evaluate +to the value of the list's sole member. + +.. versionchanged:: 3.11 + Expressions in an expression list may be starred. See :pep:`646`. For built-in objects, there are two types of objects that support subscription via :meth:`~object.__getitem__`: @@ -1211,7 +1246,8 @@ Raising ``0.0`` to a negative power results in a :exc:`ZeroDivisionError`. Raising a negative number to a fractional power results in a :class:`complex` number. (In earlier versions it raised a :exc:`ValueError`.) -This operation can be customized using the special :meth:`~object.__pow__` method. +This operation can be customized using the special :meth:`~object.__pow__` and +:meth:`~object.__rpow__` methods. .. _unary: @@ -1299,6 +1335,9 @@ This operation can be customized using the special :meth:`~object.__mul__` and The ``@`` (at) operator is intended to be used for matrix multiplication. No builtin Python types implement this operator. +This operation can be customized using the special :meth:`~object.__matmul__` and +:meth:`~object.__rmatmul__` methods. + .. versionadded:: 3.5 .. index:: @@ -1314,8 +1353,10 @@ integer; the result is that of mathematical division with the 'floor' function applied to the result. Division by zero raises the :exc:`ZeroDivisionError` exception. -This operation can be customized using the special :meth:`~object.__truediv__` and -:meth:`~object.__floordiv__` methods. +The division operation can be customized using the special :meth:`~object.__truediv__` +and :meth:`~object.__rtruediv__` methods. +The floor division operation can be customized using the special +:meth:`~object.__floordiv__` and :meth:`~object.__rfloordiv__` methods. .. index:: single: modulo @@ -1324,7 +1365,7 @@ This operation can be customized using the special :meth:`~object.__truediv__` a The ``%`` (modulo) operator yields the remainder from the division of the first argument by the second. The numeric arguments are first converted to a common type. A zero right argument raises the :exc:`ZeroDivisionError` exception. The -arguments may be floating point numbers, e.g., ``3.14%0.7`` equals ``0.34`` +arguments may be floating-point numbers, e.g., ``3.14%0.7`` equals ``0.34`` (since ``3.14`` equals ``4*0.7 + 0.34``.) The modulo operator always yields a result with the same sign as its second operand (or zero); the absolute value of the result is strictly smaller than the absolute value of the second operand @@ -1340,11 +1381,12 @@ also overloaded by string objects to perform old-style string formatting (also known as interpolation). The syntax for string formatting is described in the Python Library Reference, section :ref:`old-string-formatting`. -The *modulo* operation can be customized using the special :meth:`~object.__mod__` method. +The *modulo* operation can be customized using the special :meth:`~object.__mod__` +and :meth:`~object.__rmod__` methods. The floor division operator, the modulo operator, and the :func:`divmod` -function are not defined for complex numbers. Instead, convert to a floating -point number using the :func:`abs` function if appropriate. +function are not defined for complex numbers. Instead, convert to a +floating-point number using the :func:`abs` function if appropriate. .. 
index:: single: addition @@ -1367,7 +1409,8 @@ This operation can be customized using the special :meth:`~object.__add__` and The ``-`` (subtraction) operator yields the difference of its arguments. The numeric arguments are first converted to a common type. -This operation can be customized using the special :meth:`~object.__sub__` method. +This operation can be customized using the special :meth:`~object.__sub__` and +:meth:`~object.__rsub__` methods. .. _shifting: @@ -1388,8 +1431,10 @@ The shifting operations have lower priority than the arithmetic operations: These operators accept integers as arguments. They shift the first argument to the left or right by the number of bits given by the second argument. -This operation can be customized using the special :meth:`~object.__lshift__` and -:meth:`~object.__rshift__` methods. +The left shift operation can be customized using the special :meth:`~object.__lshift__` +and :meth:`~object.__rlshift__` methods. +The right shift operation can be customized using the special :meth:`~object.__rshift__` +and :meth:`~object.__rrshift__` methods. .. index:: pair: exception; ValueError @@ -1762,6 +1807,7 @@ returns a boolean value regardless of the type of its argument single: assignment expression single: walrus operator single: named expression + pair: assignment; expression Assignment expressions ====================== @@ -1864,10 +1910,12 @@ Expression lists single: , (comma); expression list .. productionlist:: python-grammar + starred_expression: ["*"] `or_expr` + flexible_expression: `assignment_expression` | `starred_expression` + flexible_expression_list: `flexible_expression` ("," `flexible_expression`)* [","] + starred_expression_list: `starred_expression` ("," `starred_expression`)* [","] expression_list: `expression` ("," `expression`)* [","] - starred_list: `starred_item` ("," `starred_item`)* [","] - starred_expression: `expression` | (`starred_item` ",")* [`starred_item`] - starred_item: `assignment_expression` | "*" `or_expr` + yield_list: `expression_list` | `starred_expression` "," [`starred_expression_list`] .. index:: pair: object; tuple @@ -1888,6 +1936,9 @@ the unpacking. .. versionadded:: 3.5 Iterable unpacking in expression lists, originally proposed by :pep:`448`. +.. versionadded:: 3.11 + Any item in an expression list may be starred. See :pep:`646`. + .. index:: pair: trailing; comma A trailing comma is required only to create a one-item tuple, diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index f8c9724114d..19b8aa05072 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -281,7 +281,7 @@ When the named module is not found in :data:`sys.modules`, Python next searches :data:`sys.meta_path`, which contains a list of meta path finder objects. These finders are queried in order to see if they know how to handle the named module. Meta path finders must implement a method called -:meth:`~importlib.abc.MetaPathFinder.find_spec()` which takes three arguments: +:meth:`~importlib.abc.MetaPathFinder.find_spec` which takes three arguments: a name, an import path, and (optionally) a target module. The meta path finder can use any strategy it wants to determine whether it can handle the named module or not. @@ -292,7 +292,7 @@ spec object. If it cannot handle the named module, it returns ``None``. If a spec, then a :exc:`ModuleNotFoundError` is raised. Any other exceptions raised are simply propagated up, aborting the import process. 
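A minimal sketch of this protocol (the finder class and its messages are hypothetical; it only logs lookups and returns ``None`` so that the remaining finders on :data:`sys.meta_path` are consulted)::

   import importlib.abc
   import sys

   class LoggingFinder(importlib.abc.MetaPathFinder):
       """Report every module lookup without handling any of them."""

       def find_spec(self, fullname, path=None, target=None):
           print(f'find_spec({fullname!r}, path={path!r})')
           return None   # None means this finder cannot provide a spec

   sys.meta_path.insert(0, LoggingFinder())

Returning ``None`` rather than raising is what allows the import machinery to fall through to the next meta path finder in the list.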
-The :meth:`~importlib.abc.MetaPathFinder.find_spec()` method of meta path +The :meth:`~importlib.abc.MetaPathFinder.find_spec` method of meta path finders is called with two or three arguments. The first is the fully qualified name of the module being imported, for example ``foo.bar.baz``. The second argument is the path entries to use for the module search. For @@ -596,6 +596,10 @@ listed below. Raise :exc:`DeprecationWarning` instead of :exc:`ImportWarning` when falling back to ``__package__``. + .. deprecated-removed:: 3.13 3.15 + ``__package__`` will cease to be set or taken into consideration + by the import system or standard library. + .. attribute:: __spec__ @@ -653,6 +657,10 @@ listed below. It is **strongly** recommended that you rely on :attr:`__spec__` instead of ``__cached__``. + .. deprecated-removed:: 3.13 3.15 + ``__cached__`` will cease to be set or taken into consideration + by the import system or standard library. + .. _package-path-rules: module.__path__ diff --git a/Doc/reference/introduction.rst b/Doc/reference/introduction.rst index cf186705e6e..b7b70e6be5a 100644 --- a/Doc/reference/introduction.rst +++ b/Doc/reference/introduction.rst @@ -74,7 +74,7 @@ PyPy and a Just in Time compiler. One of the goals of the project is to encourage experimentation with the language itself by making it easier to modify the interpreter (since it is written in Python). Additional information is - available on `the PyPy project's home page `_. + available on `the PyPy project's home page `_. Each of these implementations varies in some way from the language as documented in this manual, or introduces specific information beyond what's covered in the diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index 41ea89fd234..c2f5f145bb2 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -503,11 +503,10 @@ must be expressed with escapes. single: r"; raw string literal Both string and bytes literals may optionally be prefixed with a letter ``'r'`` -or ``'R'``; such strings are called :dfn:`raw strings` and treat backslashes as -literal characters. As a result, in string literals, ``'\U'`` and ``'\u'`` -escapes in raw strings are not treated specially. Given that Python 2.x's raw -unicode literals behave differently than Python 3.x's the ``'ur'`` syntax -is not supported. +or ``'R'``; such constructs are called :dfn:`raw string literals` +and :dfn:`raw bytes literals` respectively and treat backslashes as +literal characters. As a result, in raw string literals, ``'\U'`` and ``'\u'`` +escapes are not treated specially. .. versionadded:: 3.3 The ``'rb'`` prefix of raw bytes literals has been added as a synonym @@ -879,10 +878,10 @@ Numeric literals ---------------- .. index:: number, numeric literal, integer literal - floating point literal, hexadecimal literal + floating-point literal, hexadecimal literal octal literal, binary literal, decimal literal, imaginary literal, complex literal -There are three types of numeric literals: integers, floating point numbers, and +There are three types of numeric literals: integers, floating-point numbers, and imaginary numbers. There are no complex literals (complex numbers can be formed by adding a real number and an imaginary number). @@ -943,10 +942,10 @@ Some examples of integer literals:: single: _ (underscore); in numeric literal .. 
_floating: -Floating point literals +Floating-point literals ----------------------- -Floating point literals are described by the following lexical definitions: +Floating-point literals are described by the following lexical definitions: .. productionlist:: python-grammar floatnumber: `pointfloat` | `exponentfloat` @@ -958,10 +957,10 @@ Floating point literals are described by the following lexical definitions: Note that the integer and exponent parts are always interpreted using radix 10. For example, ``077e010`` is legal, and denotes the same number as ``77e10``. The -allowed range of floating point literals is implementation-dependent. As in +allowed range of floating-point literals is implementation-dependent. As in integer literals, underscores are supported for digit grouping. -Some examples of floating point literals:: +Some examples of floating-point literals:: 3.14 10. .001 1e100 3.14e-10 0e0 3.14_15_93 @@ -982,9 +981,9 @@ Imaginary literals are described by the following lexical definitions: imagnumber: (`floatnumber` | `digitpart`) ("j" | "J") An imaginary literal yields a complex number with a real part of 0.0. Complex -numbers are represented as a pair of floating point numbers and have the same +numbers are represented as a pair of floating-point numbers and have the same restrictions on their range. To create a complex number with a nonzero real -part, add a floating point number to it, e.g., ``(3+4j)``. Some examples of +part, add a floating-point number to it, e.g., ``(3+4j)``. Some examples of imaginary literals:: 3.14j 10.j 10j .001j 1e100j 3.14e-10j 3.14_15_93j @@ -1019,9 +1018,9 @@ The following tokens serve as delimiters in the grammar: .. code-block:: none ( ) [ ] { } - , : . ; @ = -> - += -= *= /= //= %= @= - &= |= ^= >>= <<= **= + , : ! . ; @ = + -> += -= *= /= //= %= + @= &= |= ^= >>= <<= **= The period can also occur in floating-point and imaginary literals. A sequence of three periods has a special meaning as an ellipsis literal. The second half diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 4f6c0c63ae4..618664b23f0 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -293,7 +293,7 @@ statements, cannot be an unpacking) and the expression list, performs the binary operation specific to the type of assignment on the two operands, and assigns the result to the original target. The target is only evaluated once. -An augmented assignment expression like ``x += 1`` can be rewritten as ``x = x + +An augmented assignment statement like ``x += 1`` can be rewritten as ``x = x + 1`` to achieve a similar, but not exactly equal effect. In the augmented version, ``x`` is only evaluated once. Also, when possible, the actual operation is performed *in-place*, meaning that rather than creating a new object and diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt index 4e49ba1a8ed..068fe0cb426 100644 --- a/Doc/requirements-oldest-sphinx.txt +++ b/Doc/requirements-oldest-sphinx.txt @@ -14,7 +14,7 @@ python-docs-theme>=2022.1 alabaster==0.7.16 Babel==2.15.0 -certifi==2024.6.2 +certifi==2024.7.4 charset-normalizer==3.3.2 docutils==0.19 idna==3.7 diff --git a/Doc/requirements.txt b/Doc/requirements.txt index b47a9d8a863..bf1028020b7 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -6,12 +6,12 @@ # Sphinx version is pinned so that new versions that introduce new warnings # won't suddenly cause build failures. 
Updating the version is fine as long # as no warnings are raised by doing so. -sphinx~=7.3.0 +sphinx~=8.0.0 blurb -sphinxext-opengraph==0.7.5 -sphinx-notfound-page==1.0.0 +sphinxext-opengraph~=0.9.0 +sphinx-notfound-page~=1.0.0 # The theme used by the documentation is stored separately, so we need # to install that as well. diff --git a/Doc/tools/check-warnings.py b/Doc/tools/check-warnings.py index c50b00636c3..d9c4a33b775 100644 --- a/Doc/tools/check-warnings.py +++ b/Doc/tools/check-warnings.py @@ -2,6 +2,7 @@ """ Check the output of running Sphinx in nit-picky mode (missing references). """ + from __future__ import annotations import argparse @@ -14,7 +15,7 @@ from typing import TextIO # Fail if NEWS nit found before this line number -NEWS_NIT_THRESHOLD = 200 +NEWS_NIT_THRESHOLD = 1400 # Exclude these whether they're dirty or clean, # because they trigger a rebuild of dirty files. @@ -206,7 +207,9 @@ def annotate_diff( def fail_if_regression( - warnings: list[str], files_with_expected_nits: set[str], files_with_nits: set[str] + warnings: list[str], + files_with_expected_nits: set[str], + files_with_nits: set[str], ) -> int: """ Ensure some files always pass Sphinx nit-picky mode (no missing references). @@ -252,17 +255,11 @@ def fail_if_new_news_nit(warnings: list[str], threshold: int) -> int: """ Ensure no warnings are found in the NEWS file before a given line number. """ - news_nits = ( - warning - for warning in warnings - if "/build/NEWS:" in warning - ) + news_nits = (warning for warning in warnings if "/build/NEWS:" in warning) # Nits found before the threshold line new_news_nits = [ - nit - for nit in news_nits - if int(nit.split(":")[1]) <= threshold + nit for nit in news_nits if int(nit.split(":")[1]) <= threshold ] if new_news_nits: @@ -311,7 +308,8 @@ def main(argv: list[str] | None = None) -> int: exit_code = 0 wrong_directory_msg = "Must run this script from the repo root" - assert Path("Doc").exists() and Path("Doc").is_dir(), wrong_directory_msg + if not Path("Doc").exists() or not Path("Doc").is_dir(): + raise RuntimeError(wrong_directory_msg) with Path("Doc/sphinx-warnings.txt").open(encoding="UTF-8") as f: warnings = f.read().splitlines() @@ -339,7 +337,9 @@ def main(argv: list[str] | None = None) -> int: ) if args.fail_if_improved: - exit_code += fail_if_improved(files_with_expected_nits, files_with_nits) + exit_code += fail_if_improved( + files_with_expected_nits, files_with_nits + ) if args.fail_if_new_news_nit: exit_code += fail_if_new_news_nit(warnings, args.fail_if_new_news_nit) diff --git a/Doc/tools/extensions/audit_events.py b/Doc/tools/extensions/audit_events.py new file mode 100644 index 00000000000..23d82c0f441 --- /dev/null +++ b/Doc/tools/extensions/audit_events.py @@ -0,0 +1,264 @@ +"""Support for documenting audit events.""" + +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +from docutils import nodes +from sphinx.errors import NoUri +from sphinx.locale import _ as sphinx_gettext +from sphinx.transforms.post_transforms import SphinxPostTransform +from sphinx.util import logging +from sphinx.util.docutils import SphinxDirective + +if TYPE_CHECKING: + from collections.abc import Iterator + + from sphinx.application import Sphinx + from sphinx.builders import Builder + from sphinx.environment import BuildEnvironment + +logger = logging.getLogger(__name__) + +# This list of sets are allowable synonyms for event argument names. 
+# If two names are in the same set, they are treated as equal for the +# purposes of warning. This won't help if the number of arguments is +# different! +_SYNONYMS = [ + frozenset({"file", "path", "fd"}), +] + + +class AuditEvents: + def __init__(self) -> None: + self.events: dict[str, list[str]] = {} + self.sources: dict[str, list[tuple[str, str]]] = {} + + def __iter__(self) -> Iterator[tuple[str, list[str], tuple[str, str]]]: + for name, args in self.events.items(): + for source in self.sources[name]: + yield name, args, source + + def add_event( + self, name, args: list[str], source: tuple[str, str] + ) -> None: + if name in self.events: + self._check_args_match(name, args) + else: + self.events[name] = args + self.sources.setdefault(name, []).append(source) + + def _check_args_match(self, name: str, args: list[str]) -> None: + current_args = self.events[name] + msg = ( + f"Mismatched arguments for audit-event {name}: " + f"{current_args!r} != {args!r}" + ) + if current_args == args: + return + if len(current_args) != len(args): + logger.warning(msg) + return + for a1, a2 in zip(current_args, args, strict=False): + if a1 == a2: + continue + if any(a1 in s and a2 in s for s in _SYNONYMS): + continue + logger.warning(msg) + return + + def id_for(self, name) -> str: + source_count = len(self.sources.get(name, ())) + name_clean = re.sub(r"\W", "_", name) + return f"audit_event_{name_clean}_{source_count}" + + def rows(self) -> Iterator[tuple[str, list[str], list[tuple[str, str]]]]: + for name in sorted(self.events.keys()): + yield name, self.events[name], self.sources[name] + + +def initialise_audit_events(app: Sphinx) -> None: + """Initialise the audit_events attribute on the environment.""" + if not hasattr(app.env, "audit_events"): + app.env.audit_events = AuditEvents() + + +def audit_events_purge( + app: Sphinx, env: BuildEnvironment, docname: str +) -> None: + """This is to remove traces of removed documents from env.audit_events.""" + fresh_audit_events = AuditEvents() + for name, args, (doc, target) in env.audit_events: + if doc != docname: + fresh_audit_events.add_event(name, args, (doc, target)) + + +def audit_events_merge( + app: Sphinx, + env: BuildEnvironment, + docnames: list[str], + other: BuildEnvironment, +) -> None: + """In Sphinx parallel builds, this merges audit_events from subprocesses.""" + for name, args, source in other.audit_events: + env.audit_events.add_event(name, args, source) + + +class AuditEvent(SphinxDirective): + has_content = True + required_arguments = 1 + optional_arguments = 2 + final_argument_whitespace = True + + _label = [ + sphinx_gettext( + "Raises an :ref:`auditing event ` " + "{name} with no arguments." + ), + sphinx_gettext( + "Raises an :ref:`auditing event ` " + "{name} with argument {args}." + ), + sphinx_gettext( + "Raises an :ref:`auditing event ` " + "{name} with arguments {args}." 
+ ), + ] + + def run(self) -> list[nodes.paragraph]: + name = self.arguments[0] + if len(self.arguments) >= 2 and self.arguments[1]: + args = [ + arg + for argument in self.arguments[1].strip("'\"").split(",") + if (arg := argument.strip()) + ] + else: + args = [] + ids = [] + try: + target = self.arguments[2].strip("\"'") + except (IndexError, TypeError): + target = None + if not target: + target = self.env.audit_events.id_for(name) + ids.append(target) + self.env.audit_events.add_event(name, args, (self.env.docname, target)) + + node = nodes.paragraph("", classes=["audit-hook"], ids=ids) + self.set_source_info(node) + if self.content: + node.rawsource = '\n'.join(self.content) # for gettext + self.state.nested_parse(self.content, self.content_offset, node) + else: + num_args = min(2, len(args)) + text = self._label[num_args].format( + name=f"``{name}``", + args=", ".join(f"``{a}``" for a in args), + ) + node.rawsource = text # for gettext + parsed, messages = self.state.inline_text(text, self.lineno) + node += parsed + node += messages + return [node] + + +class audit_event_list(nodes.General, nodes.Element): # noqa: N801 + pass + + +class AuditEventListDirective(SphinxDirective): + def run(self) -> list[audit_event_list]: + return [audit_event_list()] + + +class AuditEventListTransform(SphinxPostTransform): + default_priority = 500 + + def run(self) -> None: + if self.document.next_node(audit_event_list) is None: + return + + table = self._make_table(self.app.builder, self.env.docname) + for node in self.document.findall(audit_event_list): + node.replace_self(table) + + def _make_table(self, builder: Builder, docname: str) -> nodes.table: + table = nodes.table(cols=3) + group = nodes.tgroup( + "", + nodes.colspec(colwidth=30), + nodes.colspec(colwidth=55), + nodes.colspec(colwidth=15), + cols=3, + ) + head = nodes.thead() + body = nodes.tbody() + + table += group + group += head + group += body + + head += nodes.row( + "", + nodes.entry("", nodes.paragraph("", "Audit event")), + nodes.entry("", nodes.paragraph("", "Arguments")), + nodes.entry("", nodes.paragraph("", "References")), + ) + + for name, args, sources in builder.env.audit_events.rows(): + body += self._make_row(builder, docname, name, args, sources) + + return table + + @staticmethod + def _make_row( + builder: Builder, + docname: str, + name: str, + args: list[str], + sources: list[tuple[str, str]], + ) -> nodes.row: + row = nodes.row() + name_node = nodes.paragraph("", nodes.Text(name)) + row += nodes.entry("", name_node) + + args_node = nodes.paragraph() + for arg in args: + args_node += nodes.literal(arg, arg) + args_node += nodes.Text(", ") + if len(args_node.children) > 0: + args_node.children.pop() # remove trailing comma + row += nodes.entry("", args_node) + + backlinks_node = nodes.paragraph() + backlinks = enumerate(sorted(set(sources)), start=1) + for i, (doc, label) in backlinks: + if isinstance(label, str): + ref = nodes.reference("", f"[{i}]", internal=True) + try: + target = ( + f"{builder.get_relative_uri(docname, doc)}#{label}" + ) + except NoUri: + continue + else: + ref["refuri"] = target + backlinks_node += ref + row += nodes.entry("", backlinks_node) + return row + + +def setup(app: Sphinx): + app.add_directive("audit-event", AuditEvent) + app.add_directive("audit-event-table", AuditEventListDirective) + app.add_post_transform(AuditEventListTransform) + app.connect("builder-inited", initialise_audit_events) + app.connect("env-purge-doc", audit_events_purge) + app.connect("env-merge-info", 
audit_events_merge) + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py index 7916b178f1c..50065d34a2c 100644 --- a/Doc/tools/extensions/c_annotations.py +++ b/Doc/tools/extensions/c_annotations.py @@ -1,226 +1,302 @@ -""" - c_annotations.py - ~~~~~~~~~~~~~~~~ - - Supports annotations for C API elements: +"""Support annotations for C API elements. - * reference count annotations for C API functions. Based on - refcount.py and anno-api.py in the old Python documentation tools. +* Reference count annotations for C API functions. +* Stable ABI annotations +* Limited API annotations - * stable API annotations +Configuration: +* Set ``refcount_file`` to the path to the reference count data file. +* Set ``stable_abi_file`` to the path to stable ABI list. +""" - Usage: - * Set the `refcount_file` config value to the path to the reference - count data file. - * Set the `stable_abi_file` config value to the path to stable ABI list. +from __future__ import annotations - :copyright: Copyright 2007-2014 by Georg Brandl. - :license: Python license. -""" +import csv +import dataclasses +from pathlib import Path +from typing import TYPE_CHECKING -from os import path +import sphinx from docutils import nodes -from docutils.parsers.rst import directives -from docutils.parsers.rst import Directive from docutils.statemachine import StringList -from sphinx.locale import _ as sphinx_gettext -import csv - from sphinx import addnodes -from sphinx.domains.c import CObject +from sphinx.locale import _ as sphinx_gettext +from sphinx.util.docutils import SphinxDirective +if TYPE_CHECKING: + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata -REST_ROLE_MAP = { - 'function': 'func', - 'macro': 'macro', - 'member': 'member', - 'type': 'type', - 'var': 'data', +ROLE_TO_OBJECT_TYPE = { + "func": "function", + "macro": "macro", + "member": "member", + "type": "type", + "data": "var", } -class RCEntry: - def __init__(self, name): - self.name = name - self.args = [] - self.result_type = '' - self.result_refs = None - - -class Annotations: - def __init__(self, refcount_filename, stable_abi_file): - self.refcount_data = {} - with open(refcount_filename, encoding='utf8') as fp: - for line in fp: - line = line.strip() - if line[:1] in ("", "#"): - # blank lines and comments - continue - parts = line.split(":", 4) - if len(parts) != 5: - raise ValueError(f"Wrong field count in {line!r}") - function, type, arg, refcount, comment = parts - # Get the entry, creating it if needed: - try: - entry = self.refcount_data[function] - except KeyError: - entry = self.refcount_data[function] = RCEntry(function) - if not refcount or refcount == "null": - refcount = None - else: - refcount = int(refcount) - # Update the entry with the new parameter or the result - # information. 
- if arg: - entry.args.append((arg, type, refcount)) - else: - entry.result_type = type - entry.result_refs = refcount - - self.stable_abi_data = {} - with open(stable_abi_file, encoding='utf8') as fp: - for record in csv.DictReader(fp): - name = record['name'] - self.stable_abi_data[name] = record - - def add_annotations(self, app, doctree): - for node in doctree.findall(addnodes.desc_content): - par = node.parent - if par['domain'] != 'c': - continue - if not par[0].has_key('ids') or not par[0]['ids']: - continue - name = par[0]['ids'][0] - if name.startswith("c."): - name = name[2:] - - objtype = par['objtype'] - - # Stable ABI annotation. These have two forms: - # Part of the [Stable ABI](link). - # Part of the [Stable ABI](link) since version X.Y. - # For structs, there's some more info in the message: - # Part of the [Limited API](link) (as an opaque struct). - # Part of the [Stable ABI](link) (including all members). - # Part of the [Limited API](link) (Only some members are part - # of the stable ABI.). - # ... all of which can have "since version X.Y" appended. - record = self.stable_abi_data.get(name) - if record: - if record['role'] != objtype: - raise ValueError( - f"Object type mismatch in limited API annotation " - f"for {name}: {record['role']!r} != {objtype!r}") - stable_added = record['added'] - message = sphinx_gettext('Part of the') - message = message.center(len(message) + 2) - emph_node = nodes.emphasis(message, message, - classes=['stableabi']) - ref_node = addnodes.pending_xref( - 'Stable ABI', refdomain="std", reftarget='stable', - reftype='ref', refexplicit="False") - struct_abi_kind = record['struct_abi_kind'] - if struct_abi_kind in {'opaque', 'members'}: - ref_node += nodes.Text(sphinx_gettext('Limited API')) - else: - ref_node += nodes.Text(sphinx_gettext('Stable ABI')) - emph_node += ref_node - if struct_abi_kind == 'opaque': - emph_node += nodes.Text(' ' + sphinx_gettext('(as an opaque struct)')) - elif struct_abi_kind == 'full-abi': - emph_node += nodes.Text(' ' + sphinx_gettext('(including all members)')) - if record['ifdef_note']: - emph_node += nodes.Text(' ' + record['ifdef_note']) - if stable_added == '3.2': - # Stable ABI was introduced in 3.2. - pass - else: - emph_node += nodes.Text(' ' + sphinx_gettext('since version %s') % stable_added) - emph_node += nodes.Text('.') - if struct_abi_kind == 'members': - emph_node += nodes.Text( - ' ' + sphinx_gettext('(Only some members are part of the stable ABI.)')) - node.insert(0, emph_node) - - # Unstable API annotation. - if name.startswith('PyUnstable'): - warn_node = nodes.admonition( - classes=['unstable-c-api', 'warning']) - message = sphinx_gettext('This is') + ' ' - emph_node = nodes.emphasis(message, message) - ref_node = addnodes.pending_xref( - 'Unstable API', refdomain="std", - reftarget='unstable-c-api', - reftype='ref', refexplicit="False") - ref_node += nodes.Text(sphinx_gettext('Unstable API')) - emph_node += ref_node - emph_node += nodes.Text(sphinx_gettext('. 
It may change without warning in minor releases.')) - warn_node += emph_node - node.insert(0, warn_node) - - # Return value annotation - if objtype != 'function': - continue - entry = self.refcount_data.get(name) - if not entry: - continue - elif not entry.result_type.endswith("Object*"): - continue - classes = ['refcount'] - if entry.result_refs is None: - rc = sphinx_gettext('Return value: Always NULL.') - classes.append('return_null') - elif entry.result_refs: - rc = sphinx_gettext('Return value: New reference.') - classes.append('return_new_ref') - else: - rc = sphinx_gettext('Return value: Borrowed reference.') - classes.append('return_borrowed_ref') - node.insert(0, nodes.emphasis(rc, rc, classes=classes)) - - -def init_annotations(app): - annotations = Annotations( - path.join(app.srcdir, app.config.refcount_file), - path.join(app.srcdir, app.config.stable_abi_file), +@dataclasses.dataclass(slots=True) +class RefCountEntry: + # Name of the function. + name: str + # List of (argument name, type, refcount effect) tuples. + # (Currently not used. If it was, a dataclass might work better.) + args: list = dataclasses.field(default_factory=list) + # Return type of the function. + result_type: str = "" + # Reference count effect for the return value. + result_refs: int | None = None + + +@dataclasses.dataclass(frozen=True, slots=True) +class StableABIEntry: + # Role of the object. + # Source: Each [item_kind] in stable_abi.toml is mapped to a C Domain role. + role: str + # Name of the object. + # Source: [.*] in stable_abi.toml. + name: str + # Version when the object was added to the stable ABI. + # (Source: [.*.added] in stable_abi.toml. + added: str + # An explananatory blurb for the ifdef. + # Source: ``feature_macro.*.doc`` in stable_abi.toml. + ifdef_note: str + # Defines how much of the struct is exposed. Only relevant for structs. + # Source: [.*.struct_abi_kind] in stable_abi.toml. + struct_abi_kind: str + + +def read_refcount_data(refcount_filename: Path) -> dict[str, RefCountEntry]: + refcount_data = {} + refcounts = refcount_filename.read_text(encoding="utf8") + for line in refcounts.splitlines(): + line = line.strip() + if not line or line.startswith("#"): + # blank lines and comments + continue + + # Each line is of the form + # function ':' type ':' [param name] ':' [refcount effect] ':' [comment] + parts = line.split(":", 4) + if len(parts) != 5: + raise ValueError(f"Wrong field count in {line!r}") + function, type, arg, refcount, _comment = parts + + # Get the entry, creating it if needed: + try: + entry = refcount_data[function] + except KeyError: + entry = refcount_data[function] = RefCountEntry(function) + if not refcount or refcount == "null": + refcount = None + else: + refcount = int(refcount) + # Update the entry with the new parameter + # or the result information. 
+ if arg: + entry.args.append((arg, type, refcount)) + else: + entry.result_type = type + entry.result_refs = refcount + + return refcount_data + + +def read_stable_abi_data(stable_abi_file: Path) -> dict[str, StableABIEntry]: + stable_abi_data = {} + with open(stable_abi_file, encoding="utf8") as fp: + for record in csv.DictReader(fp): + name = record["name"] + stable_abi_data[name] = StableABIEntry(**record) + + return stable_abi_data + + +def add_annotations(app: Sphinx, doctree: nodes.document) -> None: + state = app.env.domaindata["c_annotations"] + refcount_data = state["refcount_data"] + stable_abi_data = state["stable_abi_data"] + for node in doctree.findall(addnodes.desc_content): + par = node.parent + if par["domain"] != "c": + continue + if not par[0].get("ids", None): + continue + name = par[0]["ids"][0].removeprefix("c.") + objtype = par["objtype"] + + # Stable ABI annotation. + if record := stable_abi_data.get(name): + if ROLE_TO_OBJECT_TYPE[record.role] != objtype: + msg = ( + f"Object type mismatch in limited API annotation for {name}: " + f"{ROLE_TO_OBJECT_TYPE[record.role]!r} != {objtype!r}" + ) + raise ValueError(msg) + annotation = _stable_abi_annotation(record) + node.insert(0, annotation) + + # Unstable API annotation. + if name.startswith("PyUnstable"): + annotation = _unstable_api_annotation() + node.insert(0, annotation) + + # Return value annotation + if objtype != "function": + continue + if name not in refcount_data: + continue + entry = refcount_data[name] + if not entry.result_type.endswith("Object*"): + continue + annotation = _return_value_annotation(entry.result_refs) + node.insert(0, annotation) + + +def _stable_abi_annotation(record: StableABIEntry) -> nodes.emphasis: + """Create the Stable ABI annotation. + + These have two forms: + Part of the `Stable ABI `_. + Part of the `Stable ABI `_ since version X.Y. + For structs, there's some more info in the message: + Part of the `Limited API `_ (as an opaque struct). + Part of the `Stable ABI `_ (including all members). + Part of the `Limited API `_ (Only some members are part + of the stable ABI.). + ... all of which can have "since version X.Y" appended. + """ + stable_added = record.added + message = sphinx_gettext("Part of the") + message = message.center(len(message) + 2) + emph_node = nodes.emphasis(message, message, classes=["stableabi"]) + ref_node = addnodes.pending_xref( + "Stable ABI", + refdomain="std", + reftarget="stable", + reftype="ref", + refexplicit="False", ) - app.connect('doctree-read', annotations.add_annotations) + struct_abi_kind = record.struct_abi_kind + if struct_abi_kind in {"opaque", "members"}: + ref_node += nodes.Text(sphinx_gettext("Limited API")) + else: + ref_node += nodes.Text(sphinx_gettext("Stable ABI")) + emph_node += ref_node + if struct_abi_kind == "opaque": + emph_node += nodes.Text(" " + sphinx_gettext("(as an opaque struct)")) + elif struct_abi_kind == "full-abi": + emph_node += nodes.Text( + " " + sphinx_gettext("(including all members)") + ) + if record.ifdef_note: + emph_node += nodes.Text(f" {record.ifdef_note}") + if stable_added == "3.2": + # Stable ABI was introduced in 3.2. 
+ pass + else: + emph_node += nodes.Text( + " " + sphinx_gettext("since version %s") % stable_added + ) + emph_node += nodes.Text(".") + if struct_abi_kind == "members": + msg = " " + sphinx_gettext( + "(Only some members are part of the stable ABI.)" + ) + emph_node += nodes.Text(msg) + return emph_node - class LimitedAPIList(Directive): - has_content = False - required_arguments = 0 - optional_arguments = 0 - final_argument_whitespace = True +def _unstable_api_annotation() -> nodes.admonition: + ref_node = addnodes.pending_xref( + "Unstable API", + nodes.Text(sphinx_gettext("Unstable API")), + refdomain="std", + reftarget="unstable-c-api", + reftype="ref", + refexplicit="False", + ) + emph_node = nodes.emphasis( + "This is ", + sphinx_gettext("This is") + " ", + ref_node, + nodes.Text( + sphinx_gettext( + ". It may change without warning in minor releases." + ) + ), + ) + return nodes.admonition( + "", + emph_node, + classes=["unstable-c-api", "warning"], + ) - def run(self): - content = [] - for record in annotations.stable_abi_data.values(): - role = REST_ROLE_MAP[record['role']] - name = record['name'] - content.append(f'* :c:{role}:`{name}`') - pnode = nodes.paragraph() - self.state.nested_parse(StringList(content), 0, pnode) - return [pnode] +def _return_value_annotation(result_refs: int | None) -> nodes.emphasis: + classes = ["refcount"] + if result_refs is None: + rc = sphinx_gettext("Return value: Always NULL.") + classes.append("return_null") + elif result_refs: + rc = sphinx_gettext("Return value: New reference.") + classes.append("return_new_ref") + else: + rc = sphinx_gettext("Return value: Borrowed reference.") + classes.append("return_borrowed_ref") + return nodes.emphasis(rc, rc, classes=classes) - app.add_directive('limited-api-list', LimitedAPIList) +class LimitedAPIList(SphinxDirective): + has_content = False + required_arguments = 0 + optional_arguments = 0 + final_argument_whitespace = True -def setup(app): - app.add_config_value('refcount_file', '', True) - app.add_config_value('stable_abi_file', '', True) - app.connect('builder-inited', init_annotations) + def run(self) -> list[nodes.Node]: + state = self.env.domaindata["c_annotations"] + content = [ + f"* :c:{record.role}:`{record.name}`" + for record in state["stable_abi_data"].values() + ] + node = nodes.paragraph() + self.state.nested_parse(StringList(content), 0, node) + return [node] + + +def init_annotations(app: Sphinx) -> None: + # Using domaindata is a bit hack-ish, + # but allows storing state without a global variable or closure. + app.env.domaindata["c_annotations"] = state = {} + state["refcount_data"] = read_refcount_data( + Path(app.srcdir, app.config.refcount_file) + ) + state["stable_abi_data"] = read_stable_abi_data( + Path(app.srcdir, app.config.stable_abi_file) + ) - # monkey-patch C object... 
- CObject.option_spec = { - 'noindex': directives.flag, - 'stableabi': directives.flag, - } - old_handle_signature = CObject.handle_signature - def new_handle_signature(self, sig, signode): - signode.parent['stableabi'] = 'stableabi' in self.options - return old_handle_signature(self, sig, signode) - CObject.handle_signature = new_handle_signature - return {'version': '1.0', 'parallel_read_safe': True} +def setup(app: Sphinx) -> ExtensionMetadata: + app.add_config_value("refcount_file", "", "env", types={str}) + app.add_config_value("stable_abi_file", "", "env", types={str}) + app.add_directive("limited-api-list", LimitedAPIList) + app.connect("builder-inited", init_annotations) + app.connect("doctree-read", add_annotations) + + if sphinx.version_info[:2] < (7, 2): + from docutils.parsers.rst import directives + from sphinx.domains.c import CObject + + # monkey-patch C object... + CObject.option_spec |= { + "no-index-entry": directives.flag, + "no-contents-entry": directives.flag, + } + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/escape4chm.py b/Doc/tools/extensions/escape4chm.py deleted file mode 100644 index 89970975b90..00000000000 --- a/Doc/tools/extensions/escape4chm.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Escape the `body` part of .chm source file to 7-bit ASCII, to fix visual -effect on some MBCS Windows systems. - -https://bugs.python.org/issue32174 -""" - -import pathlib -import re -from html.entities import codepoint2name - -from sphinx.util.logging import getLogger - -# escape the characters which codepoint > 0x7F -def _process(string): - def escape(matchobj): - codepoint = ord(matchobj.group(0)) - - name = codepoint2name.get(codepoint) - if name is None: - return '&#%d;' % codepoint - else: - return '&%s;' % name - - return re.sub(r'[^\x00-\x7F]', escape, string) - -def escape_for_chm(app, pagename, templatename, context, doctree): - # only works for .chm output - if getattr(app.builder, 'name', '') != 'htmlhelp': - return - - # escape the `body` part to 7-bit ASCII - body = context.get('body') - if body is not None: - context['body'] = _process(body) - -def fixup_keywords(app, exception): - # only works for .chm output - if getattr(app.builder, 'name', '') != 'htmlhelp' or exception: - return - - getLogger(__name__).info('fixing HTML escapes in keywords file...') - outdir = pathlib.Path(app.builder.outdir) - outname = app.builder.config.htmlhelp_basename - with open(outdir / (outname + '.hhk'), 'rb') as f: - index = f.read() - with open(outdir / (outname + '.hhk'), 'wb') as f: - f.write(index.replace(b''', b''')) - -def setup(app): - # `html-page-context` event emitted when the HTML builder has - # created a context dictionary to render a template with. - app.connect('html-page-context', escape_for_chm) - # `build-finished` event emitted when all the files have been - # output. - app.connect('build-finished', fixup_keywords) - - return {'version': '1.0', 'parallel_read_safe': True} diff --git a/Doc/tools/extensions/glossary_search.py b/Doc/tools/extensions/glossary_search.py index 7c93b1e4990..502b6cd95bc 100644 --- a/Doc/tools/extensions/glossary_search.py +++ b/Doc/tools/extensions/glossary_search.py @@ -1,63 +1,63 @@ -# -*- coding: utf-8 -*- -""" - glossary_search.py - ~~~~~~~~~~~~~~~~ +"""Feature search results for glossary items prominently.""" - Feature search results for glossary items prominently. +from __future__ import annotations - :license: Python license. 
-""" import json -import os.path -from docutils.nodes import definition_list_item +from pathlib import Path +from typing import TYPE_CHECKING + +from docutils import nodes from sphinx.addnodes import glossary from sphinx.util import logging +if TYPE_CHECKING: + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata logger = logging.getLogger(__name__) -STATIC_DIR = '_static' -JSON = 'glossary.json' -def process_glossary_nodes(app, doctree, fromdocname): +def process_glossary_nodes( + app: Sphinx, + doctree: nodes.document, + _docname: str, +) -> None: if app.builder.format != 'html' or app.builder.embedded: return - terms = {} + if hasattr(app.env, 'glossary_terms'): + terms = app.env.glossary_terms + else: + terms = app.env.glossary_terms = {} for node in doctree.findall(glossary): - for glossary_item in node.findall(definition_list_item): - term = glossary_item[0].astext().lower() - definition = glossary_item[1] + for glossary_item in node.findall(nodes.definition_list_item): + term = glossary_item[0].astext() + definition = glossary_item[-1] rendered = app.builder.render_partial(definition) - terms[term] = { - 'title': glossary_item[0].astext(), - 'body': rendered['html_body'] + terms[term.lower()] = { + 'title': term, + 'body': rendered['html_body'], } - if hasattr(app.env, 'glossary_terms'): - app.env.glossary_terms.update(terms) - else: - app.env.glossary_terms = terms -def on_build_finish(app, exc): - if not hasattr(app.env, 'glossary_terms'): - return - if not app.env.glossary_terms: +def write_glossary_json(app: Sphinx, _exc: Exception) -> None: + if not getattr(app.env, 'glossary_terms', None): return - logger.info(f'Writing {JSON}', color='green') - - dest_dir = os.path.join(app.outdir, STATIC_DIR) - os.makedirs(dest_dir, exist_ok=True) - - with open(os.path.join(dest_dir, JSON), 'w') as f: - json.dump(app.env.glossary_terms, f) + logger.info('Writing glossary.json', color='green') + dest = Path(app.outdir, '_static', 'glossary.json') + dest.parent.mkdir(exist_ok=True) + dest.write_text(json.dumps(app.env.glossary_terms), encoding='utf-8') -def setup(app): +def setup(app: Sphinx) -> ExtensionMetadata: app.connect('doctree-resolved', process_glossary_nodes) - app.connect('build-finished', on_build_finish) + app.connect('build-finished', write_glossary_json) - return {'version': '0.1', 'parallel_read_safe': True} + return { + 'version': '1.0', + 'parallel_read_safe': True, + 'parallel_write_safe': True, + } diff --git a/Doc/tools/extensions/lexers/__init__.py b/Doc/tools/extensions/lexers/__init__.py new file mode 100644 index 00000000000..e12ac5be813 --- /dev/null +++ b/Doc/tools/extensions/lexers/__init__.py @@ -0,0 +1,15 @@ +from .asdl_lexer import ASDLLexer +from .peg_lexer import PEGLexer + + +def setup(app): + # Used for highlighting Parser/Python.asdl in library/ast.rst + app.add_lexer("asdl", ASDLLexer) + # Used for highlighting Grammar/python.gram in reference/grammar.rst + app.add_lexer("peg", PEGLexer) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/lexers/asdl_lexer.py similarity index 62% rename from Doc/tools/extensions/asdl_highlight.py rename to Doc/tools/extensions/lexers/asdl_lexer.py index 42863a4b3bc..3a74174a1f7 100644 --- a/Doc/tools/extensions/asdl_highlight.py +++ b/Doc/tools/extensions/lexers/asdl_lexer.py @@ -1,15 +1,6 @@ -import sys -from pathlib import Path +from pygments.lexer import RegexLexer, 
bygroups, include +from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text -CPYTHON_ROOT = Path(__file__).resolve().parent.parent.parent.parent -sys.path.append(str(CPYTHON_ROOT / "Parser")) - -from pygments.lexer import RegexLexer, bygroups, include, words -from pygments.token import (Comment, Keyword, Name, Operator, - Punctuation, Text) - -from asdl import builtin_types -from sphinx.highlighting import lexers class ASDLLexer(RegexLexer): name = "ASDL" @@ -34,7 +25,10 @@ class ASDLLexer(RegexLexer): r"(\w+)(\*\s|\?\s|\s)(\w+)", bygroups(Name.Builtin.Pseudo, Operator, Name), ), - (words(builtin_types), Name.Builtin), + # Keep in line with ``builtin_types`` from Parser/asdl.py. + # ASDL's 4 builtin types are + # constant, identifier, int, string + ("constant|identifier|int|string", Name.Builtin), (r"attributes", Name.Builtin), ( _name + _text_ws + "(=)", @@ -46,8 +40,3 @@ class ASDLLexer(RegexLexer): (r".", Text), ], } - - -def setup(app): - lexers["asdl"] = ASDLLexer() - return {'version': '1.0', 'parallel_read_safe': True} diff --git a/Doc/tools/extensions/peg_highlight.py b/Doc/tools/extensions/lexers/peg_lexer.py similarity index 94% rename from Doc/tools/extensions/peg_highlight.py rename to Doc/tools/extensions/lexers/peg_lexer.py index 5ab5530d269..06f9f8eb312 100644 --- a/Doc/tools/extensions/peg_highlight.py +++ b/Doc/tools/extensions/lexers/peg_lexer.py @@ -1,8 +1,6 @@ from pygments.lexer import RegexLexer, bygroups, include from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text -from sphinx.highlighting import lexers - class PEGLexer(RegexLexer): """Pygments Lexer for PEG grammar (.gram) files @@ -81,8 +79,3 @@ class PEGLexer(RegexLexer): (r".", Text), ], } - - -def setup(app): - lexers["peg"] = PEGLexer() - return {"version": "1.0", "parallel_read_safe": True} diff --git a/Doc/tools/extensions/patchlevel.py b/Doc/tools/extensions/patchlevel.py index 617f28c2527..9ccaec3dd5c 100644 --- a/Doc/tools/extensions/patchlevel.py +++ b/Doc/tools/extensions/patchlevel.py @@ -1,68 +1,81 @@ -# -*- coding: utf-8 -*- -""" - patchlevel.py - ~~~~~~~~~~~~~ +"""Extract version information from Include/patchlevel.h.""" - Extract version info from Include/patchlevel.h. - Adapted from Doc/tools/getversioninfo. +import re +import sys +from pathlib import Path +from typing import Literal, NamedTuple - :copyright: 2007-2008 by Georg Brandl. - :license: Python license. -""" +CPYTHON_ROOT = Path( + __file__, # cpython/Doc/tools/extensions/patchlevel.py + "..", # cpython/Doc/tools/extensions + "..", # cpython/Doc/tools + "..", # cpython/Doc + "..", # cpython +).resolve() +PATCHLEVEL_H = CPYTHON_ROOT / "Include" / "patchlevel.h" -from __future__ import print_function +RELEASE_LEVELS = { + "PY_RELEASE_LEVEL_ALPHA": "alpha", + "PY_RELEASE_LEVEL_BETA": "beta", + "PY_RELEASE_LEVEL_GAMMA": "candidate", + "PY_RELEASE_LEVEL_FINAL": "final", +} -import os -import re -import sys -def get_header_version_info(srcdir): - patchlevel_h = os.path.join(srcdir, '..', 'Include', 'patchlevel.h') +class version_info(NamedTuple): # noqa: N801 + major: int #: Major release number + minor: int #: Minor release number + micro: int #: Patch release number + releaselevel: Literal["alpha", "beta", "candidate", "final"] + serial: int #: Serial release number - # This won't pick out all #defines, but it will pick up the ones we - # care about. 
- rx = re.compile(r'\s*#define\s+([a-zA-Z][a-zA-Z_0-9]*)\s+([a-zA-Z_0-9]+)') - d = {} - with open(patchlevel_h) as f: - for line in f: - m = rx.match(line) - if m is not None: - name, value = m.group(1, 2) - d[name] = value +def get_header_version_info() -> version_info: + # Capture PY_ prefixed #defines. + pat = re.compile(r"\s*#define\s+(PY_\w*)\s+(\w+)", re.ASCII) - release = version = '%s.%s' % (d['PY_MAJOR_VERSION'], d['PY_MINOR_VERSION']) - micro = int(d['PY_MICRO_VERSION']) - release += '.' + str(micro) + defines = {} + patchlevel_h = PATCHLEVEL_H.read_text(encoding="utf-8") + for line in patchlevel_h.splitlines(): + if (m := pat.match(line)) is not None: + name, value = m.groups() + defines[name] = value - level = d['PY_RELEASE_LEVEL'] - suffixes = { - 'PY_RELEASE_LEVEL_ALPHA': 'a', - 'PY_RELEASE_LEVEL_BETA': 'b', - 'PY_RELEASE_LEVEL_GAMMA': 'rc', - } - if level != 'PY_RELEASE_LEVEL_FINAL': - release += suffixes[level] + str(int(d['PY_RELEASE_SERIAL'])) - return version, release + return version_info( + major=int(defines["PY_MAJOR_VERSION"]), + minor=int(defines["PY_MINOR_VERSION"]), + micro=int(defines["PY_MICRO_VERSION"]), + releaselevel=RELEASE_LEVELS[defines["PY_RELEASE_LEVEL"]], + serial=int(defines["PY_RELEASE_SERIAL"]), + ) -def get_sys_version_info(): - major, minor, micro, level, serial = sys.version_info - release = version = '%s.%s' % (major, minor) - release += '.%s' % micro - if level != 'final': - release += '%s%s' % (level[0], serial) +def format_version_info(info: version_info) -> tuple[str, str]: + version = f"{info.major}.{info.minor}" + release = f"{info.major}.{info.minor}.{info.micro}" + if info.releaselevel != "final": + suffix = {"alpha": "a", "beta": "b", "candidate": "rc"} + release += f"{suffix[info.releaselevel]}{info.serial}" return version, release def get_version_info(): try: - return get_header_version_info('.') - except (IOError, OSError): - version, release = get_sys_version_info() - print('Can\'t get version info from Include/patchlevel.h, ' \ - 'using version of this interpreter (%s).' 
% release, file=sys.stderr) + info = get_header_version_info() + return format_version_info(info) + except OSError: + version, release = format_version_info(sys.version_info) + print( + f"Failed to get version info from Include/patchlevel.h, " + f"using version of this interpreter ({release}).", + file=sys.stderr, + ) return version, release -if __name__ == '__main__': - print(get_header_version_info('.')[1]) + +if __name__ == "__main__": + short_ver, full_ver = format_version_info(get_header_version_info()) + if sys.argv[1:2] == ["--short"]: + print(short_ver) + else: + print(full_ver) diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 7a2a85346b3..6138246ccb4 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -15,14 +15,14 @@ from time import asctime from pprint import pformat -from docutils import nodes, utils +from docutils import nodes from docutils.io import StringOutput -from docutils.parsers.rst import Directive -from docutils.utils import new_document +from docutils.parsers.rst import directives +from docutils.utils import new_document, unescape from sphinx import addnodes from sphinx.builders import Builder -from sphinx.domains.python import PyFunction, PyMethod -from sphinx.errors import NoUri +from sphinx.domains.changeset import VersionChange, versionlabels, versionlabel_classes +from sphinx.domains.python import PyFunction, PyMethod, PyModule from sphinx.locale import _ as sphinx_gettext from sphinx.util import logging from sphinx.util.docutils import SphinxDirective @@ -48,11 +48,14 @@ std.token_re = re.compile(r'`((~?[\w-]*:)?\w+)`') +# backport :no-index: +PyModule.option_spec['no-index'] = directives.flag + # Support for marking up and linking to bugs.python.org issues def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): - issue = utils.unescape(text) + issue = unescape(text) # sanity check: there are no bpo issues within these two values if 47261 < int(issue) < 400000: msg = inliner.reporter.error(f'The BPO ID {text!r} seems too high -- ' @@ -67,7 +70,7 @@ def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): # Support for marking up and linking to GitHub issues def gh_issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): - issue = utils.unescape(text) + issue = unescape(text) # sanity check: all GitHub issues have ID >= 32426 # even though some of them are also valid BPO IDs if int(issue) < 32426: @@ -82,7 +85,7 @@ def gh_issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): # Support for marking up implementation details -class ImplementationDetail(Directive): +class ImplementationDetail(SphinxDirective): has_content = True final_argument_whitespace = True @@ -179,143 +182,6 @@ def parse_platforms(self): return platforms -# Support for documenting audit event - -def audit_events_purge(app, env, docname): - """This is to remove from env.all_audit_events old traces of removed - documents. - """ - if not hasattr(env, 'all_audit_events'): - return - fresh_all_audit_events = {} - for name, event in env.all_audit_events.items(): - event["source"] = [(d, t) for d, t in event["source"] if d != docname] - if event["source"]: - # Only keep audit_events that have at least one source. - fresh_all_audit_events[name] = event - env.all_audit_events = fresh_all_audit_events - - -def audit_events_merge(app, env, docnames, other): - """In Sphinx parallel builds, this merges env.all_audit_events from - subprocesses. 
- - all_audit_events is a dict of names, with values like: - {'source': [(docname, target), ...], 'args': args} - """ - if not hasattr(other, 'all_audit_events'): - return - if not hasattr(env, 'all_audit_events'): - env.all_audit_events = {} - for name, value in other.all_audit_events.items(): - if name in env.all_audit_events: - env.all_audit_events[name]["source"].extend(value["source"]) - else: - env.all_audit_events[name] = value - - -class AuditEvent(Directive): - - has_content = True - required_arguments = 1 - optional_arguments = 2 - final_argument_whitespace = True - - _label = [ - sphinx_gettext("Raises an :ref:`auditing event ` {name} with no arguments."), - sphinx_gettext("Raises an :ref:`auditing event ` {name} with argument {args}."), - sphinx_gettext("Raises an :ref:`auditing event ` {name} with arguments {args}."), - ] - - @property - def logger(self): - cls = type(self) - return logging.getLogger(cls.__module__ + "." + cls.__name__) - - def run(self): - name = self.arguments[0] - if len(self.arguments) >= 2 and self.arguments[1]: - args = (a.strip() for a in self.arguments[1].strip("'\"").split(",")) - args = [a for a in args if a] - else: - args = [] - - label = self._label[min(2, len(args))] - text = label.format(name="``{}``".format(name), - args=", ".join("``{}``".format(a) for a in args if a)) - - env = self.state.document.settings.env - if not hasattr(env, 'all_audit_events'): - env.all_audit_events = {} - - new_info = { - 'source': [], - 'args': args - } - info = env.all_audit_events.setdefault(name, new_info) - if info is not new_info: - if not self._do_args_match(info['args'], new_info['args']): - self.logger.warning( - "Mismatched arguments for audit-event {}: {!r} != {!r}" - .format(name, info['args'], new_info['args']) - ) - - ids = [] - try: - target = self.arguments[2].strip("\"'") - except (IndexError, TypeError): - target = None - if not target: - target = "audit_event_{}_{}".format( - re.sub(r'\W', '_', name), - len(info['source']), - ) - ids.append(target) - - info['source'].append((env.docname, target)) - - pnode = nodes.paragraph(text, classes=["audit-hook"], ids=ids) - pnode.line = self.lineno - if self.content: - self.state.nested_parse(self.content, self.content_offset, pnode) - else: - n, m = self.state.inline_text(text, self.lineno) - pnode.extend(n + m) - - return [pnode] - - # This list of sets are allowable synonyms for event argument names. - # If two names are in the same set, they are treated as equal for the - # purposes of warning. This won't help if number of arguments is - # different! 
- _SYNONYMS = [ - {"file", "path", "fd"}, - ] - - def _do_args_match(self, args1, args2): - if args1 == args2: - return True - if len(args1) != len(args2): - return False - for a1, a2 in zip(args1, args2): - if a1 == a2: - continue - if any(a1 in s and a2 in s for s in self._SYNONYMS): - continue - return False - return True - - -class audit_event_list(nodes.General, nodes.Element): - pass - - -class AuditEventListDirective(Directive): - - def run(self): - return [audit_event_list('')] - - # Support for documenting decorators class PyDecoratorMixin(object): @@ -395,58 +261,34 @@ def run(self): # Support for documenting version of removal in deprecations -class DeprecatedRemoved(Directive): - has_content = True +class DeprecatedRemoved(VersionChange): required_arguments = 2 - optional_arguments = 1 - final_argument_whitespace = True - option_spec = {} - _deprecated_label = sphinx_gettext('Deprecated since version {deprecated}, will be removed in version {removed}') - _removed_label = sphinx_gettext('Deprecated since version {deprecated}, removed in version {removed}') + _deprecated_label = sphinx_gettext('Deprecated since version %s, will be removed in version %s') + _removed_label = sphinx_gettext('Deprecated since version %s, removed in version %s') def run(self): - node = addnodes.versionmodified() - node.document = self.state.document - node['type'] = 'deprecated-removed' - version = (self.arguments[0], self.arguments[1]) - node['version'] = version - env = self.state.document.settings.env - current_version = tuple(int(e) for e in env.config.version.split('.')) - removed_version = tuple(int(e) for e in self.arguments[1].split('.')) + # Replace the first two arguments (deprecated version and removed version) + # with a single tuple of both versions. + version_deprecated = self.arguments[0] + version_removed = self.arguments.pop(1) + self.arguments[0] = version_deprecated, version_removed + + # Set the label based on if we have reached the removal version + current_version = tuple(map(int, self.config.version.split('.'))) + removed_version = tuple(map(int, version_removed.split('.'))) if current_version < removed_version: - label = self._deprecated_label - else: - label = self._removed_label - - text = label.format(deprecated=self.arguments[0], removed=self.arguments[1]) - if len(self.arguments) == 3: - inodes, messages = self.state.inline_text(self.arguments[2], - self.lineno+1) - para = nodes.paragraph(self.arguments[2], '', *inodes, translatable=False) - node.append(para) + versionlabels[self.name] = self._deprecated_label + versionlabel_classes[self.name] = 'deprecated' else: - messages = [] - if self.content: - self.state.nested_parse(self.content, self.content_offset, node) - if len(node): - if isinstance(node[0], nodes.paragraph) and node[0].rawsource: - content = nodes.inline(node[0].rawsource, translatable=True) - content.source = node[0].source - content.line = node[0].line - content += node[0].children - node[0].replace_self(nodes.paragraph('', '', content, translatable=False)) - node[0].insert(0, nodes.inline('', '%s: ' % text, - classes=['versionmodified'])) - else: - para = nodes.paragraph('', '', - nodes.inline('', '%s.' 
% text, - classes=['versionmodified']), - translatable=False) - node.append(para) - env = self.state.document.settings.env - env.get_domain('changeset').note_changeset(node) - return [node] + messages + versionlabels[self.name] = self._removed_label + versionlabel_classes[self.name] = 'removed' + try: + return super().run() + finally: + # reset versionlabels and versionlabel_classes + versionlabels[self.name] = '' + versionlabel_classes[self.name] = '' # Support for including Misc/NEWS @@ -456,7 +298,7 @@ def run(self): whatsnew_re = re.compile(r"(?im)^what's new in (.*?)\??$") -class MiscNews(Directive): +class MiscNews(SphinxDirective): has_content = False required_arguments = 1 optional_arguments = 0 @@ -471,7 +313,7 @@ def run(self): if not source_dir: source_dir = path.dirname(path.abspath(source)) fpath = path.join(source_dir, fname) - self.state.document.settings.record_dependencies.add(fpath) + self.env.note_dependency(path.abspath(fpath)) try: with io.open(fpath, encoding='utf-8') as fp: content = fp.read() @@ -603,70 +445,6 @@ def parse_monitoring_event(env, sig, signode): return sig -def process_audit_events(app, doctree, fromdocname): - for node in doctree.findall(audit_event_list): - break - else: - return - - env = app.builder.env - - table = nodes.table(cols=3) - group = nodes.tgroup( - '', - nodes.colspec(colwidth=30), - nodes.colspec(colwidth=55), - nodes.colspec(colwidth=15), - cols=3, - ) - head = nodes.thead() - body = nodes.tbody() - - table += group - group += head - group += body - - row = nodes.row() - row += nodes.entry('', nodes.paragraph('', nodes.Text('Audit event'))) - row += nodes.entry('', nodes.paragraph('', nodes.Text('Arguments'))) - row += nodes.entry('', nodes.paragraph('', nodes.Text('References'))) - head += row - - for name in sorted(getattr(env, "all_audit_events", ())): - audit_event = env.all_audit_events[name] - - row = nodes.row() - node = nodes.paragraph('', nodes.Text(name)) - row += nodes.entry('', node) - - node = nodes.paragraph() - for i, a in enumerate(audit_event['args']): - if i: - node += nodes.Text(", ") - node += nodes.literal(a, nodes.Text(a)) - row += nodes.entry('', node) - - node = nodes.paragraph() - backlinks = enumerate(sorted(set(audit_event['source'])), start=1) - for i, (doc, label) in backlinks: - if isinstance(label, str): - ref = nodes.reference("", nodes.Text("[{}]".format(i)), internal=True) - try: - ref['refuri'] = "{}#{}".format( - app.builder.get_relative_uri(fromdocname, doc), - label, - ) - except NoUri: - continue - node += ref - row += nodes.entry('', node) - - body += row - - for node in doctree.findall(audit_event_list): - node.replace_self(table) - - def patch_pairindextypes(app, _env) -> None: """Remove all entries from ``pairindextypes`` before writing POT files. 
@@ -696,8 +474,6 @@ def setup(app): app.add_role('gh', gh_issue_role) app.add_directive('impl-detail', ImplementationDetail) app.add_directive('availability', Availability) - app.add_directive('audit-event', AuditEvent) - app.add_directive('audit-event-table', AuditEventListDirective) app.add_directive('deprecated-removed', DeprecatedRemoved) app.add_builder(PydocTopicsBuilder) app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature) @@ -712,7 +488,4 @@ def setup(app): app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod) app.add_directive('miscnews', MiscNews) app.connect('env-check-consistency', patch_pairindextypes) - app.connect('doctree-resolved', process_audit_events) - app.connect('env-merge-info', audit_events_merge) - app.connect('env-purge-doc', audit_events_purge) return {'version': '1.0', 'parallel_read_safe': True} diff --git a/Doc/tools/static/glossary_search.js b/Doc/tools/static/glossary_search.js new file mode 100644 index 00000000000..13d728dc027 --- /dev/null +++ b/Doc/tools/static/glossary_search.js @@ -0,0 +1,47 @@ +"use strict"; + +const GLOSSARY_PAGE = "glossary.html"; + +const glossary_search = async () => { + const response = await fetch("_static/glossary.json"); + if (!response.ok) { + throw new Error("Failed to fetch glossary.json"); + } + const glossary = await response.json(); + + const params = new URLSearchParams(document.location.search).get("q"); + if (!params) { + return; + } + + const searchParam = params.toLowerCase(); + const glossaryItem = glossary[searchParam]; + if (!glossaryItem) { + return; + } + + // set up the title text with a link to the glossary page + const glossaryTitle = document.getElementById("glossary-title"); + glossaryTitle.textContent = "Glossary: " + glossaryItem.title; + const linkTarget = searchParam.replace(/ /g, "-"); + glossaryTitle.href = GLOSSARY_PAGE + "#term-" + linkTarget; + + // rewrite any anchor links (to other glossary terms) + // to have a full reference to the glossary page + const glossaryBody = document.getElementById("glossary-body"); + glossaryBody.innerHTML = glossaryItem.body; + const anchorLinks = glossaryBody.querySelectorAll('a[href^="#"]'); + anchorLinks.forEach(function (link) { + const currentUrl = link.getAttribute("href"); + link.href = GLOSSARY_PAGE + currentUrl; + }); + + const glossaryResult = document.getElementById("glossary-result"); + glossaryResult.style.display = ""; +}; + +if (document.readyState !== "loading") { + glossary_search().catch(console.error); +} else { + document.addEventListener("DOMContentLoaded", glossary_search); +} diff --git a/Doc/tools/templates/download.html b/Doc/tools/templates/download.html index b5353d6fb77..45ec436fee7 100644 --- a/Doc/tools/templates/download.html +++ b/Doc/tools/templates/download.html @@ -1,64 +1,75 @@ {% extends "layout.html" %} -{% set title = 'Download' %} +{% set title = _('Download') %} {% if daily is defined %} - {% set dlbase = pathto('archives', 1) %} + {% set dl_base = pathto('archives', resource=True) %} + {% set dl_version = version %} {% else %} {# The link below returns HTTP 404 until the first related alpha release. This is expected; use daily documentation builds for CPython development. #} - {% set dlbase = 'https://docs.python.org/ftp/python/doc/' + release %} + {% set dl_base = 'https://www.python.org/ftp/python/doc/' + release %} + {% set dl_version = release %} {% endif %} {% block body %} -

Download Python {{ release }} Documentation

+

{% trans %}Download Python {{ dl_version }} Documentation{% endtrans %}

-{% if last_updated %}

Last updated on: {{ last_updated }}.

{% endif %} +{% if last_updated %}

{% trans %}Last updated on: {{ last_updated }}.{% endtrans %}

{% endif %} -

To download an archive containing all the documents for this version of -Python in one of various formats, follow one of links in this table.

+

{% trans %}To download an archive containing all the documents for this version of +Python in one of various formats, follow one of links in this table.{% endtrans %}

- - - - + + + + - - - + + + + - - - + + + + - - - + + + + - - + + + + + + + +
FormatPacked as .zipPacked as .tar.bz2
PDF (US-Letter paper size)Download (ca. 13 MiB)Download (ca. 13 MiB)
{% trans %}Format{% endtrans %}{% trans %}Packed as .zip{% endtrans %}{% trans %}Packed as .tar.bz2{% endtrans %}
PDF (A4 paper size)Download (ca. 13 MiB)Download (ca. 13 MiB)
{% trans %}PDF{% endtrans %}{% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}
HTMLDownload (ca. 9 MiB)Download (ca. 6 MiB)
{% trans %}HTML{% endtrans %}{% trans download_size="13" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="8" %}Download (ca. {{ download_size }} MiB){% endtrans %}
Plain TextDownload (ca. 3 MiB)Download (ca. 2 MiB)
{% trans %}Plain text{% endtrans %}{% trans download_size="4" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="3" %}Download (ca. {{ download_size }} MiB){% endtrans %}
EPUBDownload (ca. 5 MiB)
{% trans %}Texinfo{% endtrans %}{% trans download_size="9" %}Download (ca. {{ download_size }} MiB){% endtrans %}{% trans download_size="7" %}Download (ca. {{ download_size }} MiB){% endtrans %}
{% trans %}EPUB{% endtrans %}{% trans download_size="6" %}Download (ca. {{ download_size }} MiB){% endtrans %}
-

These archives contain all the content in the documentation.

+

{% trans %}These archives contain all the content in the documentation.{% endtrans %}

-

Unpacking

+

{% trans %}Unpacking{% endtrans %}

-

Unix users should download the .tar.bz2 archives; these are bzipped tar +

{% trans %}Unix users should download the .tar.bz2 archives; these are bzipped tar archives and can be handled in the usual way using tar and the bzip2 program. The Info-ZIP unzip program can be used to handle the ZIP archives if desired. The .tar.bz2 archives provide the -best compression and fastest download times.

+best compression and fastest download times.{% endtrans %}

-

Windows users can use the ZIP archives since those are customary on that -platform. These are created on Unix using the Info-ZIP zip program.

+

{% trans %}Windows users can use the ZIP archives since those are customary on that +platform. These are created on Unix using the Info-ZIP zip program.{% endtrans %}

-

Problems

+

{% trans %}Problems{% endtrans %}

-

If you have comments or suggestions for the Python documentation, please send -email to docs@python.org.

+

{% trans %}If you have comments or suggestions for the Python documentation, please send +email to docs@python.org.{% endtrans %}

{% endblock %} diff --git a/Doc/tools/templates/indexcontent.html b/Doc/tools/templates/indexcontent.html index 6f854e86ab8..f2e9fbb0106 100644 --- a/Doc/tools/templates/indexcontent.html +++ b/Doc/tools/templates/indexcontent.html @@ -33,6 +33,8 @@

{{ docstitle|e }}

{% trans %}C API reference{% endtrans %}

+ diff --git a/Doc/tools/templates/search.html b/Doc/tools/templates/search.html index 85297446138..6ddac5f828b 100644 --- a/Doc/tools/templates/search.html +++ b/Doc/tools/templates/search.html @@ -2,61 +2,16 @@ {% block extrahead %} {{ super() }} - + +{% endblock %} +{% block searchresults %} +
+ {# For glossary_search.js #} + +
{% endblock %} diff --git a/Doc/tutorial/appendix.rst b/Doc/tutorial/appendix.rst index b8faf756698..da664f2f360 100644 --- a/Doc/tutorial/appendix.rst +++ b/Doc/tutorial/appendix.rst @@ -14,8 +14,8 @@ There are two variants of the interactive :term:`REPL`. The classic basic interpreter is supported on all platforms with minimal line control capabilities. -On Unix-like systems (e.g. Linux or macOS) with :mod:`curses` and -:mod:`readline` support, a new interactive shell is used by default. +On Windows, or Unix-like systems with :mod:`curses` support, +a new interactive shell is used by default. This one supports color, multiline editing, history browsing, and paste mode. To disable color, see :ref:`using-on-controlling-color` for details. Function keys provide some additional functionality. diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst index 1b64741c349..492568961d8 100644 --- a/Doc/tutorial/classes.rst +++ b/Doc/tutorial/classes.rst @@ -276,8 +276,8 @@ definition looked like this:: then ``MyClass.i`` and ``MyClass.f`` are valid attribute references, returning an integer and a function object, respectively. Class attributes can also be assigned to, so you can change the value of ``MyClass.i`` by assignment. -:attr:`!__doc__` is also a valid attribute, returning the docstring belonging to -the class: ``"A simple example class"``. +:attr:`~type.__doc__` is also a valid attribute, returning the docstring +belonging to the class: ``"A simple example class"``. Class *instantiation* uses function notation. Just pretend that the class object is a parameterless function that returns a new instance of the class. @@ -688,6 +688,11 @@ current class name with leading underscore(s) stripped. This mangling is done without regard to the syntactic position of the identifier, as long as it occurs within the definition of a class. +.. seealso:: + + The :ref:`private name mangling specifications ` + for details and special cases. + Name mangling is helpful for letting subclasses override methods without breaking intraclass method calls. For example:: @@ -927,6 +932,6 @@ Examples:: .. [#] Except for one thing. Module objects have a secret read-only attribute called :attr:`~object.__dict__` which returns the dictionary used to implement the module's - namespace; the name :attr:`~object.__dict__` is an attribute but not a global name. + namespace; the name ``__dict__`` is an attribute but not a global name. Obviously, using this violates the abstraction of namespace implementation, and should be restricted to things like post-mortem debuggers. diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index 77444f9cb83..fd765e58ff2 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -61,7 +61,7 @@ they appear in the sequence. For example (no pun intended): :: >>> # Measure some strings: - ... words = ['cat', 'window', 'defenestrate'] + >>> words = ['cat', 'window', 'defenestrate'] >>> for w in words: ... print(w, len(w)) ... @@ -160,21 +160,59 @@ arguments. In chapter :ref:`tut-structures`, we will discuss in more detail abo .. _tut-break: -:keyword:`!break` and :keyword:`!continue` Statements, and :keyword:`!else` Clauses on Loops -============================================================================================ +:keyword:`!break` and :keyword:`!continue` Statements +===================================================== The :keyword:`break` statement breaks out of the innermost enclosing -:keyword:`for` or :keyword:`while` loop. 
+:keyword:`for` or :keyword:`while` loop:: -A :keyword:`!for` or :keyword:`!while` loop can include an :keyword:`!else` clause. + >>> for n in range(2, 10): + ... for x in range(2, n): + ... if n % x == 0: + ... print(f"{n} equals {x} * {n//x}") + ... break + ... + 4 equals 2 * 2 + 6 equals 2 * 3 + 8 equals 2 * 4 + 9 equals 3 * 3 + +The :keyword:`continue` statement continues with the next +iteration of the loop:: + + >>> for num in range(2, 10): + ... if num % 2 == 0: + ... print(f"Found an even number {num}") + ... continue + ... print(f"Found an odd number {num}") + ... + Found an even number 2 + Found an odd number 3 + Found an even number 4 + Found an odd number 5 + Found an even number 6 + Found an odd number 7 + Found an even number 8 + Found an odd number 9 + +.. _tut-for-else: + +:keyword:`!else` Clauses on Loops +================================= + +In a :keyword:`!for` or :keyword:`!while` loop the :keyword:`!break` statement +may be paired with an :keyword:`!else` clause. If the loop finishes without +executing the :keyword:`!break`, the :keyword:`!else` clause executes. In a :keyword:`for` loop, the :keyword:`!else` clause is executed -after the loop reaches its final iteration. +after the loop finishes its final iteration, that is, if no break occurred. In a :keyword:`while` loop, it's executed after the loop's condition becomes false. -In either kind of loop, the :keyword:`!else` clause is **not** executed -if the loop was terminated by a :keyword:`break`. +In either kind of loop, the :keyword:`!else` clause is **not** executed if the +loop was terminated by a :keyword:`break`. Of course, other ways of ending the +loop early, such as a :keyword:`return` or a raised exception, will also skip +execution of the :keyword:`else` clause. This is exemplified in the following :keyword:`!for` loop, which searches for prime numbers:: @@ -198,32 +236,19 @@ which searches for prime numbers:: 9 equals 3 * 3 (Yes, this is the correct code. Look closely: the ``else`` clause belongs to -the :keyword:`for` loop, **not** the :keyword:`if` statement.) +the ``for`` loop, **not** the ``if`` statement.) -When used with a loop, the ``else`` clause has more in common with the -``else`` clause of a :keyword:`try` statement than it does with that of -:keyword:`if` statements: a :keyword:`try` statement's ``else`` clause runs -when no exception occurs, and a loop's ``else`` clause runs when no ``break`` -occurs. For more on the :keyword:`!try` statement and exceptions, see -:ref:`tut-handling`. - -The :keyword:`continue` statement, also borrowed from C, continues with the next -iteration of the loop:: +One way to think of the else clause is to imagine it paired with the ``if`` +inside the loop. As the loop executes, it will run a sequence like +if/if/if/else. The ``if`` is inside the loop, encountered a number of times. If +the condition is ever true, a ``break`` will happen. If the condition is never +true, the ``else`` clause outside the loop will execute. - >>> for num in range(2, 10): - ... if num % 2 == 0: - ... print("Found an even number", num) - ... continue - ... print("Found an odd number", num) - ... 
- Found an even number 2 - Found an odd number 3 - Found an even number 4 - Found an odd number 5 - Found an even number 6 - Found an odd number 7 - Found an even number 8 - Found an odd number 9 +When used with a loop, the ``else`` clause has more in common with the ``else`` +clause of a :keyword:`try` statement than it does with that of ``if`` +statements: a ``try`` statement's ``else`` clause runs when no exception +occurs, and a loop's ``else`` clause runs when no ``break`` occurs. For more on +the ``try`` statement and exceptions, see :ref:`tut-handling`. .. _tut-pass: @@ -445,7 +470,7 @@ boundary:: ... print() ... >>> # Now call the function we just defined: - ... fib(2000) + >>> fib(2000) 0 1 1 2 3 5 8 13 21 34 55 89 144 233 377 610 987 1597 .. index:: diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst index a1492298bdb..73f17adeea7 100644 --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -383,16 +383,16 @@ A tuple consists of a number of values separated by commas, for instance:: >>> t (12345, 54321, 'hello!') >>> # Tuples may be nested: - ... u = t, (1, 2, 3, 4, 5) + >>> u = t, (1, 2, 3, 4, 5) >>> u ((12345, 54321, 'hello!'), (1, 2, 3, 4, 5)) >>> # Tuples are immutable: - ... t[0] = 88888 + >>> t[0] = 88888 Traceback (most recent call last): File "", line 1, in TypeError: 'tuple' object does not support item assignment >>> # but they can contain mutable objects: - ... v = ([1, 2, 3], [3, 2, 1]) + >>> v = ([1, 2, 3], [3, 2, 1]) >>> v ([1, 2, 3], [3, 2, 1]) @@ -465,7 +465,7 @@ Here is a brief demonstration:: False >>> # Demonstrate set operations on unique letters from two words - ... + >>> >>> a = set('abracadabra') >>> b = set('alacazam') >>> a # unique letters in a diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst index 981b14f5a42..24fa01428fd 100644 --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -45,14 +45,20 @@ programs, however, and result in error messages as shown here:: >>> 10 * (1/0) Traceback (most recent call last): File "", line 1, in + 10 * (1/0) + ~^~ ZeroDivisionError: division by zero >>> 4 + spam*3 Traceback (most recent call last): File "", line 1, in + 4 + spam*3 + ^^^^ NameError: name 'spam' is not defined >>> '2' + 2 Traceback (most recent call last): File "", line 1, in + '2' + 2 + ~~~~^~~ TypeError: can only concatenate str (not "int") to str The last line of the error message indicates what happened. Exceptions come in @@ -252,6 +258,7 @@ exception to occur. For example:: >>> raise NameError('HiThere') Traceback (most recent call last): File "", line 1, in + raise NameError('HiThere') NameError: HiThere The sole argument to :keyword:`raise` indicates the exception to be raised. @@ -275,6 +282,7 @@ re-raise the exception:: An exception flew by! Traceback (most recent call last): File "", line 2, in + raise NameError('HiThere') NameError: HiThere @@ -294,12 +302,15 @@ message:: ... Traceback (most recent call last): File "", line 2, in + open("database.sqlite") + ~~~~^^^^^^^^^^^^^^^^^^^ FileNotFoundError: [Errno 2] No such file or directory: 'database.sqlite' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "", line 4, in + raise RuntimeError("unable to handle error") RuntimeError: unable to handle error To indicate that an exception is a direct consequence of another, the @@ -320,6 +331,8 @@ This can be useful when you are transforming exceptions. For example:: ... 
Traceback (most recent call last): File "", line 2, in + func() + ~~~~^^ File "", line 2, in func ConnectionError @@ -327,6 +340,7 @@ This can be useful when you are transforming exceptions. For example:: Traceback (most recent call last): File "", line 4, in + raise RuntimeError('Failed to open database') from exc RuntimeError: Failed to open database It also allows disabling automatic exception chaining using the ``from None`` @@ -339,6 +353,7 @@ idiom:: ... Traceback (most recent call last): File "", line 4, in + raise RuntimeError from None RuntimeError For more information about chaining mechanics, see :ref:`bltin-exceptions`. @@ -381,6 +396,7 @@ example:: Goodbye, world! Traceback (most recent call last): File "", line 2, in + raise KeyboardInterrupt KeyboardInterrupt If a :keyword:`finally` clause is present, the :keyword:`!finally` @@ -448,7 +464,11 @@ A more complicated example:: executing finally clause Traceback (most recent call last): File "", line 1, in + divide("2", "0") + ~~~~~~^^^^^^^^^^ File "", line 3, in divide + result = x / y + ~~^~~ TypeError: unsupported operand type(s) for /: 'str' and 'str' As you can see, the :keyword:`finally` clause is executed in any event. The @@ -511,8 +531,11 @@ caught like any other exception. :: >>> f() + Exception Group Traceback (most recent call last): | File "", line 1, in + | f() + | ~^^ | File "", line 3, in f - | ExceptionGroup: there were problems + | raise ExceptionGroup('there were problems', excs) + | ExceptionGroup: there were problems (2 sub-exceptions) +-+---------------- 1 ---------------- | OSError: error 1 +---------------- 2 ---------------- @@ -560,10 +583,15 @@ other clauses and eventually to be reraised. :: There were SystemErrors + Exception Group Traceback (most recent call last): | File "", line 2, in + | f() + | ~^^ | File "", line 2, in f - | ExceptionGroup: group1 + | raise ExceptionGroup( + | ...<12 lines>... + | ) + | ExceptionGroup: group1 (1 sub-exception) +-+---------------- 1 ---------------- - | ExceptionGroup: group2 + | ExceptionGroup: group2 (1 sub-exception) +-+---------------- 1 ---------------- | RecursionError: 4 +------------------------------------ @@ -607,6 +635,7 @@ includes all notes, in the order they were added, after the exception. :: ... Traceback (most recent call last): File "", line 2, in + raise TypeError('bad type') TypeError: bad type Add some information Add some more information @@ -630,23 +659,33 @@ exception in the group has a note indicating when this error has occurred. 
:: >>> raise ExceptionGroup('We have some problems', excs) + Exception Group Traceback (most recent call last): | File "", line 1, in + | raise ExceptionGroup('We have some problems', excs) | ExceptionGroup: We have some problems (3 sub-exceptions) +-+---------------- 1 ---------------- | Traceback (most recent call last): | File "", line 3, in + | f() + | ~^^ | File "", line 2, in f + | raise OSError('operation failed') | OSError: operation failed | Happened in Iteration 1 +---------------- 2 ---------------- | Traceback (most recent call last): | File "", line 3, in + | f() + | ~^^ | File "", line 2, in f + | raise OSError('operation failed') | OSError: operation failed | Happened in Iteration 2 +---------------- 3 ---------------- | Traceback (most recent call last): | File "", line 3, in + | f() + | ~^^ | File "", line 2, in f + | raise OSError('operation failed') | OSError: operation failed | Happened in Iteration 3 +------------------------------------ diff --git a/Doc/tutorial/floatingpoint.rst b/Doc/tutorial/floatingpoint.rst index 0795e2fef98..dfe2d1d3a83 100644 --- a/Doc/tutorial/floatingpoint.rst +++ b/Doc/tutorial/floatingpoint.rst @@ -6,7 +6,7 @@ .. _tut-fp-issues: ************************************************** -Floating Point Arithmetic: Issues and Limitations +Floating-Point Arithmetic: Issues and Limitations ************************************************** .. sectionauthor:: Tim Peters @@ -88,7 +88,7 @@ the one with 17 significant digits, ``0.10000000000000001``. Starting with Python 3.1, Python (on most systems) is now able to choose the shortest of these and simply display ``0.1``. -Note that this is in the very nature of binary floating-point: this is not a bug +Note that this is in the very nature of binary floating point: this is not a bug in Python, and it is not a bug in your code either. You'll see the same kind of thing in all languages that support your hardware's floating-point arithmetic (although some languages may not *display* the difference by default, or in all @@ -148,13 +148,13 @@ Binary floating-point arithmetic holds many surprises like this. The problem with "0.1" is explained in precise detail below, in the "Representation Error" section. See `Examples of Floating Point Problems `_ for -a pleasant summary of how binary floating-point works and the kinds of +a pleasant summary of how binary floating point works and the kinds of problems commonly encountered in practice. Also see `The Perils of Floating Point `_ for a more complete account of other common surprises. As that says near the end, "there are no easy answers." Still, don't be unduly -wary of floating-point! The errors in Python float operations are inherited +wary of floating point! The errors in Python float operations are inherited from the floating-point hardware, and on most machines are on the order of no more than 1 part in 2\*\*53 per operation. That's more than adequate for most tasks, but you do need to keep in mind that it's not decimal arithmetic and @@ -230,7 +230,7 @@ accumulate to the point where they affect the final total: >>> sum([0.1] * 10) == 1.0 True -The :func:`math.fsum()` goes further and tracks all of the "lost digits" +The :func:`math.fsum` goes further and tracks all of the "lost digits" as values are added onto a running total so that the result has only a single rounding. 
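For instance, a small sketch of the large-magnitude cancellation case mentioned
next, using nothing beyond the standard :mod:`math` module::

   >>> 1e16 + 1.0 - 1e16          # naive left-to-right addition drops the 1.0
   0.0
   >>> from math import fsum
   >>> fsum([1e16, 1.0, -1e16])   # the lost low-order bits are carried along
   1.0
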
This is slower than :func:`sum` but will be more accurate in uncommon cases where large magnitude inputs mostly cancel diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst index b93a0e8cec2..2e6fd419b21 100644 --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -87,12 +87,12 @@ Some examples:: >>> print(s) The value of x is 32.5, and y is 40000... >>> # The repr() of a string adds string quotes and backslashes: - ... hello = 'hello, world\n' + >>> hello = 'hello, world\n' >>> hellos = repr(hello) >>> print(hellos) 'hello, world\n' >>> # The argument to repr() may be any Python object: - ... repr((x, y, ('spam', 'eggs'))) + >>> repr((x, y, ('spam', 'eggs'))) "(32.5, 40000, ('spam', 'eggs'))" The :mod:`string` module contains a :class:`~string.Template` class that offers diff --git a/Doc/tutorial/introduction.rst b/Doc/tutorial/introduction.rst index 0f16dae8b14..054bac59c95 100644 --- a/Doc/tutorial/introduction.rst +++ b/Doc/tutorial/introduction.rst @@ -62,7 +62,7 @@ For example:: 20 >>> (50 - 5*6) / 4 5.0 - >>> 8 / 5 # division always returns a floating point number + >>> 8 / 5 # division always returns a floating-point number 1.6 The integer numbers (e.g. ``2``, ``4``, ``20``) have type :class:`int`, @@ -501,8 +501,8 @@ together. For instance, we can write an initial sub-sequence of the as follows:: >>> # Fibonacci series: - ... # the sum of two elements defines the next - ... a, b = 0, 1 + >>> # the sum of two elements defines the next + >>> a, b = 0, 1 >>> while a < 10: ... print(a) ... a, b = b, a+b @@ -544,7 +544,7 @@ This example introduces several new features. * The :func:`print` function writes the value of the argument(s) it is given. It differs from just writing the expression you want to write (as we did earlier in the calculator examples) in the way it handles multiple arguments, - floating point quantities, and strings. Strings are printed without quotes, + floating-point quantities, and strings. Strings are printed without quotes, and a space is inserted between items, so you can format things nicely, like this:: diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst index 9def2a57149..e96e3431925 100644 --- a/Doc/tutorial/stdlib.rst +++ b/Doc/tutorial/stdlib.rst @@ -138,7 +138,7 @@ Mathematics =========== The :mod:`math` module gives access to the underlying C library functions for -floating point math:: +floating-point math:: >>> import math >>> math.cos(math.pi / 4) diff --git a/Doc/tutorial/stdlib2.rst b/Doc/tutorial/stdlib2.rst index 8eaf5892558..4f460b8e6ec 100644 --- a/Doc/tutorial/stdlib2.rst +++ b/Doc/tutorial/stdlib2.rst @@ -352,11 +352,11 @@ not want to run a full list sort:: .. _tut-decimal-fp: -Decimal Floating Point Arithmetic +Decimal Floating-Point Arithmetic ================================= The :mod:`decimal` module offers a :class:`~decimal.Decimal` datatype for -decimal floating point arithmetic. Compared to the built-in :class:`float` +decimal floating-point arithmetic. Compared to the built-in :class:`float` implementation of binary floating point, the class is especially helpful for * financial applications and other uses which require exact decimal diff --git a/Doc/using/android.rst b/Doc/using/android.rst new file mode 100644 index 00000000000..957705f7f5e --- /dev/null +++ b/Doc/using/android.rst @@ -0,0 +1,65 @@ +.. _using-android: + +======================= +Using Python on Android +======================= + +Python on Android is unlike Python on desktop platforms. 
On a desktop platform, +Python is generally installed as a system resource that can be used by any user +of that computer. Users then interact with Python by running a :program:`python` +executable and entering commands at an interactive prompt, or by running a +Python script. + +On Android, there is no concept of installing as a system resource. The only unit +of software distribution is an "app". There is also no console where you could +run a :program:`python` executable, or interact with a Python REPL. + +As a result, the only way you can use Python on Android is in embedded mode – that +is, by writing a native Android application, embedding a Python interpreter +using ``libpython``, and invoking Python code using the :ref:`Python embedding +API `. The full Python interpreter, the standard library, and all +your Python code is then packaged into your app for its own private use. + +The Python standard library has some notable omissions and restrictions on +Android. See the :ref:`API availability guide ` for +details. + +Adding Python to an Android app +------------------------------- + +These instructions are only needed if you're planning to compile Python for +Android yourself. Most users should *not* need to do this. Instead, use one of +the following tools, which will provide a much easier experience: + +* `Briefcase `__, from the BeeWare project +* `Buildozer `__, from the Kivy project +* `Chaquopy `__ +* `pyqtdeploy `__ +* `Termux `__ + +If you're sure you want to do all of this manually, read on. You can use the +:source:`testbed app ` as a guide; each step below contains a +link to the relevant file. + +* Build Python by following the instructions in :source:`Android/README.md`. + +* Add code to your :source:`build.gradle ` + file to copy the following items into your project. All except your own Python + code can be copied from ``cross-build/HOST/prefix/lib``: + + * In your JNI libraries: + + * ``libpython*.*.so`` + * ``lib*_python.so`` (external libraries such as OpenSSL) + + * In your assets: + + * ``python*.*`` (the Python standard library) + * ``python*.*/site-packages`` (your own Python code) + +* Add code to your app to :source:`extract the assets to the filesystem + `. + +* Add code to your app to :source:`start Python in embedded mode + `. This will need to be C code + called via JNI. diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 0620f7d7114..8f54a0fdebe 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -24,7 +24,7 @@ Command line When invoking Python, you may specify any of these options:: - python [-bBdEhiIOqsSuvVWx?] [-c command | -m module-name | script | - ] [args] + python [-bBdEhiIOPqRsSuvVWx?] [-c command | -m module-name | script | - ] [args] The most common use case is, of course, a simple invocation of a script:: @@ -441,6 +441,7 @@ Miscellaneous options -Wdefault # Warn once per call location -Werror # Convert to exceptions -Walways # Warn every time + -Wall # Same as -Walways -Wmodule # Warn once per calling module -Wonce # Warn once per Python process -Wignore # Never warn @@ -615,9 +616,9 @@ Miscellaneous options .. versionadded:: 3.13 * :samp:`-X gil={0,1}` forces the GIL to be disabled or enabled, - respectively. Only available in builds configured with + respectively. Setting to ``0`` is only available in builds configured with :option:`--disable-gil`. See also :envvar:`PYTHON_GIL` and - :ref:`free-threaded-cpython`. + :ref:`whatsnew313-free-threaded-cpython`. .. versionadded:: 3.13 @@ -786,6 +787,15 @@ conflict. 
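A minimal sketch of that pattern (the script body and names here are only
illustrative)::

   import os

   def main():
       data = {"answer": 42}   # state worth poking at interactively
       return data

   if __name__ == "__main__":
       result = main()
       # A non-empty PYTHONINSPECT set before the program finishes makes the
       # interpreter enter interactive mode instead of exiting.
       os.environ["PYTHONINSPECT"] = "1"
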
This variable can also be modified by Python code using :data:`os.environ` to force inspect mode on program termination. + .. audit-event:: cpython.run_stdin "" "" + + .. versionchanged:: 3.12.5 (also 3.11.10, 3.10.15, 3.9.20, and 3.8.20) + Emits audit events. + + .. versionchanged:: 3.13 + Uses PyREPL if possible, in which case :envvar:`PYTHONSTARTUP` is + also executed. Emits audit events. + .. envvar:: PYTHONUNBUFFERED @@ -909,6 +919,7 @@ conflict. PYTHONWARNINGS=default # Warn once per call location PYTHONWARNINGS=error # Convert to exceptions PYTHONWARNINGS=always # Warn every time + PYTHONWARNINGS=all # Same as PYTHONWARNINGS=always PYTHONWARNINGS=module # Warn once per calling module PYTHONWARNINGS=once # Warn once per Python process PYTHONWARNINGS=ignore # Never warn @@ -1014,7 +1025,7 @@ conflict. 'surrogatepass' are used. This may also be enabled at runtime with - :func:`sys._enablelegacywindowsfsencoding()`. + :func:`sys._enablelegacywindowsfsencoding`. .. availability:: Windows. @@ -1204,12 +1215,11 @@ conflict. .. envvar:: PYTHON_GIL If this variable is set to ``1``, the global interpreter lock (GIL) will be - forced on. Setting it to ``0`` forces the GIL off. + forced on. Setting it to ``0`` forces the GIL off (needs Python configured with + the :option:`--disable-gil` build option). See also the :option:`-X gil <-X>` command-line option, which takes - precedence over this variable, and :ref:`free-threaded-cpython`. - - Needs Python configured with the :option:`--disable-gil` build option. + precedence over this variable, and :ref:`whatsnew313-free-threaded-cpython`. .. versionadded:: 3.13 diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 428ee527527..052289bd827 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -16,8 +16,8 @@ Features and minimum versions required to build CPython: * On Windows, Microsoft Visual Studio 2017 or later is required. -* Support for `IEEE 754 `_ floating - point numbers and `floating point Not-a-Number (NaN) +* Support for `IEEE 754 `_ + floating-point numbers and `floating-point Not-a-Number (NaN) `_. * Support for threads. @@ -299,7 +299,7 @@ General Options Defines the ``Py_GIL_DISABLED`` macro and adds ``"t"`` to :data:`sys.abiflags`. - See :ref:`free-threaded-cpython` for more detail. + See :ref:`whatsnew313-free-threaded-cpython` for more detail. .. versionadded:: 3.13 @@ -427,7 +427,7 @@ Options for third-party dependencies .. option:: PANEL_CFLAGS .. option:: PANEL_LIBS - C compiler and Linker flags for PANEL, overriding ``pkg-config``. + C compiler and linker flags for PANEL, overriding ``pkg-config``. C compiler and linker flags for ``libpanel`` or ``libpanelw``, used by :mod:`curses.panel` module, overriding ``pkg-config``. @@ -615,7 +615,7 @@ also be used to improve performance. .. option:: --without-mimalloc - Disable the fast mimalloc allocator :ref:`mimalloc ` + Disable the fast :ref:`mimalloc ` allocator (enabled by default). See also :envvar:`PYTHONMALLOC` environment variable. @@ -945,6 +945,17 @@ See :source:`Mac/README.rst`. Specify the name for the python framework on macOS only valid when :option:`--enable-framework` is set (default: ``Python``). +.. option:: --with-app-store-compliance +.. option:: --with-app-store-compliance=PATCH-FILE + + The Python standard library contains strings that are known to trigger + automated inspection tool errors when submitted for distribution by + the macOS and iOS App Stores. 
If enabled, this option will apply the list of + patches that are known to correct app store compliance. A custom patch + file can also be specified. This option is disabled by default. + + .. versionadded:: 3.13 + iOS Options ----------- @@ -1090,7 +1101,7 @@ Remove built files. make distclean ^^^^^^^^^^^^^^ -In addition to the the work done by ``make clean``, remove files +In addition to the work done by ``make clean``, remove files created by the configure script. ``configure`` will have to be run before building again. [#]_ diff --git a/Doc/using/index.rst b/Doc/using/index.rst index f55a12f1ab8..90fdfc0bec0 100644 --- a/Doc/using/index.rst +++ b/Doc/using/index.rst @@ -12,11 +12,13 @@ interpreter and things that make working with Python easier. .. toctree:: :numbered: + :maxdepth: 2 cmdline.rst unix.rst configure.rst windows.rst mac.rst + android.rst ios.rst editors.rst diff --git a/Doc/using/ios.rst b/Doc/using/ios.rst index 71fc29c450c..4d4eb2031ee 100644 --- a/Doc/using/ios.rst +++ b/Doc/using/ios.rst @@ -33,6 +33,17 @@ running, so you only need to deal with the Python code itself. Python at runtime on iOS ======================== +iOS version compatibility +------------------------- + +The minimum supported iOS version is specified at compile time, using the +:option:`--host` option to ``configure``. By default, when compiled for iOS, +Python will be compiled with a minimum supported iOS version of 13.0. To use a +different minimum iOS version, provide the version number as part of the +:option:`!--host` argument - for example, +``--host=arm64-apple-ios15.4-simulator`` would compile an ARM64 simulator build +with a deployment target of 15.4. + Platform identification ----------------------- @@ -42,17 +53,17 @@ the simulator or a physical device. Information about the specific runtime environment, including the iOS version, device model, and whether the device is a simulator, can be obtained using -:func:`platform.ios_ver()`. :func:`platform.system()` will report ``iOS`` or +:func:`platform.ios_ver`. :func:`platform.system` will report ``iOS`` or ``iPadOS``, depending on the device. -:func:`os.uname()` reports kernel-level details; it will report a name of +:func:`os.uname` reports kernel-level details; it will report a name of ``Darwin``. Standard library availability ----------------------------- The Python standard library has some notable omissions and restrictions on -iOS. See the :ref:`API availability guide for iOS ` for +iOS. See the :ref:`API availability guide for iOS ` for details. Binary extension modules @@ -312,3 +323,21 @@ modules in your app, some additional steps will be required: * If you're using a separate folder for third-party packages, ensure that folder is included as part of the ``PYTHONPATH`` configuration in step 10. + +App Store Compliance +==================== + +The only mechanism for distributing apps to third-party iOS devices is to +submit the app to the iOS App Store; apps submitted for distribution must pass +Apple's app review process. This process includes a set of automated validation +rules that inspect the submitted application bundle for problematic code. + +The Python standard library contains some code that is known to violate these +automated rules. While these violations appear to be false positives, Apple's +review rules cannot be challenged; so, it is necessary to modify the Python +standard library for an app to pass App Store review. 
+ +The Python source tree contains +:source:`a patch file ` that will remove +all code that is known to cause issues with the App Store review process. This +patch is applied automatically when building for iOS. diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst index 31d37aad2a7..4b6c884f3d4 100644 --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -2,140 +2,223 @@ .. _using-on-mac: ********************* -Using Python on a Mac +Using Python on macOS ********************* -:Author: Bob Savage +.. sectionauthor:: Bob Savage +.. sectionauthor:: Ned Deily +This document aims to give an overview of macOS-specific behavior you should +know about to get started with Python on Mac computers. +Python on a Mac running macOS is very similar to Python on other Unix-derived platforms, +but there are some differences in installation and some features. -Python on a Mac running macOS is in principle very similar to Python on -any other Unix platform, but there are a number of additional features such as -the integrated development environment (IDE) and the Package Manager that are -worth pointing out. +There are various ways to obtain and install Python for macOS. +Pre-built versions of the most recent versions of Python are available +from a number of distributors. Much of this document describes use of +the Pythons provided by the CPython release team for download from +the `python.org website `_. See +:ref:`alternative_bundles` for some other options. +.. |usemac_x_dot_y| replace:: 3.13 +.. |usemac_python_x_dot_y_literal| replace:: ``python3.13`` +.. |usemac_python_x_dot_y_t_literal| replace:: ``python3.13t`` +.. |usemac_python_x_dot_y_t_literal_config| replace:: ``python3.13t-config`` +.. |usemac_applications_folder_name| replace:: ``Python 3.13`` +.. |usemac_applications_folder_version| replace:: ``/Applications/Python 3.13/`` .. _getting-osx: .. _getting-and-installing-macpython: -Getting and Installing Python -============================= +Using Python for macOS from ``python.org`` +========================================== -macOS used to come with Python 2.7 pre-installed between versions -10.8 and `12.3 `_. -You are invited to install the most recent version of Python 3 from the `Python -website `__. -A current "universal2 binary" build of Python, which runs natively on the Mac's -new Apple Silicon and legacy Intel processors, is available there. +Installation steps +------------------ -What you get after installing is a number of things: +For `current Python versions `_ +(other than those in ``security`` status), the release team produces a +**Python for macOS** installer package for each new release. +A list of available installers +is available `here `_. +We recommend using the most recent supported Python version where possible. +Current installers provide a +`universal2 binary `_ build +of Python which runs natively on all Macs (Apple Silicon and Intel) that are +supported by a wide range of macOS versions, +currently typically from at least **macOS 10.13 High Sierra** on. -* A |python_version_literal| folder in your :file:`Applications` folder. In here - you find IDLE, the development environment that is a standard part of official +The downloaded file is a standard macOS installer package file (``.pkg``). +File integrity information (checksum, size, sigstore signature, etc) for each file is included +on the release download page. 
Installer packages and their contents are signed and notarized +with ``Python Software Foundation`` Apple Developer ID certificates +to meet `macOS Gatekeeper requirements `_. + +For a default installation, double-click on the downloaded installer package file. +This should launch the standard macOS Installer app and display the first of several +installer windows steps. + +.. image:: mac_installer_01_introduction.png + +Clicking on the **Continue** button brings up the **Read Me** for this installer. +Besides other important information, the **Read Me** documents which Python version is +going to be installed and on what versions of macOS it is supported. You may need +to scroll through to read the whole file. By default, this **Read Me** will also be +installed in |usemac_applications_folder_version| and available to read anytime. + +.. image:: mac_installer_02_readme.png + +Clicking on **Continue** proceeds to display the license for Python and for +other included software. You will then need to **Agree** to the license terms +before proceeding to the next step. This license file will also be installed +and available to be read later. + +.. image:: mac_installer_03_license.png + +After the license terms are accepted, the next step is the **Installation Type** +display. For most uses, the standard set of installation operations is appropriate. + +.. image:: mac_installer_04_installation_type.png + +By pressing the **Customize** button, you can choose to omit or select certain package +components of the installer. Click on each package name to see a description of +what it installs. +To also install support for the optional experimental free-threaded feature, +see :ref:`install-freethreaded-macos`. + +.. image:: mac_installer_05_custom_install.png + +In either case, clicking **Install** will begin the install process by asking +permission to install new software. A macOS user name with ``Administrator`` privilege +is needed as the installed Python will be available to all users of the Mac. + +When the installation is complete, the **Summary** window will appear. + +.. image:: mac_installer_06_summary.png + +Double-click on the :command:`Install Certificates.command` +icon or file in the |usemac_applications_folder_version| window to complete the +installation. + +.. image:: mac_installer_07_applications.png + +This will open a temporary :program:`Terminal` shell window that +will use the new Python to download and install SSL root certificates +for its use. + +.. image:: mac_installer_08_install_certificates.png + +If ``Successfully installed certifi`` and ``update complete`` appears +in the terminal window, the installation is complete. +Close this terminal window and the installer window. + +A default install will include: + +* A |usemac_applications_folder_name| folder in your :file:`Applications` folder. In here + you find :program:`IDLE`, the development environment that is a standard part of official Python distributions; and :program:`Python Launcher`, which handles double-clicking Python - scripts from the Finder. + scripts from the macOS `Finder `_. * A framework :file:`/Library/Frameworks/Python.framework`, which includes the Python executable and libraries. The installer adds this location to your shell - path. To uninstall Python, you can remove these three things. A - symlink to the Python executable is placed in :file:`/usr/local/bin/`. + path. To uninstall Python, you can remove these three things. + Symlinks to the Python executable are placed in :file:`/usr/local/bin/`. 
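As an illustrative aside, not part of the patch above, a quick way to confirm that a shell picks up the newly installed interpreter rather than another one is to ask the shell and the interpreter directly. The framework path in the comment reflects the default ``python.org`` install locations described above; exact output varies by system.

.. code-block:: sh

   # Post-install sanity check (illustrative): which python3 does the shell find,
   # and does it resolve through /usr/local/bin into Python.framework?
   which -a python3
   python3 -VV
   ls -l /usr/local/bin/python3*

If an Apple-provided interpreter is listed first, the shell ``PATH`` order is the reason; see the note that follows.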
.. note:: - On macOS 10.8-12.3, the Apple-provided build of Python is installed in - :file:`/System/Library/Frameworks/Python.framework` and :file:`/usr/bin/python`, - respectively. You should never modify or delete these, as they are - Apple-controlled and are used by Apple- or third-party software. Remember that - if you choose to install a newer Python version from python.org, you will have - two different but functional Python installations on your computer, so it will - be important that your paths and usages are consistent with what you want to do. - -IDLE includes a Help menu that allows you to access Python documentation. If you -are completely new to Python you should start reading the tutorial introduction -in that document. - -If you are familiar with Python on other Unix platforms you should read the -section on running Python scripts from the Unix shell. - + Recent versions of macOS include a :command:`python3` command in :file:`/usr/bin/python3` + that links to a usually older and incomplete version of Python provided by and for use by + the Apple development tools, :program:`Xcode` or the :program:`Command Line Tools for Xcode`. + You should never modify or attempt to delete this installation, as it is + Apple-controlled and is used by Apple-provided or third-party software. If + you choose to install a newer Python version from ``python.org``, you will have + two different but functional Python installations on your computer that + can co-exist. The default installer options should ensure that its :command:`python3` + will be used instead of the system :command:`python3`. How to run a Python script -------------------------- -Your best way to get started with Python on macOS is through the IDLE -integrated development environment; see section :ref:`ide` and use the Help menu -when the IDE is running. +There are two ways to invoke the Python interpreter. +If you are familiar with using a Unix shell in a terminal +window, you can invoke |usemac_python_x_dot_y_literal| or ``python3`` optionally +followed by one or more command line options (described in :ref:`using-on-general`). +The Python tutorial also has a useful section on +:ref:`using Python interactively from a shell `. + +You can also invoke the interpreter through an integrated +development environment. +:ref:`idle` is a basic editor and interpreter environment +which is included with the standard distribution of Python. +:program:`IDLE` includes a Help menu that allows you to access Python documentation. If you +are completely new to Python, you can read the tutorial introduction +in that document. + +There are many other editors and IDEs available, see :ref:`editors` +for more information. -If you want to run Python scripts from the Terminal window command line or from -the Finder you first need an editor to create your script. macOS comes with a -number of standard Unix command line editors, :program:`vim` -:program:`nano` among them. If you want a more Mac-like editor, -:program:`BBEdit` from Bare Bones Software (see -https://www.barebones.com/products/bbedit/index.html) are good choices, as is -:program:`TextMate` (see https://macromates.com). Other editors include -:program:`MacVim` (https://macvim.org) and :program:`Aquamacs` -(https://aquamacs.org). +To run a Python script file from the terminal window, you can +invoke the interpreter with the name of the script file: -To run your script from the Terminal window you must make sure that -:file:`/usr/local/bin` is in your shell search path. 
+ |usemac_python_x_dot_y_literal| ``myscript.py`` -To run your script from the Finder you have two options: +To run your script from the Finder, you can either: * Drag it to :program:`Python Launcher`. * Select :program:`Python Launcher` as the default application to open your - script (or any ``.py`` script) through the finder Info window and double-click it. + script (or any ``.py`` script) through the Finder Info window and double-click it. :program:`Python Launcher` has various preferences to control how your script is launched. Option-dragging allows you to change these for one invocation, or use - its Preferences menu to change things globally. - + its ``Preferences`` menu to change things globally. -.. _osx-gui-scripts: - -Running scripts with a GUI --------------------------- +Be aware that running the script directly from the macOS Finder might +produce different results than when running from a terminal window as +the script will not be run in the usual shell environment including +any setting of environment variables in shell profiles. +And, as with any other script or program, +be certain of what you are about to run. -With older versions of Python, there is one macOS quirk that you need to be -aware of: programs that talk to the Aqua window manager (in other words, -anything that has a GUI) need to be run in a special way. Use :program:`pythonw` -instead of :program:`python` to start such scripts. +.. _alternative_bundles: -With Python 3.9, you can use either :program:`python` or :program:`pythonw`. +Alternative Distributions +========================= +Besides the standard ``python.org`` for macOS installer, there are third-party +distributions for macOS that may include additional functionality. +Some popular distributions and their key features: -Configuration -------------- +`ActivePython `_ + Installer with multi-platform compatibility, documentation -Python on macOS honors all standard Unix environment variables such as -:envvar:`PYTHONPATH`, but setting these variables for programs started from the -Finder is non-standard as the Finder does not read your :file:`.profile` or -:file:`.cshrc` at startup. You need to create a file -:file:`~/.MacOSX/environment.plist`. See Apple's -`Technical Q&A QA1067 `__ -for details. +`Anaconda `_ + Popular scientific modules (such as numpy, scipy, and pandas) and the + ``conda`` package manager. -For more information on installation Python packages, see section -:ref:`mac-package-manager`. +`Homebrew `_ + Package manager for macOS including multiple versions of Python and many + third-party Python-based packages (including numpy, scipy, and pandas). +`MacPorts `_ + Another package manager for macOS including multiple versions of Python and many + third-party Python-based packages. May include pre-built versions of Python and + many packages for older versions of macOS. -.. _ide: - -The IDE -======= - -Python ships with the standard IDLE development environment. A good -introduction to using IDLE can be found at -https://www.hashcollision.org/hkn/python/idle_intro/index.html. - +Note that distributions might not include the latest versions of Python or +other libraries, and are not maintained or supported by the core Python team. .. _mac-package-manager: Installing Additional Python Packages ===================================== -This section has moved to the `Python Packaging User Guide`_. +Refer to the `Python Packaging User Guide`_ for more information. .. 
_Python Packaging User Guide: https://packaging.python.org/en/latest/tutorials/installing-packages/ +.. _osx-gui-scripts: + .. _gui-programming-on-the-mac: GUI Programming @@ -143,36 +226,209 @@ GUI Programming There are several options for building GUI applications on the Mac with Python. -*PyObjC* is a Python binding to Apple's Objective-C/Cocoa framework, which is -the foundation of most modern Mac development. Information on PyObjC is -available from :pypi:`pyobjc`. - The standard Python GUI toolkit is :mod:`tkinter`, based on the cross-platform -Tk toolkit (https://www.tcl.tk). An Aqua-native version of Tk is bundled with -macOS by Apple, and the latest version can be downloaded and installed from -https://www.activestate.com; it can also be built from source. +Tk toolkit (https://www.tcl.tk). A macOS-native version of Tk is included with +the installer. -A number of alternative macOS GUI toolkits are available: +*PyObjC* is a Python binding to Apple's Objective-C/Cocoa framework. +Information on PyObjC is available from :pypi:`pyobjc`. -* `PySide `__: Official Python bindings to the - `Qt GUI toolkit `__. +A number of alternative macOS GUI toolkits are available including: -* `PyQt `__: Alternative +* `PySide `_: Official Python bindings to the + `Qt GUI toolkit `_. + +* `PyQt `_: Alternative Python bindings to Qt. -* `Kivy `__: A cross-platform GUI toolkit that supports +* `Kivy `_: A cross-platform GUI toolkit that supports desktop and mobile platforms. -* `Toga `__: Part of the `BeeWare Project - `__; supports desktop, mobile, web and console apps. +* `Toga `_: Part of the `BeeWare Project + `_; supports desktop, mobile, web and console apps. -* `wxPython `__: A cross-platform toolkit that +* `wxPython `_: A cross-platform toolkit that supports desktop operating systems. + +Advanced Topics +=============== + +.. _install-freethreaded-macos: + +Installing Free-threaded Binaries +--------------------------------- + +.. versionadded:: 3.13 (Experimental) + +.. note:: + + Everything described in this section is considered experimental, + and should be expected to change in future releases. + +The ``python.org`` :ref:`Python for macOS ` +installer package can optionally install an additional build of +Python |usemac_x_dot_y| that supports :pep:`703`, the experimental free-threading feature +(running with the :term:`global interpreter lock` disabled). +Check the release page on ``python.org`` for possible updated information. + +Because this feature is still considered experimental, the support for it +is not installed by default. It is packaged as a separate install option, +available by clicking the **Customize** button on the **Installation Type** +step of the installer as described above. + +.. image:: mac_installer_09_custom_install_free_threaded.png + +If the box next to the **Free-threaded Python** package name is checked, +a separate :file:`PythonT.framework` will also be installed +alongside the normal :file:`Python.framework` in :file:`/Library/Frameworks`. +This configuration allows a free-threaded Python |usemac_x_dot_y| build to co-exist +on your system with a traditional (GIL only) Python |usemac_x_dot_y| build with +minimal risk while installing or testing. This installation layout is itself +experimental and is subject to change in future releases. 
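As a hedged illustration, not part of the patch, the two builds can be distinguished from the command line once both are installed. This sketch assumes the ``python3.13`` and ``python3.13t`` links described below are on ``PATH``, and uses the private ``sys._is_gil_enabled()`` helper available in 3.13; the ``PYTHON_GIL`` environment variable documented earlier can force the GIL back on in the free-threaded build.

.. code-block:: sh

   # Illustrative comparison of the traditional and free-threaded interpreters.
   python3.13  -c "import sys; print(sys.version, sys._is_gil_enabled())"
   python3.13t -c "import sys; print(sys.version, sys._is_gil_enabled())"

   # Forcing the GIL on in the free-threaded build via the environment variable.
   PYTHON_GIL=1 python3.13t -c "import sys; print(sys._is_gil_enabled())"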
+ +Known cautions and limitations: + +- The **UNIX command-line tools** package, which is selected by default, + will install links in :file:`/usr/local/bin` for |usemac_python_x_dot_y_t_literal|, + the free-threaded interpreter, and |usemac_python_x_dot_y_t_literal_config|, + a configuration utility which may be useful for package builders. + Since :file:`/usr/local/bin` is typically included in your shell ``PATH``, + in most cases no changes to your ``PATH`` environment variables should + be needed to use |usemac_python_x_dot_y_t_literal|. + +- For this release, the **Shell profile updater** package and the + :file:`Update Shell Profile.command` in |usemac_applications_folder_version| + do not support the free-threaded package. + +- The free-threaded build and the traditional build have separate search + paths and separate :file:`site-packages` directories so, by default, + if you need a package available in both builds, it may need to be installed in both. + The free-threaded package will install a separate instance of :program:`pip` for use + with |usemac_python_x_dot_y_t_literal|. + + - To install a package using :command:`pip` without a :command:`venv`: + + |usemac_python_x_dot_y_t_literal| ``-m pip install `` + +- When working with multiple Python environments, it is usually safest and easiest + to :ref:`create and use virtual environments `. + This can avoid possible command name conflicts and confusion about which Python is in use: + + |usemac_python_x_dot_y_t_literal| ``-m venv `` + + then :command:`activate`. + +- To run a free-threaded version of IDLE: + + |usemac_python_x_dot_y_t_literal| ``-m idlelib`` + +- The interpreters in both builds respond to the same + :ref:`PYTHON environment variables ` + which may have unexpected results, for example, if you have ``PYTHONPATH`` + set in a shell profile. If necessary, there are + :ref:`command line options ` like ``-E`` + to ignore these environment variables. + +- The free-threaded build links to the third-party shared libraries, + such as ``OpenSSL`` and ``Tk``, installed in the traditional framework. + This means that both builds also share one set of trust certificates + as installed by the :command:`Install Certificates.command` script, + thus it only needs to be run once. + +- If you cannot depend on the link in ``/usr/local/bin`` pointing to the + ``python.org`` free-threaded |usemac_python_x_dot_y_t_literal| (for example, if you want + to install your own version there or some other distribution does), + you can explicitly set your shell ``PATH`` environment variable to + include the ``PythonT`` framework ``bin`` directory: + + .. code-block:: sh + + export PATH="/Library/Frameworks/PythonT.framework/Versions/3.13/bin":"$PATH" + + The traditional framework installation by default does something similar, + except for :file:`Python.framework`. Be aware that having both framework ``bin`` + directories in ``PATH`` can lead to confusion if there are duplicate names + like ``python3.13`` in both; which one is actually used depends on the order + they appear in ``PATH``. The ``which python3.x`` or ``which python3.xt`` + commands can show which path is being used. Using virtual environments + can help avoid such ambiguities. Another option might be to create + a shell :command:`alias` to the desired interpreter, like: + + .. 
code-block:: sh + + alias py3.13="/Library/Frameworks/Python.framework/Versions/3.13/bin/python3.13" + alias py3.13t="/Library/Frameworks/PythonT.framework/Versions/3.13/bin/python3.13t" + +Installing using the command line +--------------------------------- + +If you want to use automation to install the ``python.org`` installer package +(rather than by using the familiar macOS :program:`Installer` GUI app), +the macOS command line :command:`installer` utility lets you select non-default +options, too. If you are not familiar with :command:`installer`, it can be +somewhat cryptic (see :command:`man installer` for more information). +As an example, the following shell snippet shows one way to do it, +using the ``3.13.0b2`` release and selecting the free-threaded interpreter +option: + +.. code-block:: sh + + RELEASE="python-3.13.0b2-macos11.pkg" + + # download installer pkg + curl -O https://www.python.org/ftp/python/3.13.0/${RELEASE} + + # create installer choicechanges to customize the install: + # enable the PythonTFramework-3.13 package + # while accepting the other defaults (install all other packages) + cat > ./choicechanges.plist < + + + + + attributeSetting + 1 + choiceAttribute + selected + choiceIdentifier + org.python.Python.PythonTFramework-3.13 + + + + EOF + + sudo installer -pkg ./${RELEASE} -applyChoiceChangesXML ./choicechanges.plist -target / + + +You can then test that both installer builds are now available with something like: + +.. code-block:: console + + $ # test that the free-threaded interpreter was installed if the Unix Command Tools package was enabled + $ /usr/local/bin/python3.13t -VV + Python 3.13.0b2 experimental free-threading build (v3.13.0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] + $ # and the traditional interpreter + $ /usr/local/bin/python3.13 -VV + Python 3.13.0b2 (v3.13.0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] + $ # test that they are also available without the prefix if /usr/local/bin is on $PATH + $ python3.13t -VV + Python 3.13.0b2 experimental free-threading build (v3.13.0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] + $ python3.13 -VV + Python 3.13.0b2 (v3.13.0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] + +.. note:: + + Current ``python.org`` installers only install to fixed locations like + :file:`/Library/Frameworks/`, :file:`/Applications`, and :file:`/usr/local/bin`. + You cannot use the :command:`installer` ``-domain`` option to install to + other locations. + .. _distributing-python-applications-on-the-mac: Distributing Python Applications -================================ +-------------------------------- A range of tools exist for converting your Python code into a standalone distributable application: @@ -180,22 +436,39 @@ distributable application: * :pypi:`py2app`: Supports creating macOS ``.app`` bundles from a Python project. -* `Briefcase `__: Part of the `BeeWare Project - `__; a cross-platform packaging tool that supports +* `Briefcase `_: Part of the `BeeWare Project + `_; a cross-platform packaging tool that supports creation of ``.app`` bundles on macOS, as well as managing signing and notarization. -* `PyInstaller `__: A cross-platform packaging tool that creates +* `PyInstaller `_: A cross-platform packaging tool that creates a single file or folder as a distributable artifact. 
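To make the last entry concrete, here is a minimal sketch, not taken from the patch, of a PyInstaller run. The script name ``myscript.py`` and the application name are placeholders, and real projects usually need additional options (icons, data files, signing).

.. code-block:: sh

   # Hypothetical packaging run with PyInstaller (placeholder names).
   python3 -m pip install pyinstaller
   pyinstaller --windowed --name MyApp myscript.py
   # On macOS, --windowed produces a .app bundle under the dist/ directory.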
-Other Resources -=============== +App Store Compliance +-------------------- + +Apps submitted for distribution through the macOS App Store must pass Apple's +app review process. This process includes a set of automated validation rules +that inspect the submitted application bundle for problematic code. -The Pythonmac-SIG mailing list is an excellent support resource for Python users -and developers on the Mac: +The Python standard library contains some code that is known to violate these +automated rules. While these violations appear to be false positives, Apple's +review rules cannot be challenged. Therefore, it is necessary to modify the +Python standard library for an app to pass App Store review. -https://www.python.org/community/sigs/current/pythonmac-sig/ +The Python source tree contains +:source:`a patch file ` that will remove +all code that is known to cause issues with the App Store review process. This +patch is applied automatically when CPython is configured with the +:option:`--with-app-store-compliance` option. -Another useful resource is the MacPython wiki: +This patch is not normally required to use CPython on a Mac; nor is it required +if you are distributing an app *outside* the macOS App Store. It is *only* +required if you are using the macOS App Store as a distribution channel. + +Other Resources +=============== -https://wiki.python.org/moin/MacPython +The `python.org Help page `_ has links to many useful resources. +The `Pythonmac-SIG mailing list `_ +is another support resource specifically for Python users and developers on the Mac. diff --git a/Doc/using/mac_installer_01_introduction.png b/Doc/using/mac_installer_01_introduction.png new file mode 100644 index 00000000000..1999f3a3759 Binary files /dev/null and b/Doc/using/mac_installer_01_introduction.png differ diff --git a/Doc/using/mac_installer_02_readme.png b/Doc/using/mac_installer_02_readme.png new file mode 100644 index 00000000000..a36efaf7d50 Binary files /dev/null and b/Doc/using/mac_installer_02_readme.png differ diff --git a/Doc/using/mac_installer_03_license.png b/Doc/using/mac_installer_03_license.png new file mode 100644 index 00000000000..598c22a13d9 Binary files /dev/null and b/Doc/using/mac_installer_03_license.png differ diff --git a/Doc/using/mac_installer_04_installation_type.png b/Doc/using/mac_installer_04_installation_type.png new file mode 100644 index 00000000000..9498fd06240 Binary files /dev/null and b/Doc/using/mac_installer_04_installation_type.png differ diff --git a/Doc/using/mac_installer_05_custom_install.png b/Doc/using/mac_installer_05_custom_install.png new file mode 100644 index 00000000000..3a201d2f446 Binary files /dev/null and b/Doc/using/mac_installer_05_custom_install.png differ diff --git a/Doc/using/mac_installer_06_summary.png b/Doc/using/mac_installer_06_summary.png new file mode 100644 index 00000000000..1af6eee2c66 Binary files /dev/null and b/Doc/using/mac_installer_06_summary.png differ diff --git a/Doc/using/mac_installer_07_applications.png b/Doc/using/mac_installer_07_applications.png new file mode 100644 index 00000000000..940219cad6f Binary files /dev/null and b/Doc/using/mac_installer_07_applications.png differ diff --git a/Doc/using/mac_installer_08_install_certificates.png b/Doc/using/mac_installer_08_install_certificates.png new file mode 100644 index 00000000000..c125eeb18aa Binary files /dev/null and b/Doc/using/mac_installer_08_install_certificates.png differ diff --git a/Doc/using/mac_installer_09_custom_install_free_threaded.png 
b/Doc/using/mac_installer_09_custom_install_free_threaded.png new file mode 100644 index 00000000000..0f69c55eddb Binary files /dev/null and b/Doc/using/mac_installer_09_custom_install_free_threaded.png differ diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc deleted file mode 100644 index 354eb1541ce..00000000000 --- a/Doc/using/venv-create.inc +++ /dev/null @@ -1,121 +0,0 @@ -Creation of :ref:`virtual environments ` is done by executing the -command ``venv``:: - - python -m venv /path/to/new/virtual/environment - -Running this command creates the target directory (creating any parent -directories that don't exist already) and places a ``pyvenv.cfg`` file in it -with a ``home`` key pointing to the Python installation from which the command -was run (a common name for the target directory is ``.venv``). It also creates -a ``bin`` (or ``Scripts`` on Windows) subdirectory containing a copy/symlink -of the Python binary/binaries (as appropriate for the platform or arguments -used at environment creation time). It also creates an (initially empty) -``lib/pythonX.Y/site-packages`` subdirectory (on Windows, this is -``Lib\site-packages``). If an existing directory is specified, it will be -re-used. - -.. versionchanged:: 3.5 - The use of ``venv`` is now recommended for creating virtual environments. - -.. deprecated:: 3.6 - ``pyvenv`` was the recommended tool for creating virtual environments for - Python 3.3 and 3.4, and is - :ref:`deprecated in Python 3.6 `. - -.. highlight:: none - -On Windows, invoke the ``venv`` command as follows:: - - c:\>Python35\python -m venv c:\path\to\myenv - -Alternatively, if you configured the ``PATH`` and ``PATHEXT`` variables for -your :ref:`Python installation `:: - - c:\>python -m venv c:\path\to\myenv - -The command, if run with ``-h``, will show the available options:: - - usage: venv [-h] [--system-site-packages] [--symlinks | --copies] [--clear] - [--upgrade] [--without-pip] [--prompt PROMPT] [--upgrade-deps] - [--without-scm-ignore-file] - ENV_DIR [ENV_DIR ...] - - Creates virtual Python environments in one or more target directories. - - positional arguments: - ENV_DIR A directory to create the environment in. - - options: - -h, --help show this help message and exit - --system-site-packages - Give the virtual environment access to the system - site-packages dir. - --symlinks Try to use symlinks rather than copies, when - symlinks are not the default for the platform. - --copies Try to use copies rather than symlinks, even when - symlinks are the default for the platform. - --clear Delete the contents of the environment directory if - it already exists, before environment creation. - --upgrade Upgrade the environment directory to use this - version of Python, assuming Python has been upgraded - in-place. - --without-pip Skips installing or upgrading pip in the virtual - environment (pip is bootstrapped by default) - --prompt PROMPT Provides an alternative prompt prefix for this - environment. - --upgrade-deps Upgrade core dependencies (pip) to the latest - version in PyPI - --without-scm-ignore-file - Skips adding the default SCM ignore file to the - environment directory (the default is a .gitignore - file). - - Once an environment has been created, you may wish to activate it, e.g. by - sourcing an activate script in its bin directory. - -.. versionchanged:: 3.13 - - ``--without-scm-ignore-file`` was added along with creating an ignore file - for ``git`` by default. - -.. 
versionchanged:: 3.12 - - ``setuptools`` is no longer a core venv dependency. - -.. versionchanged:: 3.9 - Add ``--upgrade-deps`` option to upgrade pip + setuptools to the latest on PyPI - -.. versionchanged:: 3.4 - Installs pip by default, added the ``--without-pip`` and ``--copies`` - options - -.. versionchanged:: 3.4 - In earlier versions, if the target directory already existed, an error was - raised, unless the ``--clear`` or ``--upgrade`` option was provided. - -.. note:: - While symlinks are supported on Windows, they are not recommended. Of - particular note is that double-clicking ``python.exe`` in File Explorer - will resolve the symlink eagerly and ignore the virtual environment. - -.. note:: - On Microsoft Windows, it may be required to enable the ``Activate.ps1`` - script by setting the execution policy for the user. You can do this by - issuing the following PowerShell command: - - PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - - See `About Execution Policies - `_ - for more information. - -The created ``pyvenv.cfg`` file also includes the -``include-system-site-packages`` key, set to ``true`` if ``venv`` is -run with the ``--system-site-packages`` option, ``false`` otherwise. - -Unless the ``--without-pip`` option is given, :mod:`ensurepip` will be -invoked to bootstrap ``pip`` into the virtual environment. - -Multiple paths can be given to ``venv``, in which case an identical virtual -environment will be created, according to the given options, at each provided -path. diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index ef98d32e867..136236f51eb 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -608,7 +608,7 @@ key features: Popular scientific modules (such as numpy, scipy and pandas) and the ``conda`` package manager. -`Enthought Deployment Manager `_ +`Enthought Deployment Manager `_ "The Next Generation Python Environment and Package Manager". Previously Enthought provided Canopy, but it `reached end of life in 2016 @@ -1305,7 +1305,7 @@ shipped with PyWin32. It is an embeddable IDE with a built-in debugger. .. seealso:: - `Win32 How Do I...? `_ + `Win32 How Do I...? `_ by Tim Golden `Python and COM `_ diff --git a/Doc/whatsnew/2.1.rst b/Doc/whatsnew/2.1.rst index b4002f06e92..f23f27c994d 100644 --- a/Doc/whatsnew/2.1.rst +++ b/Doc/whatsnew/2.1.rst @@ -443,8 +443,8 @@ Python syntax:: f.grammar = "A ::= B (C D)*" The dictionary containing attributes can be accessed as the function's -:attr:`~object.__dict__`. Unlike the :attr:`~object.__dict__` attribute of class instances, in -functions you can actually assign a new dictionary to :attr:`~object.__dict__`, though +:attr:`~function.__dict__`. Unlike the :attr:`~type.__dict__` attribute of class instances, in +functions you can actually assign a new dictionary to :attr:`~function.__dict__`, though the new value is restricted to a regular Python dictionary; you *can't* be tricky and set it to a :class:`!UserDict` instance, or any other random object that behaves like a mapping. @@ -644,9 +644,9 @@ New and Improved Modules lists the function arguments and the local variables for each frame. * Various functions in the :mod:`time` module, such as :func:`~time.asctime` and - :func:`~time.localtime`, require a floating point argument containing the time in + :func:`~time.localtime`, require a floating-point argument containing the time in seconds since the epoch. 
The most common use of these functions is to work with - the current time, so the floating point argument has been made optional; when a + the current time, so the floating-point argument has been made optional; when a value isn't provided, the current time will be used. For example, log file entries usually need a string containing the current time; in Python 2.1, ``time.asctime()`` can be used, instead of the lengthier diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst index d4dbe0570fb..856be5ecfa5 100644 --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -171,7 +171,7 @@ attributes of their own: * :attr:`~definition.__name__` is the attribute's name. -* :attr:`!__doc__` is the attribute's docstring. +* :attr:`~definition.__doc__` is the attribute's docstring. * ``__get__(object)`` is a method that retrieves the attribute value from *object*. @@ -186,7 +186,8 @@ are:: descriptor = obj.__class__.x descriptor.__get__(obj) -For methods, :meth:`!descriptor.__get__` returns a temporary object that's +For methods, :meth:`descriptor.__get__ ` returns a temporary +object that's callable, and wraps up the instance and the method to be called on it. This is also why static methods and class methods are now possible; they have descriptors that wrap up just the method, or the method and the class. As a @@ -1249,7 +1250,7 @@ Some of the more notable changes are: * The :func:`pow` built-in function no longer supports 3 arguments when floating-point numbers are supplied. ``pow(x, y, z)`` returns ``(x**y) % z``, - but this is never useful for floating point numbers, and the final result varies + but this is never useful for floating-point numbers, and the final result varies unpredictably depending on the platform. A call such as ``pow(2.0, 8.0, 7.0)`` will now raise a :exc:`TypeError` exception. diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst index 8adf36e316c..ac463f82cfb 100644 --- a/Doc/whatsnew/2.3.rst +++ b/Doc/whatsnew/2.3.rst @@ -1113,10 +1113,10 @@ Here are all of the changes that Python 2.3 makes to the core Python language. * One of the noted incompatibilities between old- and new-style classes has been - removed: you can now assign to the :attr:`~definition.__name__` and :attr:`~class.__bases__` + removed: you can now assign to the :attr:`~type.__name__` and :attr:`~type.__bases__` attributes of new-style classes. There are some restrictions on what can be - assigned to :attr:`~class.__bases__` along the lines of those relating to assigning to - an instance's :attr:`~instance.__class__` attribute. + assigned to :attr:`!__bases__` along the lines of those relating to assigning to + an instance's :attr:`~object.__class__` attribute. .. ====================================================================== @@ -1382,7 +1382,7 @@ complete list of changes, or look through the CVS logs for all the details. In Python 2.4, the default will change to always returning floats. Application developers should enable this feature only if all their libraries - work properly when confronted with floating point time stamps, or if they use + work properly when confronted with floating-point time stamps, or if they use the tuple API. If used, the feature should be activated on an application level instead of trying to enable it on a per-use basis. @@ -1925,8 +1925,8 @@ Changes to Python's build process and to the C API include: dependence on a system version or local installation of Expat. 
* If you dynamically allocate type objects in your extension, you should be - aware of a change in the rules relating to the :attr:`!__module__` and - :attr:`~definition.__name__` attributes. In summary, you will want to ensure the type's + aware of a change in the rules relating to the :attr:`~type.__module__` and + :attr:`~type.__name__` attributes. In summary, you will want to ensure the type's dictionary contains a ``'__module__'`` key; making the module name the part of the type name leading up to the final period will no longer have the desired effect. For more detail, read the API reference documentation or the source. diff --git a/Doc/whatsnew/2.4.rst b/Doc/whatsnew/2.4.rst index 7e235d4370e..7628cfefe0e 100644 --- a/Doc/whatsnew/2.4.rst +++ b/Doc/whatsnew/2.4.rst @@ -684,11 +684,11 @@ includes a quick-start tutorial and a reference. Written by Facundo Batista and implemented by Facundo Batista, Eric Price, Raymond Hettinger, Aahz, and Tim Peters. - http://www.lahey.com/float.htm + `http://www.lahey.com/float.htm `__ The article uses Fortran code to illustrate many of the problems that floating-point inaccuracy can cause. - http://speleotrove.com/decimal/ + https://speleotrove.com/decimal/ A description of a decimal-based representation. This representation is being proposed as a standard, and underlies the new Python decimal type. Much of this material was written by Mike Cowlishaw, designer of the Rexx language. @@ -757,7 +757,7 @@ API that perform ASCII-only conversions, ignoring the locale setting: :c:expr:`double` to an ASCII string. The code for these functions came from the GLib library -(https://developer-old.gnome.org/glib/2.26/), whose developers kindly +(`https://developer-old.gnome.org/glib/2.26/ `__), whose developers kindly relicensed the relevant functions and donated them to the Python Software Foundation. The :mod:`locale` module can now change the numeric locale, letting extensions such as GTK+ produce the correct results. diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst index fc2de712485..3c9c2049b89 100644 --- a/Doc/whatsnew/2.6.rst +++ b/Doc/whatsnew/2.6.rst @@ -1453,7 +1453,7 @@ that will be the numerator and denominator of the resulting fraction. :: Fraction(5, 3) For converting floating-point numbers to rationals, -the float type now has an :meth:`as_integer_ratio()` method that returns +the float type now has an :meth:`as_integer_ratio` method that returns the numerator and denominator for a fraction that evaluates to the same floating-point value:: @@ -2273,7 +2273,7 @@ changes, or look through the Subversion logs for all the details. (Contributed by Guido van Rossum from work for Google App Engine; :issue:`3487`.) -* The :mod:`rlcompleter` module's :meth:`Completer.complete()` method +* The :mod:`rlcompleter` module's :meth:`Completer.complete` method will now ignore exceptions triggered while evaluating a name. (Fixed by Lorenz Quack; :issue:`2250`.) @@ -2566,7 +2566,7 @@ changes, or look through the Subversion logs for all the details. :meth:`tracer`, and :meth:`speed` methods. * The ability to set new shapes for the turtle, and to define a new coordinate system. - * Turtles now have an :meth:`undo()` method that can roll back actions. + * Turtles now have an :meth:`undo` method that can roll back actions. * Simple support for reacting to input events such as mouse and keyboard activity, making it possible to write simple games. 
* A :file:`turtle.cfg` file can be used to customize the starting appearance @@ -3051,7 +3051,7 @@ Changes to Python's build process and to the C API include: * Several functions return information about the platform's floating-point support. :c:func:`PyFloat_GetMax` returns - the maximum representable floating point value, + the maximum representable floating-point value, and :c:func:`PyFloat_GetMin` returns the minimum positive value. :c:func:`PyFloat_GetInfo` returns an object containing more information from the :file:`float.h` file, such as diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index c45f0887b41..0e4dee0bd24 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -291,7 +291,7 @@ modules. configuration files can now be read, modified, and then written back in their original order. -* The :meth:`~collections.somenamedtuple._asdict()` method for +* The :meth:`~collections.somenamedtuple._asdict` method for :func:`collections.namedtuple` now returns an ordered dictionary with the values appearing in the same order as the underlying tuple indices. @@ -1198,7 +1198,7 @@ changes, or look through the Subversion logs for all the details. of the operands. Previously such comparisons would fall back to Python's default rules for comparing objects, which produced arbitrary results based on their type. Note that you still cannot combine - :class:`!Decimal` and floating-point in other operations such as addition, + :class:`!Decimal` and floating point in other operations such as addition, since you should be explicitly choosing how to convert between float and :class:`!Decimal`. (Fixed by Mark Dickinson; :issue:`2531`.) @@ -1548,7 +1548,7 @@ changes, or look through the Subversion logs for all the details. *ciphers* argument that's a string listing the encryption algorithms to be allowed; the format of the string is described `in the OpenSSL documentation - `__. + `__. (Added by Antoine Pitrou; :issue:`8322`.) Another change makes the extension load all of OpenSSL's ciphers and @@ -2680,14 +2680,12 @@ automatic ``PATH`` modifications to have ``pip`` available from the command line by default, otherwise it can still be accessed through the Python launcher for Windows as ``py -m pip``. -As `discussed in the PEP`__, platform packagers may choose not to install +As :pep:`discussed in the PEP <0477#disabling-ensurepip-by-downstream-distributors>`, +platform packagers may choose not to install these commands by default, as long as, when invoked, they provide clear and simple directions on how to install them on that platform (usually using the system package manager). -__ https://peps.python.org/pep-0477/#disabling-ensurepip-by-downstream-distributors - - Documentation Changes ~~~~~~~~~~~~~~~~~~~~~ diff --git a/Doc/whatsnew/3.1.rst b/Doc/whatsnew/3.1.rst index 69b273e5838..b9606beb5f9 100644 --- a/Doc/whatsnew/3.1.rst +++ b/Doc/whatsnew/3.1.rst @@ -205,9 +205,9 @@ Some smaller changes made to the core Python language are: (Contributed by Mark Dickinson; :issue:`4707`.) -* Python now uses David Gay's algorithm for finding the shortest floating - point representation that doesn't change its value. This should help - mitigate some of the confusion surrounding binary floating point +* Python now uses David Gay's algorithm for finding the shortest floating-point + representation that doesn't change its value. This should help + mitigate some of the confusion surrounding binary floating-point numbers. 
The significance is easily seen with a number like ``1.1`` which does not @@ -215,7 +215,7 @@ Some smaller changes made to the core Python language are: equivalent, an expression like ``float('1.1')`` evaluates to the nearest representable value which is ``0x1.199999999999ap+0`` in hex or ``1.100000000000000088817841970012523233890533447265625`` in decimal. That - nearest value was and still is used in subsequent floating point + nearest value was and still is used in subsequent floating-point calculations. What is new is how the number gets displayed. Formerly, Python used a @@ -224,7 +224,7 @@ Some smaller changes made to the core Python language are: using 17 digits was that it relied on IEEE-754 guarantees to assure that ``eval(repr(1.1))`` would round-trip exactly to its original value. The disadvantage is that many people found the output to be confusing (mistaking - intrinsic limitations of binary floating point representation as being a + intrinsic limitations of binary floating-point representation as being a problem with Python itself). The new algorithm for ``repr(1.1)`` is smarter and returns ``'1.1'``. @@ -236,8 +236,8 @@ Some smaller changes made to the core Python language are: it does not change the underlying values. So, it is still the case that ``1.1 + 2.2 != 3.3`` even though the representations may suggest otherwise. - The new algorithm depends on certain features in the underlying floating - point implementation. If the required features are not found, the old + The new algorithm depends on certain features in the underlying floating-point + implementation. If the required features are not found, the old algorithm will continue to be used. Also, the text pickle protocols assure cross-platform portability by using the old algorithm. @@ -550,7 +550,7 @@ Porting to Python 3.1 This section lists previously described changes and other bugfixes that may require changes to your code: -* The new floating point string representations can break existing doctests. +* The new floating-point string representations can break existing doctests. For example:: def e(): diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index b939ccd1790..e4699fbf8ed 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -1233,7 +1233,7 @@ also now un-stringize stringized annotations. itertools --------- -Add :func:`itertools.pairwise()`. +Add :func:`itertools.pairwise`. (Contributed by Raymond Hettinger in :issue:`38200`.) linecache @@ -1245,14 +1245,14 @@ When a module does not define ``__loader__``, fall back to ``__spec__.loader``. os -- -Add :func:`os.cpu_count()` support for VxWorks RTOS. +Add :func:`os.cpu_count` support for VxWorks RTOS. (Contributed by Peixing Xin in :issue:`41440`.) Add a new function :func:`os.eventfd` and related helpers to wrap the ``eventfd2`` syscall on Linux. (Contributed by Christian Heimes in :issue:`41001`.) -Add :func:`os.splice()` that allows to move data between two file +Add :func:`os.splice` that allows to move data between two file descriptors without copying between kernel address space and user address space, where one of the file descriptors must refer to a pipe. (Contributed by Pablo Galindo in :issue:`41625`.) @@ -1292,7 +1292,7 @@ functions in the :mod:`os` module. platform -------- -Add :func:`platform.freedesktop_os_release()` to retrieve operation system +Add :func:`platform.freedesktop_os_release` to retrieve operation system identification from `freedesktop.org os-release `_ standard file. 
(Contributed by Christian Heimes in :issue:`28468`.) diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst index b601bd453f5..d59f24406c9 100644 --- a/Doc/whatsnew/3.11.rst +++ b/Doc/whatsnew/3.11.rst @@ -2032,8 +2032,8 @@ Removed C APIs are :ref:`listed separately `. It was introduced in Python 3.4 but has been broken since Python 3.7. (Contributed by Inada Naoki in :issue:`23882`.) -* Removed the undocumented private :meth:`!float.__set_format__()` method, - previously known as :meth:`!float.__setformat__()` in Python 3.7. +* Removed the undocumented private :meth:`!float.__set_format__` method, + previously known as :meth:`!float.__setformat__` in Python 3.7. Its docstring said: "You probably don't want to use this function. It exists mainly to be used in Python's test suite." (Contributed by Victor Stinner in :issue:`46852`.) @@ -2138,7 +2138,7 @@ Build Changes :issue:`45440` and :issue:`46640`.) * Support for `IEEE 754 `_ - floating point numbers. + floating-point numbers. (Contributed by Victor Stinner in :issue:`46917`.) * The :c:macro:`!Py_NO_NAN` macro has been removed. diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 8e2c85a5821..0799c354dde 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -59,7 +59,7 @@ Summary -- Release highlights .. This section singles out the most important changes in Python 3.12. Brevity is key. -Python 3.12 is the latest stable release of the Python programming language, +Python 3.12 is a stable release of the Python programming language, with a mix of changes to the language and the standard library. The library changes focus on cleaning up deprecated APIs, usability, and correctness. Of note, the :mod:`!distutils` package has been removed from the standard library. @@ -154,7 +154,7 @@ Important deprecations, removals or restrictions: reducing the size of every :class:`str` object by at least 8 bytes. * :pep:`632`: Remove the :mod:`!distutils` package. - See `the migration guide `_ + See :pep:`the migration guide <0632#migration-advice>` for advice replacing the APIs it provided. The third-party `Setuptools `__ package continues to provide :mod:`!distutils`, @@ -359,7 +359,7 @@ create an interpreter with its own GIL: /* The new interpreter is now active in the current thread. */ For further examples how to use the C-API for sub-interpreters with a -per-interpreter GIL, see :source:`Modules/_xxsubinterpretersmodule.c`. +per-interpreter GIL, see ``Modules/_xxsubinterpretersmodule.c``. (Contributed by Eric Snow in :gh:`104210`, etc.) @@ -1254,7 +1254,7 @@ Deprecated We added the warning to raise awareness as issues encountered by code doing this are becoming more frequent. See the :func:`os.fork` documentation for more details along with `this discussion on fork being incompatible with threads - `_ for *why* we're now surfacing this + `_ for *why* we're now surfacing this longstanding platform compatibility problem to developers. When this warning appears due to usage of :mod:`multiprocessing` or @@ -1319,7 +1319,7 @@ Deprecated (Contributed by Brett Cannon in :gh:`65961`.) * The bitwise inversion operator (``~``) on bool is deprecated. It will throw an - error in Python 3.14. Use ``not`` for logical negation of bools instead. + error in Python 3.16. Use ``not`` for logical negation of bools instead. In the rare case that you really need the bitwise inversion of the underlying ``int``, convert to int explicitly: ``~int(x)``. (Contributed by Tim Hoffmann in :gh:`103487`.) 
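For illustration only, not part of the patch, the two suggested replacements behave as follows when run from a shell:

.. code-block:: sh

   # 'not' gives the logical negation; ~int(x) gives the bitwise inversion of the int value.
   python3 -c "flag = True; print(not flag)"      # prints False
   python3 -c "flag = True; print(~int(flag))"    # prints -2, i.e. ~1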
@@ -1330,155 +1330,15 @@ Deprecated therefore it will be removed in 3.14. (Contributed by Nikita Sobolev in :gh:`101866`.) -Pending Removal in Python 3.13 ------------------------------- - -The following modules and APIs have been deprecated in earlier Python releases, -and will be removed in Python 3.13. - -Modules (see :pep:`594`): - -* :mod:`!aifc` -* :mod:`!audioop` -* :mod:`!cgi` -* :mod:`!cgitb` -* :mod:`!chunk` -* :mod:`!crypt` -* :mod:`!imghdr` -* :mod:`!mailcap` -* :mod:`!msilib` -* :mod:`!nis` -* :mod:`!nntplib` -* :mod:`!ossaudiodev` -* :mod:`!pipes` -* :mod:`!sndhdr` -* :mod:`!spwd` -* :mod:`!sunau` -* :mod:`!telnetlib` -* :mod:`!uu` -* :mod:`!xdrlib` - -Other modules: - -* :mod:`!lib2to3`, and the :program:`2to3` program (:gh:`84540`) - -APIs: - -* :class:`!configparser.LegacyInterpolation` (:gh:`90765`) -* ``locale.resetlocale()`` (:gh:`90817`) -* :meth:`!turtle.RawTurtle.settiltangle` (:gh:`50096`) -* :func:`!unittest.findTestCases` (:gh:`50096`) -* :func:`!unittest.getTestCaseNames` (:gh:`50096`) -* :func:`!unittest.makeSuite` (:gh:`50096`) -* :meth:`!unittest.TestProgram.usageExit` (:gh:`67048`) -* :class:`!webbrowser.MacOSX` (:gh:`86421`) -* :class:`classmethod` descriptor chaining (:gh:`89519`) -* :mod:`importlib.resources` deprecated methods: - - * ``contents()`` - * ``is_resource()`` - * ``open_binary()`` - * ``open_text()`` - * ``path()`` - * ``read_binary()`` - * ``read_text()`` - - Use :func:`importlib.resources.files()` instead. Refer to `importlib-resources: Migrating from Legacy - `_ (:gh:`106531`) - -Pending Removal in Python 3.14 ------------------------------- - -The following APIs have been deprecated -and will be removed in Python 3.14. - -* :mod:`argparse`: The *type*, *choices*, and *metavar* parameters - of :class:`!argparse.BooleanOptionalAction` - -* :mod:`ast`: - - * :class:`!ast.Num` - * :class:`!ast.Str` - * :class:`!ast.Bytes` - * :class:`!ast.NameConstant` - * :class:`!ast.Ellipsis` - -* :mod:`asyncio`: - - * :class:`!asyncio.MultiLoopChildWatcher` - * :class:`!asyncio.FastChildWatcher` - * :class:`!asyncio.AbstractChildWatcher` - * :class:`!asyncio.SafeChildWatcher` - * :func:`!asyncio.set_child_watcher` - * :func:`!asyncio.get_child_watcher`, - * :meth:`!asyncio.AbstractEventLoopPolicy.set_child_watcher` - * :meth:`!asyncio.AbstractEventLoopPolicy.get_child_watcher` - -* :mod:`collections.abc`: :class:`!collections.abc.ByteString`. - -* :mod:`email`: the *isdst* parameter in :func:`email.utils.localtime`. - -* :mod:`importlib.abc`: - - * :class:`!importlib.abc.ResourceReader` - * :class:`!importlib.abc.Traversable` - * :class:`!importlib.abc.TraversableResources` - -* :mod:`itertools`: Support for copy, deepcopy, and pickle operations. - -* :mod:`pkgutil`: - - * :func:`!pkgutil.find_loader` - * :func:`!pkgutil.get_loader`. - -* :mod:`pty`: - - * :func:`!pty.master_open` - * :func:`!pty.slave_open` - -* :mod:`shutil`: The *onerror* argument of :func:`shutil.rmtree` - -* :mod:`typing`: :class:`!typing.ByteString` - -* The ``__package__`` and ``__cached__`` attributes on module objects. - -* The :attr:`~codeobject.co_lnotab` attribute of code objects. - -Pending Removal in Python 3.15 ------------------------------- +.. include:: ../deprecations/pending-removal-in-3.13.rst -The following APIs have been deprecated -and will be removed in Python 3.15. +.. include:: ../deprecations/pending-removal-in-3.14.rst -APIs: +.. 
include:: ../deprecations/pending-removal-in-3.15.rst -* :func:`locale.getdefaultlocale` (:gh:`90817`) - - -Pending Removal in Future Versions ----------------------------------- - -The following APIs were deprecated in earlier Python versions and will be removed, -although there is currently no date scheduled for their removal. - -* :mod:`array`'s ``'u'`` format code (:gh:`57281`) - -* :class:`typing.Text` (:gh:`92332`) - -* :mod:`xml.etree.ElementTree`: Testing the truth value of an - :class:`xml.etree.ElementTree.Element` is deprecated. In a future release it - will always return True. Prefer explicit ``len(elem)`` or - ``elem is not None`` tests instead. - -* Currently Python accepts numeric literals immediately followed by keywords, - for example ``0in x``, ``1or x``, ``0if 1else 2``. It allows confusing - and ambiguous expressions like ``[0x1for x in y]`` (which can be - interpreted as ``[0x1 for x in y]`` or ``[0x1f or x in y]``). - A syntax warning is raised if the numeric literal is - immediately followed by one of keywords :keyword:`and`, :keyword:`else`, - :keyword:`for`, :keyword:`if`, :keyword:`in`, :keyword:`is` and :keyword:`or`. - In a future release it will be changed to a syntax error. (:gh:`87999`) +.. include:: ../deprecations/pending-removal-in-3.16.rst +.. include:: ../deprecations/pending-removal-in-future.rst Removed ======= @@ -1562,9 +1422,9 @@ hashlib ------- * Remove the pure Python implementation of :mod:`hashlib`'s - :func:`hashlib.pbkdf2_hmac()`, deprecated in Python 3.10. Python 3.10 and + :func:`hashlib.pbkdf2_hmac`, deprecated in Python 3.10. Python 3.10 and newer requires OpenSSL 1.1.1 (:pep:`644`): this OpenSSL version provides - a C implementation of :func:`~hashlib.pbkdf2_hmac()` which is faster. + a C implementation of :func:`~hashlib.pbkdf2_hmac` which is faster. (Contributed by Victor Stinner in :gh:`94199`.) importlib @@ -1573,7 +1433,7 @@ importlib * Many previously deprecated cleanups in :mod:`importlib` have now been completed: - * References to, and support for :meth:`!module_repr()` has been removed. + * References to, and support for :meth:`!module_repr` has been removed. (Contributed by Barry Warsaw in :gh:`97850`.) * ``importlib.util.set_package``, ``importlib.util.set_loader`` and @@ -2027,7 +1887,7 @@ New Features The :c:macro:`Py_TPFLAGS_MANAGED_DICT` and :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` flags have been added. This allows extensions classes to support object - ``__dict__`` and weakrefs with less bookkeeping, + :attr:`~object.__dict__` and weakrefs with less bookkeeping, using less memory and with faster access. * API for performing calls using @@ -2146,7 +2006,7 @@ Porting to Python 3.12 internal-only field directly. To get a list of subclasses, call the Python method - :py:meth:`~class.__subclasses__` (using :c:func:`PyObject_CallMethod`, + :py:meth:`~type.__subclasses__` (using :c:func:`PyObject_CallMethod`, for example). * Add support of more formatting options (left aligning, octals, uppercase @@ -2165,7 +2025,7 @@ Porting to Python 3.12 :c:func:`PyUnicode_FromFormatV`. (Contributed by Philip Georgi in :gh:`95504`.) -* Extension classes wanting to add a ``__dict__`` or weak reference slot +* Extension classes wanting to add a :attr:`~object.__dict__` or weak reference slot should use :c:macro:`Py_TPFLAGS_MANAGED_DICT` and :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` instead of ``tp_dictoffset`` and ``tp_weaklistoffset``, respectively. @@ -2350,92 +2210,13 @@ Deprecated overrides :c:member:`~PyTypeObject.tp_new` is deprecated. 
Call the metaclass instead. -Pending Removal in Python 3.14 -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules - (:pep:`699`; :gh:`101193`). - -* Global configuration variables: +.. Add deprecations above alphabetically, not here at the end. - * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` - * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` - * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` - * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` - * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` - * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` - * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` - * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` - * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` - * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` - * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` - * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` - * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` - * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` - and :c:member:`PyConfig.hash_seed` - * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` - * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` - * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` - * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` - * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`) +.. include:: ../deprecations/c-api-pending-removal-in-3.14.rst - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. +.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst -* Creating :c:data:`immutable types ` with mutable - bases (:gh:`95388`). - -Pending Removal in Python 3.15 -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule` -* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` -* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` -* Python initialization functions: - - * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and - :data:`!warnings.filters` - * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` - * :c:func:`Py_GetPath`: get :data:`sys.path` - * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` - * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` - * :c:func:`Py_GetProgramName`: get :data:`sys.executable` - * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or - the :envvar:`PYTHONHOME` environment variable - -Pending Removal in Future Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The following APIs are deprecated and will be removed, -although there is currently no date scheduled for their removal. 
- -* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: unneeded since Python 3.8 -* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException` -* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException` -* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException` -* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject` -* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child` -* :c:func:`PySlice_GetIndicesEx`: use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices` -* :c:func:`!PyUnicode_AsDecodedObject`: use :c:func:`PyCodec_Decode` -* :c:func:`!PyUnicode_AsDecodedUnicode`: use :c:func:`PyCodec_Decode` -* :c:func:`!PyUnicode_AsEncodedObject`: use :c:func:`PyCodec_Encode` -* :c:func:`!PyUnicode_AsEncodedUnicode`: use :c:func:`PyCodec_Encode` -* :c:func:`PyUnicode_READY`: unneeded since Python 3.12 -* :c:func:`!PyErr_Display`: use :c:func:`PyErr_DisplayException` -* :c:func:`!_PyErr_ChainExceptions`: use ``_PyErr_ChainExceptions1`` -* :c:member:`!PyBytesObject.ob_shash` member: - call :c:func:`PyObject_Hash` instead -* :c:member:`!PyDictObject.ma_version_tag` member -* Thread Local Storage (TLS) API: - - * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc` - * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free` - * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set` - * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get` - * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete` - * :c:func:`PyThread_ReInitTLS`: unneeded since Python 3.7 +.. include:: ../deprecations/c-api-pending-removal-in-future.rst Removed ------- diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 992d39e3152..a47d5e077a3 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -3,7 +3,7 @@ What's New In Python 3.13 **************************** -:Editor: Thomas Wouters +:Editors: Adam Turner and Thomas Wouters .. Rules for maintenance: @@ -46,19 +46,13 @@ when researching a change. This article explains the new features in Python 3.13, compared to 3.12. - +Python 3.13 was released on October 7, 2024. For full details, see the :ref:`changelog `. .. seealso:: :pep:`719` -- Python 3.13 Release Schedule -.. note:: - - Prerelease users should be aware that this document is currently in draft - form. It will be updated substantially as Python 3.13 moves towards release, - so it's worth checking back even after reading earlier versions. - Summary -- Release Highlights ============================= @@ -66,14 +60,38 @@ Summary -- Release Highlights .. This section singles out the most important changes in Python 3.13. Brevity is key. -Python 3.13 beta is the pre-release of the next version of the Python -programming language, with a mix of changes to the language, the -implementation and the standard library. The biggest changes to the -implementation include a new interactive interpreter, and experimental -support for dropping the Global Interpreter Lock (:pep:`703`) and a -Just-In-Time compiler (:pep:`744`). The library changes contain removal of -deprecated APIs and modules, as well as the usual improvements in -user-friendliness and correctness. +Python 3.13 is the latest stable release of the Python programming +language, with a mix of changes to the language, the implementation +and the standard library. 
+The biggest changes include a new `interactive interpreter +`_, +experimental support for running in a `free-threaded mode +`_ (:pep:`703`), +and a `Just-In-Time compiler `_ (:pep:`744`). + +Error messages continue to improve, with tracebacks now highlighted in color +by default. The :func:`locals` builtin now has :ref:`defined semantics +` for changing the returned mapping, +and type parameters now support default values. + +The library changes contain removal of deprecated APIs and modules, +as well as the usual improvements in user-friendliness and correctness. +Several legacy standard library modules have now `been removed +`_ following their deprecation in Python 3.11 (:pep:`594`). + +This article doesn't attempt to provide a complete specification +of all new features, but instead gives a convenient overview. +For full details refer to the documentation, +such as the :ref:`Library Reference ` +and :ref:`Language Reference `. +To understand the complete implementation and design rationale for a change, +refer to the PEP for a particular new feature; +but note that PEPs usually are not kept up-to-date +once a feature has been fully implemented. +See `Porting to Python 3.13`_ for guidance on upgrading from +earlier versions of Python. + +-------------- .. PEP-sized items next. @@ -82,118 +100,160 @@ Interpreter improvements: * A greatly improved :ref:`interactive interpreter ` and :ref:`improved error messages `. - -* Color support in the new :ref:`interactive interpreter - `, - as well as in :ref:`tracebacks ` - and :ref:`doctest ` output. This can be disabled through the - :envvar:`PYTHON_COLORS` and |NO_COLOR|_ environment variables. - -* :pep:`744`: A basic :ref:`JIT compiler ` was added. - It is currently disabled by default (though we may turn it on later). - Performance improvements are modest -- we expect to be improving this - over the next few releases. - * :pep:`667`: The :func:`locals` builtin now has :ref:`defined semantics ` when mutating the returned mapping. Python debuggers and similar tools may now more reliably update local variables in optimized scopes even during concurrent code execution. +* :pep:`703`: CPython 3.13 has experimental support for running with the + :term:`global interpreter lock` disabled. See :ref:`Free-threaded CPython + ` for more details. +* :pep:`744`: A basic :ref:`JIT compiler ` was added. + It is currently disabled by default (though we may turn it on later). + Performance improvements are modest -- we expect to improve this + over the next few releases. +* Color support in the new :ref:`interactive interpreter + `, + as well as in :ref:`tracebacks ` + and :ref:`doctest ` output. + This can be disabled through the :envvar:`PYTHON_COLORS` and |NO_COLOR|_ + environment variables. + +Python data model improvements: + +* :attr:`~type.__static_attributes__` stores the names of attributes accessed + through ``self.X`` in any function in a class body. +* :attr:`~type.__firstlineno__` records the first line number of a class + definition. + +Significant improvements in the standard library: + +* Add a new :exc:`PythonFinalizationError` exception, raised when an operation + is blocked during :term:`finalization `. +* The :mod:`argparse` module now supports deprecating command-line options, + positional arguments, and subcommands. +* The new functions :func:`base64.z85encode` and :func:`base64.z85decode` + support encoding and decoding `Z85 data`_. 
+* The :mod:`copy` module now has a :func:`copy.replace` function, + with support for many builtin types and any class defining + the :func:`~object.__replace__` method. +* The new :mod:`dbm.sqlite3` module is now the default :mod:`dbm` backend. +* The :mod:`os` module has a :ref:`suite of new functions ` + for working with Linux's timer notification file descriptors. +* The :mod:`random` module now has a :ref:`command-line interface `. + +Security improvements: + +* :func:`ssl.create_default_context` sets :data:`ssl.VERIFY_X509_PARTIAL_CHAIN` + and :data:`ssl.VERIFY_X509_STRICT` as default flags. + +C API improvements: + +* The :c:data:`Py_mod_gil` slot is now used to indicate that + an extension module supports running with the :term:`GIL` disabled. +* The :doc:`PyTime C API ` has been added, + providing access to system clocks. +* :c:type:`PyMutex` is a new lightweight mutex that occupies a single byte. +* There is a new :ref:`suite of functions ` + for generating :pep:`669` monitoring events in the C API. New typing features: * :pep:`696`: Type parameters (:data:`typing.TypeVar`, :data:`typing.ParamSpec`, and :data:`typing.TypeVarTuple`) now support defaults. - -* :pep:`702`: Support for marking deprecations in the type system using the - new :func:`warnings.deprecated` decorator. - -* :pep:`742`: :data:`typing.TypeIs` was added, providing more intuitive - type narrowing behavior. - -* :pep:`705`: :data:`typing.ReadOnly` was added, to mark an item of a +* :pep:`702`: The new :func:`warnings.deprecated` decorator adds support + for marking deprecations in the type system and at runtime. +* :pep:`705`: :data:`typing.ReadOnly` can be used to mark an item of a :class:`typing.TypedDict` as read-only for type checkers. - -Free-threading: - -* :pep:`703`: CPython 3.13 has experimental support for running with the - :term:`global interpreter lock` disabled when built with ``--disable-gil``. - See :ref:`Free-threaded CPython ` for more details. +* :pep:`742`: :data:`typing.TypeIs` provides more intuitive + type narrowing behavior, as an alternative to :data:`typing.TypeGuard`. Platform support: -* :pep:`730`: Apple's iOS is now an officially supported platform. Official - Android support (:pep:`738`) is in the works as well. +* :pep:`730`: Apple's iOS is now an :ref:`officially supported platform + `, at :pep:`tier 3 <11#tier-3>`. +* :pep:`738`: Android is now an :ref:`officially supported platform + `, at :pep:`tier 3 <11#tier-3>`. +* ``wasm32-wasi`` is now supported as a :pep:`tier 2 <11#tier-2>` platform. +* ``wasm32-emscripten`` is no longer an officially supported platform. -Removed modules: +Important removals: * :ref:`PEP 594 `: The remaining 19 "dead batteries" - have been removed from the standard library: + (legacy stdlib modules) have been removed from the standard library: :mod:`!aifc`, :mod:`!audioop`, :mod:`!cgi`, :mod:`!cgitb`, :mod:`!chunk`, :mod:`!crypt`, :mod:`!imghdr`, :mod:`!mailcap`, :mod:`!msilib`, :mod:`!nis`, - :mod:`!nntplib`, :mod:`!ossaudiodev`, :mod:`!pipes`, :mod:`!sndhdr`, :mod:`!spwd`, - :mod:`!sunau`, :mod:`!telnetlib`, :mod:`!uu` and :mod:`!xdrlib`. - -* Also removed were the :mod:`!tkinter.tix` and :mod:`!lib2to3` modules, and the - ``2to3`` program. + :mod:`!nntplib`, :mod:`!ossaudiodev`, :mod:`!pipes`, :mod:`!sndhdr`, + :mod:`!spwd`, :mod:`!sunau`, :mod:`!telnetlib`, :mod:`!uu` and :mod:`!xdrlib`. +* Remove the :program:`2to3` tool and :mod:`!lib2to3` module + (deprecated in Python 3.11). +* Remove the :mod:`!tkinter.tix` module (deprecated in Python 3.6). 
+* Remove the :func:`!locale.resetlocale` function. +* Remove the :mod:`!typing.io` and :mod:`!typing.re` namespaces. +* Remove chained :class:`classmethod` descriptors. Release schedule changes: -* :pep:`602` ("Annual Release Cycle for Python") has been updated: +:pep:`602` ("Annual Release Cycle for Python") has been updated +to extend the full support ('bugfix') period for new releases to two years. +This updated policy means that: + +* Python 3.9--3.12 have one and a half years of full support, + followed by three and a half years of security fixes. +* Python 3.13 and later have two years of full support, + followed by three years of security fixes. - * Python 3.9 - 3.12 have one and a half years of full support, - followed by three and a half years of security fixes. - * Python 3.13 and later have two years of full support, - followed by three years of security fixes. New Features ============ + .. _whatsnew313-better-interactive-interpreter: -A Better Interactive Interpreter +A better interactive interpreter -------------------------------- -On Unix-like systems like Linux or macOS as well as Windows, Python now -uses a new :term:`interactive` shell. When the user starts the -:term:`REPL` from an interactive terminal the interactive shell now -supports the following new features: +Python now uses a new :term:`interactive` shell by default, based on code +from the `PyPy project`_. +When the user starts the :term:`REPL` from an interactive terminal, +the following new features are now supported: -* Colorized prompts. * Multiline editing with history preservation. +* Direct support for REPL-specific commands like :kbd:`help`, :kbd:`exit`, + and :kbd:`quit`, without the need to call them as functions. +* Prompts and tracebacks with :ref:`color enabled by default + `. * Interactive help browsing using :kbd:`F1` with a separate command history. * History browsing using :kbd:`F2` that skips output as well as the :term:`>>>` and :term:`...` prompts. * "Paste mode" with :kbd:`F3` that makes pasting larger blocks of code easier (press :kbd:`F3` again to return to the regular prompt). -* The ability to issue REPL-specific commands like :kbd:`help`, :kbd:`exit`, - and :kbd:`quit` without the need to use call parentheses after the command - name. - -If the new interactive shell is not desired, it can be disabled via -the :envvar:`PYTHON_BASIC_REPL` environment variable. - -The new shell requires :mod:`curses` on Unix-like systems. +To disable the new interactive shell, +set the :envvar:`PYTHON_BASIC_REPL` environment variable. For more on interactive mode, see :ref:`tut-interac`. (Contributed by Pablo Galindo Salgado, Łukasz Langa, and Lysandros Nikolaou in :gh:`111201` based on code from the PyPy project. Windows support contributed by Dino Viehland and Anthony Shaw.) +.. _`PyPy project`: https://pypy.org/ + + .. _whatsnew313-improved-error-messages: -Improved Error Messages +Improved error messages ----------------------- -* The interpreter now colorizes error messages when displaying tracebacks by default. - This feature can be controlled via the new :envvar:`PYTHON_COLORS` environment - variable as well as the canonical |NO_COLOR|_ and |FORCE_COLOR|_ environment - variables. See also :ref:`using-on-controlling-color`. +* The interpreter now uses color by default when displaying tracebacks in the + terminal. This feature :ref:`can be controlled ` + via the new :envvar:`PYTHON_COLORS` environment variable as well as + the canonical |NO_COLOR|_ and |FORCE_COLOR|_ environment variables. 
(Contributed by Pablo Galindo Salgado in :gh:`112730`.) .. Apparently this how you hack together a formatted link: + (https://www.docutils.org/docs/ref/rst/directives.html#replacement-text) .. |FORCE_COLOR| replace:: ``FORCE_COLOR`` .. _FORCE_COLOR: https://force-color.org/ @@ -205,880 +265,1027 @@ Improved Error Messages standard library module. When this results in errors, we now display a more helpful error message: - .. code-block:: shell-session + .. code-block:: pytb - $ python random.py - Traceback (most recent call last): - File "/home/random.py", line 1, in - import random; print(random.randint(5)) - ^^^^^^^^^^^^^ - File "/home/random.py", line 1, in - import random; print(random.randint(5)) - ^^^^^^^^^^^^^^ - AttributeError: module 'random' has no attribute 'randint' (consider renaming '/home/random.py' since it has the same name as the standard library module named 'random' and the import system gives it precedence) + $ python random.py + Traceback (most recent call last): + File "/home/me/random.py", line 1, in + import random + File "/home/me/random.py", line 3, in + print(random.randint(5)) + ^^^^^^^^^^^^^^ + AttributeError: module 'random' has no attribute 'randint' (consider renaming '/home/me/random.py' since it has the same name as the standard library module named 'random' and the import system gives it precedence) Similarly, if a script has the same name as a third-party - module it attempts to import, and this results in errors, + module that it attempts to import and this results in errors, we also display a more helpful error message: - .. code-block:: shell-session + .. code-block:: pytb - $ python numpy.py - Traceback (most recent call last): - File "/home/numpy.py", line 1, in - import numpy as np; np.array([1,2,3]) - ^^^^^^^^^^^^^^^^^^ - File "/home/numpy.py", line 1, in - import numpy as np; np.array([1,2,3]) - ^^^^^^^^ - AttributeError: module 'numpy' has no attribute 'array' (consider renaming '/home/numpy.py' if it has the same name as a third-party module you intended to import) + $ python numpy.py + Traceback (most recent call last): + File "/home/me/numpy.py", line 1, in + import numpy as np + File "/home/me/numpy.py", line 3, in + np.array([1, 2, 3]) + ^^^^^^^^ + AttributeError: module 'numpy' has no attribute 'array' (consider renaming '/home/me/numpy.py' if it has the same name as a third-party module you intended to import) (Contributed by Shantanu Jain in :gh:`95754`.) -* When an incorrect keyword argument is passed to a function, the error message - now potentially suggests the correct keyword argument. - (Contributed by Pablo Galindo Salgado and Shantanu Jain in :gh:`107944`.) +* The error message now tries to suggest the correct keyword argument + when an incorrect keyword argument is passed to a function. - >>> "better error messages!".split(max_split=1) - Traceback (most recent call last): - File "", line 1, in - "better error messages!".split(max_split=1) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^ - TypeError: split() got an unexpected keyword argument 'max_split'. Did you mean 'maxsplit'? + .. code-block:: pycon -* Classes have a new :attr:`~class.__static_attributes__` attribute, populated by the compiler, - with a tuple of names of attributes of this class which are accessed - through ``self.X`` from any function in its body. (Contributed by Irit Katriel - in :gh:`115775`.) 
+ >>> "Better error messages!".split(max_split=1) + Traceback (most recent call last): + File "", line 1, in + "Better error messages!".split(max_split=1) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^ + TypeError: split() got an unexpected keyword argument 'max_split'. Did you mean 'maxsplit'? -.. _whatsnew313-locals-semantics: + (Contributed by Pablo Galindo Salgado and Shantanu Jain in :gh:`107944`.) -Defined mutation semantics for ``locals()`` -------------------------------------------- -Historically, the expected result of mutating the return value of :func:`locals` -has been left to individual Python implementations to define. - -Through :pep:`667`, Python 3.13 standardises the historical behaviour of CPython -for most code execution scopes, but changes -:term:`optimized scopes ` (functions, generators, coroutines, -comprehensions, and generator expressions) to explicitly return independent -snapshots of the currently assigned local variables, including locally -referenced nonlocal variables captured in closures. - -This change to the semantics of :func:`locals` in optimized scopes also affects the default -behaviour of code execution functions that implicitly target ``locals()`` if no explicit -namespace is provided (such as :func:`exec` and :func:`eval`). In previous versions, whether -or not changes could be accessed by calling ``locals()`` after calling the code execution -function was implementation dependent. In CPython specifically, such code would typically -appear to work as desired, but could sometimes fail in optimized scopes based on other code -(including debuggers and code execution tracing tools) potentially resetting the shared -snapshot in that scope. Now, the code will always run against an independent snapshot of the -local variables in optimized scopes, and hence the changes will never be visible in -subsequent calls to ``locals()``. To access the changes made in these cases, an explicit -namespace reference must now be passed to the relevant function. Alternatively, it may make -sense to update affected code to use a higher level code execution API that returns the -resulting code execution namespace (e.g. :func:`runpy.run_path` when executing Python -files from disk). +.. _whatsnew313-free-threaded-cpython: -To ensure debuggers and similar tools can reliably update local variables in -scopes affected by this change, :attr:`FrameType.f_locals ` now -returns a write-through proxy to the frame's local and locally referenced -nonlocal variables in these scopes, rather than returning an inconsistently -updated shared ``dict`` instance with undefined runtime semantics. +Free-threaded CPython +--------------------- -See :pep:`667` for more details, including related C API changes and deprecations. Porting -notes are also provided below for the affected :ref:`Python APIs ` -and :ref:`C APIs `. +CPython now has experimental support for running in a free-threaded mode, +with the :term:`global interpreter lock` (GIL) disabled. +This is an experimental feature and therefore is not enabled by default. +The free-threaded mode requires a different executable, +usually called ``python3.13t`` or ``python3.13t.exe``. +Pre-built binaries marked as *free-threaded* can be installed as part of +the official :ref:`Windows ` +and :ref:`macOS ` installers, +or CPython can be built from source with the :option:`--disable-gil` option. -(PEP and implementation contributed by Mark Shannon and Tian Gao in -:gh:`74929`. Documentation updates provided by Guido van Rossum and -Alyssa Coghlan.) 
+Free-threaded execution allows for full utilization of the available +processing power by running threads in parallel on available CPU cores. +While not all software will benefit from this automatically, programs +designed with threading in mind will run faster on multi-core hardware. +**The free-threaded mode is experimental** and work is ongoing to improve it: +expect some bugs and a substantial single-threaded performance hit. +Free-threaded builds of CPython support optionally running with the GIL +enabled at runtime using the environment variable :envvar:`PYTHON_GIL` or +the command-line option :option:`-X gil=1`. -Incremental Garbage Collection ------------------------------- +To check if the current interpreter supports free-threading, :option:`python -VV <-V>` +and :attr:`sys.version` contain "experimental free-threading build". +The new :func:`!sys._is_gil_enabled` function can be used to check whether +the GIL is actually disabled in the running process. -* The cycle garbage collector is now incremental. - This means that maximum pause times are reduced - by an order of magnitude or more for larger heaps. +C-API extension modules need to be built specifically for the free-threaded +build. Extensions that support running with the :term:`GIL` disabled should +use the :c:data:`Py_mod_gil` slot. Extensions using single-phase init should +use :c:func:`PyUnstable_Module_SetGIL` to indicate whether they support +running with the GIL disabled. Importing C extensions that don't use these +mechanisms will cause the GIL to be enabled, unless the GIL was explicitly +disabled with the :envvar:`PYTHON_GIL` environment variable or the +:option:`-X gil=0` option. +pip 24.1 or newer is required to install packages with C extensions in the +free-threaded build. -Support For Mobile Platforms ----------------------------- +This work was made possible thanks to many individuals and +organizations, including the large community of contributors to Python +and third-party projects to test and enable free-threading support. +Notable contributors include: +Sam Gross, Ken Jin, Donghee Na, Itamar Oren, Matt Page, Brett Simmers, +Dino Viehland, Carl Meyer, Nathan Goldbaum, Ralf Gommers, +Lysandros Nikolaou, and many others. +Many of these contributors are employed by Meta, which has +provided significant engineering resources to support this project. -* iOS is now a :pep:`11` supported platform. ``arm64-apple-ios`` - (iPhone and iPad devices released after 2013) and - ``arm64-apple-ios-simulator`` (Xcode iOS simulator running on Apple Silicon - hardware) are now tier 3 platforms. +.. seealso:: - ``x86_64-apple-ios-simulator`` (Xcode iOS simulator running on older x86_64 - hardware) is not a tier 3 supported platform, but will be supported on a - best-effort basis. + :pep:`703` "Making the Global Interpreter Lock Optional in CPython" + contains rationale and information surrounding this work. - See :pep:`730`: for more details. + `Porting Extension Modules to Support Free-Threading + `_: A community-maintained + porting guide for extension authors. - (PEP written and implementation contributed by Russell Keith-Magee in - :gh:`114099`.) .. _whatsnew313-jit-compiler: -Experimental JIT Compiler -========================= +An experimental just-in-time (JIT) compiler +------------------------------------------- -When CPython is configured using the ``--enable-experimental-jit`` option, -a just-in-time compiler is added which may speed up some Python programs. 
+When CPython is configured and built using +the :option:`!--enable-experimental-jit` option, +a just-in-time (JIT) compiler is added which may speed up some Python programs. +On Windows, use ``PCbuild/build.bat --experimental-jit`` to enable the JIT +or ``--experimental-jit-interpreter`` to enable the Tier 2 interpreter. +Build requirements and further supporting information `are contained at`__ +:file:`Tools/jit/README.md`. -The internal architecture is roughly as follows. +__ https://github.com/python/cpython/blob/main/Tools/jit/README.md -* We start with specialized *Tier 1 bytecode*. - See :ref:`What's new in 3.11 ` for details. +The :option:`!--enable-experimental-jit` option takes these (optional) values, +defaulting to ``yes`` if :option:`!--enable-experimental-jit` is present +without the optional value. -* When the Tier 1 bytecode gets hot enough, it gets translated - to a new, purely internal *Tier 2 IR*, a.k.a. micro-ops ("uops"). +* ``no``: Disable the entire Tier 2 and JIT pipeline. +* ``yes``: Enable the JIT. + To disable the JIT at runtime, pass the environment variable ``PYTHON_JIT=0``. +* ``yes-off``: Build the JIT but disable it by default. + To enable the JIT at runtime, pass the environment variable ``PYTHON_JIT=1``. +* ``interpreter``: Enable the Tier 2 interpreter but disable the JIT. + The interpreter can be disabled by running with ``PYTHON_JIT=0``. -* The Tier 2 IR uses the same stack-based VM as Tier 1, but the - instruction format is better suited to translation to machine code. +The internal architecture is roughly as follows: +* We start with specialized *Tier 1 bytecode*. + See :ref:`What's new in 3.11 ` for details. +* When the Tier 1 bytecode gets hot enough, it gets translated + to a new purely internal intermediate representation (IR), + called the *Tier 2 IR*, and sometimes referred to as micro-ops ("uops"). +* The Tier 2 IR uses the same stack-based virtual machine as Tier 1, + but the instruction format is better suited to translation to machine code. * We have several optimization passes for Tier 2 IR, which are applied before it is interpreted or translated to machine code. - * There is a Tier 2 interpreter, but it is mostly intended for debugging the earlier stages of the optimization pipeline. The Tier 2 interpreter can be enabled by configuring Python with ``--enable-experimental-jit=interpreter``. - * When the JIT is enabled, the optimized Tier 2 IR is translated to machine code, which is then executed. - * The machine code translation process uses a technique called *copy-and-patch*. It has no runtime dependencies, but there is a new build-time dependency on LLVM. -The ``--enable-experimental-jit`` flag has the following optional values: - -* ``no`` (default) -- Disable the entire Tier 2 and JIT pipeline. - -* ``yes`` (default if the flag is present without optional value) - -- Enable the JIT. To disable the JIT at runtime, - pass the environment variable ``PYTHON_JIT=0``. - -* ``yes-off`` -- Build the JIT but disable it by default. - To enable the JIT at runtime, pass the environment variable - ``PYTHON_JIT=1``. - -* ``interpreter`` -- Enable the Tier 2 interpreter but disable the JIT. - The interpreter can be disabled by running with - ``PYTHON_JIT=0``. - -(On Windows, use ``PCbuild/build.bat --experimental-jit`` to enable the JIT -or ``--experimental-jit-interpreter`` to enable the Tier 2 interpreter.) - -See :pep:`744` for more details. +.. seealso:: :pep:`744` (JIT by Brandt Bucher, inspired by a paper by Haoran Xu and Fredrik Kjolstad. 
Tier 2 IR by Mark Shannon and Guido van Rossum. Tier 2 optimizer by Ken Jin.) -.. _free-threaded-cpython: - -Free-threaded CPython -===================== -CPython will run with the :term:`global interpreter lock` (GIL) disabled when -configured using the ``--disable-gil`` option at build time. This is an -experimental feature and therefore isn't used by default. Users need to -either compile their own interpreter, or install one of the experimental -builds that are marked as *free-threaded*. See :pep:`703` "Making the Global -Interpreter Lock Optional in CPython" for more detail. +.. _whatsnew313-locals-semantics: -Free-threaded execution allows for full utilization of the available -processing power by running threads in parallel on available CPU cores. -While not all software will benefit from this automatically, programs -designed with threading in mind will run faster on multicore hardware. +Defined mutation semantics for :py:func:`locals` +------------------------------------------------ + +Historically, the expected result of mutating the return value of +:func:`locals` has been left to individual Python implementations to define. +Starting from Python 3.13, :pep:`667` standardises +the historical behavior of CPython for most code execution scopes, +but changes :term:`optimized scopes ` +(functions, generators, coroutines, comprehensions, and generator expressions) +to explicitly return independent snapshots of the currently assigned local +variables, including locally referenced nonlocal variables captured in closures. + +This change to the semantics of :func:`locals` in optimized scopes also +affects the default behavior of code execution functions that implicitly +target :func:`!locals` if no explicit namespace is provided +(such as :func:`exec` and :func:`eval`). +In previous versions, whether or not changes could be accessed by calling +:func:`!locals` after calling the code execution function was +implementation-dependent. In CPython specifically, such code would typically +appear to work as desired, but could sometimes fail in optimized scopes based +on other code (including debuggers and code execution tracing tools) +potentially resetting the shared snapshot in that scope. +Now, the code will always run against an independent snapshot of +the local variables in optimized scopes, and hence the changes will never +be visible in subsequent calls to :func:`!locals`. +To access the changes made in these cases, an explicit namespace reference +must now be passed to the relevant function. +Alternatively, it may make sense to update affected code to use a higher level +code execution API that returns the resulting code execution namespace +(e.g. :func:`runpy.run_path` when executing Python files from disk). -Work is still ongoing: expect some bugs and a substantial single-threaded -performance hit. +To ensure debuggers and similar tools can reliably update local variables in +scopes affected by this change, :attr:`FrameType.f_locals ` now +returns a write-through proxy to the frame's local and locally referenced +nonlocal variables in these scopes, rather than returning an inconsistently +updated shared ``dict`` instance with undefined runtime semantics. -The free-threaded build still supports optionally running with the GIL -enabled at runtime using the environment variable :envvar:`PYTHON_GIL` or -the command line option :option:`-X gil`. +See :pep:`667` for more details, including related C API changes +and deprecations. 
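As a brief illustrative sketch (not an excerpt from the PEP), the snapshot
behavior in an optimized scope can be observed directly: writing to the mapping
returned by :func:`locals` inside a function no longer changes the underlying
local variable, and each call returns a fresh, independent snapshot.

.. code-block:: pycon

   >>> def demo():
   ...     x = 1
   ...     locals()["x"] = 99   # mutates an independent snapshot only
   ...     return x, locals()["x"]
   ...
   >>> demo()
   (1, 1)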
Porting notes are also provided below for the affected +:ref:`Python APIs ` and :ref:`C APIs +`. -To check if the current interpreter is configured with ``--disable-gil``, -use ``sysconfig.get_config_var("Py_GIL_DISABLED")``. To check if the :term:`GIL` -is actually disabled in the running process, the :func:`!sys._is_gil_enabled` -function can be used. +(PEP and implementation contributed by Mark Shannon and Tian Gao in +:gh:`74929`. Documentation updates provided by Guido van Rossum and +Alyssa Coghlan.) -C-API extension modules need to be built specifically for the free-threaded -build. Extensions that support running with the :term:`GIL` disabled should -use the :c:data:`Py_mod_gil` slot. Extensions using single-phase init should -use :c:func:`PyUnstable_Module_SetGIL` to indicate whether they support -running with the GIL disabled. Importing C extensions that don't use these -mechanisms will cause the GIL to be enabled, unless the GIL was explicitly -disabled with the :envvar:`PYTHON_GIL` environment variable or the -:option:`-X gil=0` option. -pip 24.1b1 or newer is required to install packages with C extensions in the -free-threaded build. +.. _whatsnew313-platform-support: -Other Language Changes -====================== +Support for mobile platforms +---------------------------- -* Allow the *count* argument of :meth:`str.replace` to be a keyword. - (Contributed by Hugo van Kemenade in :gh:`106487`.) +:pep:`730`: iOS is now a :pep:`11` supported platform, with the +``arm64-apple-ios`` and ``arm64-apple-ios-simulator`` targets at tier 3 +(iPhone and iPad devices released after 2013 and the Xcode iOS simulator +running on Apple silicon hardware, respectively). +``x86_64-apple-ios-simulator`` +(the Xcode iOS simulator running on older ``x86_64`` hardware) +is not a tier 3 supported platform, but will have best-effort support. +(PEP written and implementation contributed by Russell Keith-Magee in +:gh:`114099`.) -* Compiler now strip indents from docstrings. - This will reduce the size of :term:`bytecode cache ` (e.g. ``.pyc`` file). - For example, cache file size for ``sqlalchemy.orm.session`` in SQLAlchemy 2.0 - is reduced by about 5%. - This change will affect tools using docstrings, like :mod:`doctest`. - (Contributed by Inada Naoki in :gh:`81283`.) +:pep:`738`: Android is now a :pep:`11` supported platform, with the +``aarch64-linux-android`` and ``x86_64-linux-android`` targets at tier 3. +The 32-bit targets ``arm-linux-androideabi`` and ``i686-linux-android`` +are not tier 3 supported platforms, but will have best-effort support. +(PEP written and implementation contributed by Malcolm Smith in +:gh:`116622`.) -* The :func:`compile` built-in can now accept a new flag, - ``ast.PyCF_OPTIMIZED_AST``, which is similar to ``ast.PyCF_ONLY_AST`` - except that the returned ``AST`` is optimized according to the value - of the ``optimize`` argument. - (Contributed by Irit Katriel in :gh:`108113`). +.. seealso:: :pep:`730`, :pep:`738` -* :mod:`multiprocessing`, :mod:`concurrent.futures`, :mod:`compileall`: - Replace :func:`os.cpu_count` with :func:`os.process_cpu_count` to select the - default number of worker threads and processes. Get the CPU affinity - if supported. - (Contributed by Victor Stinner in :gh:`109649`.) -* :func:`os.path.realpath` now resolves MS-DOS style file names even if - the file is not accessible. - (Contributed by Moonsik Park in :gh:`82367`.) 
+Other Language Changes +====================== -* Fixed a bug where a :keyword:`global` declaration in an :keyword:`except` block - is rejected when the global is used in the :keyword:`else` block. - (Contributed by Irit Katriel in :gh:`111123`.) +* The compiler now strips common leading whitespace + from every line in a docstring. + This reduces the size of the :term:`bytecode cache ` + (such as ``.pyc`` files), with reductions in file size of around 5%, + for example in :mod:`!sqlalchemy.orm.session` from SQLAlchemy 2.0. + This change affects tools that use docstrings, such as :mod:`doctest`. + + .. doctest:: + + >>> def spam(): + ... """ + ... This is a docstring with + ... leading whitespace. + ... + ... It even has multiple paragraphs! + ... """ + ... + >>> spam.__doc__ + '\nThis is a docstring with\n leading whitespace.\n\nIt even has multiple paragraphs!\n' -* Many functions now emit a warning if a boolean value is passed as - a file descriptor argument. - This can help catch some errors earlier. - (Contributed by Serhiy Storchaka in :gh:`82626`.) + (Contributed by Inada Naoki in :gh:`81283`.) -* Added a new environment variable :envvar:`PYTHON_FROZEN_MODULES`. It - determines whether or not frozen modules are ignored by the import machinery, - equivalent of the :option:`-X frozen_modules <-X>` command-line option. - (Contributed by Yilei Yang in :gh:`111374`.) +* :ref:`Annotation scopes ` within class scopes + can now contain lambdas and comprehensions. + Comprehensions that are located within class scopes + are not inlined into their parent scope. -* Add :ref:`support for the perf profiler ` working without - frame pointers through the new environment variable - :envvar:`PYTHON_PERF_JIT_SUPPORT` and command-line option :option:`-X perf_jit - <-X>` (Contributed by Pablo Galindo in :gh:`118518`.) + .. code-block:: python -* The new :envvar:`PYTHON_HISTORY` environment variable can be used to change - the location of a ``.python_history`` file. - (Contributed by Levi Sabah, Zackery Spytz and Hugo van Kemenade in - :gh:`73965`.) + class C[T]: + type Alias = lambda: T -* Add :exc:`PythonFinalizationError` exception. This exception derived from - :exc:`RuntimeError` is raised when an operation is blocked during - the :term:`Python finalization `. + (Contributed by Jelle Zijlstra in :gh:`109118` and :gh:`118160`.) - The following functions now raise PythonFinalizationError, instead of - :exc:`RuntimeError`: +* :ref:`Future statements ` are no longer triggered by + relative imports of the :mod:`__future__` module, + meaning that statements of the form ``from .__future__ import ...`` + are now simply standard relative imports, with no special features activated. + (Contributed by Jeremiah Gabriel Pascual in :gh:`118216`.) - * :func:`_thread.start_new_thread`. - * :class:`subprocess.Popen`. - * :func:`os.fork`. - * :func:`os.forkpty`. +* :keyword:`global` declarations are now permitted in :keyword:`except` blocks + when that global is used in the :keyword:`else` block. + Previously this raised an erroneous :exc:`SyntaxError`. + (Contributed by Irit Katriel in :gh:`111123`.) - (Contributed by Victor Stinner in :gh:`114570`.) +* Add :envvar:`PYTHON_FROZEN_MODULES`, a new environment variable that + determines whether frozen modules are ignored by the import machinery, + equivalent to the :option:`-X frozen_modules <-X>` command-line option. + (Contributed by Yilei Yang in :gh:`111374`.) 
-* Added :attr:`!name` and :attr:`!mode` attributes for compressed - and archived file-like objects in modules :mod:`bz2`, :mod:`lzma`, - :mod:`tarfile` and :mod:`zipfile`. - (Contributed by Serhiy Storchaka in :gh:`115961`.) +* Add :ref:`support for the perf profiler ` working + without `frame pointers `_ through + the new environment variable :envvar:`PYTHON_PERF_JIT_SUPPORT` + and command-line option :option:`-X perf_jit <-X>`. + (Contributed by Pablo Galindo in :gh:`118518`.) -* Allow controlling Expat >=2.6.0 reparse deferral (:cve:`2023-52425`) - by adding five new methods: +* The location of a :file:`.python_history` file can be changed via the + new :envvar:`PYTHON_HISTORY` environment variable. + (Contributed by Levi Sabah, Zackery Spytz and Hugo van Kemenade + in :gh:`73965`.) - * :meth:`xml.etree.ElementTree.XMLParser.flush` - * :meth:`xml.etree.ElementTree.XMLPullParser.flush` - * :meth:`xml.parsers.expat.xmlparser.GetReparseDeferralEnabled` - * :meth:`xml.parsers.expat.xmlparser.SetReparseDeferralEnabled` - * :meth:`!xml.sax.expatreader.ExpatParser.flush` +* Classes have a new :attr:`~type.__static_attributes__` attribute. + This is populated by the compiler with a tuple of the class's attribute names + which are assigned through ``self.`` from any function in its body. + (Contributed by Irit Katriel in :gh:`115775`.) - (Contributed by Sebastian Pipping in :gh:`115623`.) +* The compiler now creates a :attr:`!__firstlineno__` attribute on classes + with the line number of the first line of the class definition. + (Contributed by Serhiy Storchaka in :gh:`118465`.) -* The :func:`ssl.create_default_context` API now includes - :data:`ssl.VERIFY_X509_PARTIAL_CHAIN` and :data:`ssl.VERIFY_X509_STRICT` - in its default flags. +* The :func:`exec` and :func:`eval` builtins now accept + the *globals* and *locals* arguments as keywords. + (Contributed by Raphael Gaschignard in :gh:`105879`) - .. note:: +* The :func:`compile` builtin now accepts a new flag, + ``ast.PyCF_OPTIMIZED_AST``, which is similar to ``ast.PyCF_ONLY_AST`` + except that the returned AST is optimized according to + the value of the *optimize* argument. + (Contributed by Irit Katriel in :gh:`108113`). - :data:`ssl.VERIFY_X509_STRICT` may reject pre-:rfc:`5280` or malformed - certificates that the underlying OpenSSL implementation otherwise would - accept. While disabling this is not recommended, you can do so using:: +* Add a :attr:`~property.__name__` attribute on :class:`property` objects. + (Contributed by Eugene Toder in :gh:`101860`.) - ctx = ssl.create_default_context() - ctx.verify_flags &= ~ssl.VERIFY_X509_STRICT +* Add :exc:`PythonFinalizationError`, a new exception derived from + :exc:`RuntimeError` and used to signal when operations are blocked + during :term:`finalization `. + The following callables now raise :exc:`!PythonFinalizationError`, + instead of :exc:`RuntimeError`: - (Contributed by William Woodruff in :gh:`112389`.) + * :func:`_thread.start_new_thread` + * :func:`os.fork` + * :func:`os.forkpty` + * :class:`subprocess.Popen` -* The :class:`configparser.ConfigParser` now accepts unnamed sections before named - ones if configured to do so. - (Contributed by Pedro Sousa Lacerda in :gh:`66449`.) + (Contributed by Victor Stinner in :gh:`114570`.) -* :ref:`annotation scope ` within class scopes can now - contain lambdas and comprehensions. Comprehensions that are located within - class scopes are not inlined into their parent scope. (Contributed by - Jelle Zijlstra in :gh:`109118` and :gh:`118160`.) 
+* Allow the *count* argument of :meth:`str.replace` to be a keyword. + (Contributed by Hugo van Kemenade in :gh:`106487`.) -* Classes have a new :attr:`!__firstlineno__` attribute, - populated by the compiler, with the line number of the first line - of the class definition. - (Contributed by Serhiy Storchaka in :gh:`118465`.) +* Many functions now emit a warning if a boolean value is passed as + a file descriptor argument. + This can help catch some errors earlier. + (Contributed by Serhiy Storchaka in :gh:`82626`.) -* ``from __future__ import ...`` statements are now just normal - relative imports if dots are present before the module name. - (Contributed by Jeremiah Gabriel Pascual in :gh:`118216`.) +* Added :attr:`!name` and :attr:`!mode` attributes + for compressed and archived file-like objects in + the :mod:`bz2`, :mod:`lzma`, :mod:`tarfile`, and :mod:`zipfile` modules. + (Contributed by Serhiy Storchaka in :gh:`115961`.) New Modules =========== -* None. +* :mod:`dbm.sqlite3`: An SQLite backend for :mod:`dbm`. + (Contributed by Raymond Hettinger and Erlend E. Aasland in :gh:`100414`.) Improved Modules ================ + argparse -------- -* Add parameter *deprecated* in methods - :meth:`~argparse.ArgumentParser.add_argument` and :meth:`!add_parser` - which allows to deprecate command-line options, positional arguments and - subcommands. +* Add the *deprecated* parameter to the + :meth:`~argparse.ArgumentParser.add_argument` + and :meth:`!add_parser` methods, to enable deprecating + command-line options, positional arguments, and subcommands. (Contributed by Serhiy Storchaka in :gh:`83648`.) + array ----- -* Add ``'w'`` type code (``Py_UCS4``) that can be used for Unicode strings. - It can be used instead of ``'u'`` type code, which is deprecated. +* Add the ``'w'`` type code (``Py_UCS4``) for Unicode characters. + It should be used instead of the deprecated ``'u'`` type code. (Contributed by Inada Naoki in :gh:`80480`.) -* Add ``clear()`` method in order to implement ``MutableSequence``. +* Register :class:`array.array` as a :class:`~collections.abc.MutableSequence` + by implementing the :meth:`~array.array.clear` method. (Contributed by Mike Zimin in :gh:`114894`.) + ast --- -* The constructors of node types in the :mod:`ast` module are now stricter - in the arguments they accept, and have more intuitive behaviour when - arguments are omitted. +* The constructors of node types in the :mod:`ast` module are now + stricter in the arguments they accept, + with more intuitive behavior when arguments are omitted. If an optional field on an AST node is not included as an argument when constructing an instance, the field will now be set to ``None``. Similarly, if a list field is omitted, that field will now be set to an empty list, - and if a :class:`!ast.expr_context` field is omitted, it defaults to + and if an :class:`!expr_context` field is omitted, it defaults to :class:`Load() `. (Previously, in all cases, the attribute would be missing on the newly constructed AST node instance.) - If other arguments are omitted, a :exc:`DeprecationWarning` is emitted. - This will cause an exception in Python 3.15. Similarly, passing a keyword - argument that does not map to a field on the AST node is now deprecated, + In all other cases, where a required argument is omitted, + the node constructor will emit a :exc:`DeprecationWarning`. + This will raise an exception in Python 3.15. 
+ Similarly, passing a keyword argument to the constructor + that does not map to a field on the AST node is now deprecated, and will raise an exception in Python 3.15. - These changes do not apply to user-defined subclasses of :class:`ast.AST`, - unless the class opts in to the new behavior by setting the attribute - :attr:`ast.AST._field_types`. + These changes do not apply to user-defined subclasses of :class:`ast.AST` + unless the class opts in to the new behavior + by defining the :attr:`.AST._field_types` mapping. (Contributed by Jelle Zijlstra in :gh:`105858`, :gh:`117486`, and :gh:`118851`.) * :func:`ast.parse` now accepts an optional argument *optimize* - which is passed on to the :func:`compile` built-in. This makes it - possible to obtain an optimized AST. + which is passed on to :func:`compile`. + This makes it possible to obtain an optimized AST. (Contributed by Irit Katriel in :gh:`108113`.) + asyncio ------- +* :func:`asyncio.as_completed` now returns an object that is both an + :term:`asynchronous iterator` and a plain :term:`iterator` + of :term:`awaitables `. + The awaitables yielded by asynchronous iteration include original task + or future objects that were passed in, + making it easier to associate results with the tasks being completed. + (Contributed by Justin Arthur in :gh:`77714`.) + * :meth:`asyncio.loop.create_unix_server` will now automatically remove the Unix socket when the server is closed. (Contributed by Pierre Ossman in :gh:`111246`.) -* :meth:`asyncio.DatagramTransport.sendto` will now send zero-length - datagrams if called with an empty bytes object. The transport flow - control also now accounts for the datagram header when calculating - the buffer size. +* :meth:`.DatagramTransport.sendto` will now send zero-length + datagrams if called with an empty bytes object. + The transport flow control also now accounts for the datagram header + when calculating the buffer size. (Contributed by Jamie Phan in :gh:`115199`.) -* Add :meth:`asyncio.Server.close_clients` and - :meth:`asyncio.Server.abort_clients` methods which allow to more - forcefully close an asyncio server. - (Contributed by Pierre Ossman in :gh:`113538`.) +* Add :meth:`Queue.shutdown ` + and :exc:`~asyncio.QueueShutDown` to manage queue termination. + (Contributed by Laurie Opperman and Yves Duprat in :gh:`104228`.) -* :func:`asyncio.as_completed` now returns an object that is both an - :term:`asynchronous iterator` and a plain :term:`iterator` of awaitables. - The awaitables yielded by asynchronous iteration include original task or - future objects that were passed in, making it easier to associate results - with the tasks being completed. - (Contributed by Justin Arthur in :gh:`77714`.) +* Add the :meth:`.Server.close_clients` and :meth:`.Server.abort_clients` + methods, which more forcefully close an asyncio server. + (Contributed by Pierre Ossman in :gh:`113538`.) -* When :func:`asyncio.TaskGroup.create_task` is called on an inactive - :class:`asyncio.TaskGroup`, the given coroutine will be closed (which - prevents a :exc:`RuntimeWarning` about the given coroutine being - never awaited). - (Contributed by Arthur Tacca and Jason Zhang in :gh:`115957`.) +* Accept a tuple of separators in :meth:`.StreamReader.readuntil`, + stopping when any one of them is encountered. + (Contributed by Bruce Merry in :gh:`81322`.) -* Improved behavior of :class:`asyncio.TaskGroup` when an external cancellation - collides with an internal cancellation. 
For example, when two task groups - are nested and both experience an exception in a child task simultaneously, - it was possible that the outer task group would hang, because its internal - cancellation was swallowed by the inner task group. +* Improve the behavior of :class:`~asyncio.TaskGroup` when + an external cancellation collides with an internal cancellation. + For example, when two task groups are nested + and both experience an exception in a child task simultaneously, + it was possible that the outer task group would hang, + because its internal cancellation was swallowed by the inner task group. - In the case where a task group is cancelled externally and also must - raise an :exc:`ExceptionGroup`, it will now call the parent task's - :meth:`~asyncio.Task.cancel` method. This ensures that a - :exc:`asyncio.CancelledError` will be raised at the next - :keyword:`await`, so the cancellation is not lost. + In the case where a task group is cancelled externally + and also must raise an :exc:`ExceptionGroup`, + it will now call the parent task's :meth:`~asyncio.Task.cancel` method. + This ensures that a :exc:`~asyncio.CancelledError` will be raised + at the next :keyword:`await`, so the cancellation is not lost. - An added benefit of these changes is that task groups now preserve the - cancellation count (:meth:`asyncio.Task.cancelling`). + An added benefit of these changes is that task groups now preserve + the cancellation count (:meth:`~asyncio.Task.cancelling`). - In order to handle some corner cases, :meth:`asyncio.Task.uncancel` may now - reset the undocumented ``_must_cancel`` flag when the cancellation count - reaches zero. + In order to handle some corner cases, :meth:`~asyncio.Task.uncancel` may now + reset the undocumented ``_must_cancel`` flag + when the cancellation count reaches zero. (Inspired by an issue reported by Arthur Tacca in :gh:`116720`.) -* Add :meth:`asyncio.Queue.shutdown` (along with - :exc:`asyncio.QueueShutDown`) for queue termination. - (Contributed by Laurie Opperman and Yves Duprat in :gh:`104228`.) +* When :meth:`.TaskGroup.create_task` is called on an inactive + :class:`~asyncio.TaskGroup`, the given coroutine will be closed (which + prevents a :exc:`RuntimeWarning` about the given coroutine being + never awaited). + (Contributed by Arthur Tacca and Jason Zhang in :gh:`115957`.) -* Accept a tuple of separators in :meth:`asyncio.StreamReader.readuntil`, - stopping when one of them is encountered. - (Contributed by Bruce Merry in :gh:`81322`.) base64 ------ -* Add :func:`base64.z85encode` and :func:`base64.z85decode` functions which allow encoding - and decoding Z85 data. - See `Z85 specification `_ for more information. +* Add :func:`~base64.z85encode` and :func:`~base64.z85decode` functions + for encoding :class:`bytes` as `Z85 data`_ + and decoding Z85-encoded data to :class:`!bytes`. (Contributed by Matan Perelman in :gh:`75299`.) + .. _Z85 data: https://rfc.zeromq.org/spec/32/ + + +compileall +---------- + +* The default number of worker threads and processes is now selected using + :func:`os.process_cpu_count` instead of :func:`os.cpu_count`. + (Contributed by Victor Stinner in :gh:`109649`.) + + +concurrent.futures +------------------ + +* The default number of worker threads and processes is now selected using + :func:`os.process_cpu_count` instead of :func:`os.cpu_count`. + (Contributed by Victor Stinner in :gh:`109649`.) 
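The practical difference between the two functions is that
:func:`os.process_cpu_count` respects the set of CPUs the current process is
allowed to use, while :func:`os.cpu_count` reports the total number of logical
CPUs in the machine. A small illustrative session (the numbers are
hypothetical, for a process whose affinity has been restricted to four cores):

.. code-block:: pycon

   >>> import os
   >>> os.cpu_count()          # logical CPUs in the machine
   8
   >>> os.process_cpu_count()  # CPUs this process may actually use
   4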
+ + +configparser +------------ + +* :class:`~configparser.ConfigParser` now has support for unnamed sections, + which allows for top-level key-value pairs. + This can be enabled with the new *allow_unnamed_section* parameter. + (Contributed by Pedro Sousa Lacerda in :gh:`66449`.) + + copy ---- -* Add :func:`copy.replace` function which allows to create a modified copy of - an object, which is especially useful for immutable objects. - It supports named tuples created with the factory function - :func:`collections.namedtuple`, :class:`~dataclasses.dataclass` instances, - various :mod:`datetime` objects, :class:`~inspect.Signature` objects, - :class:`~inspect.Parameter` objects, :ref:`code object `, and - any user classes which define the :meth:`!__replace__` method. +* The new :func:`~copy.replace` function and the :meth:`replace protocol + ` make creating modified copies of objects much simpler. + This is especially useful when working with immutable objects. + The following types support the :func:`~copy.replace` function + and implement the replace protocol: + + * :func:`collections.namedtuple` + * :class:`dataclasses.dataclass` + * :class:`datetime.datetime`, :class:`datetime.date`, :class:`datetime.time` + * :class:`inspect.Signature`, :class:`inspect.Parameter` + * :class:`types.SimpleNamespace` + * :ref:`code objects ` + + Any user-defined class can also support :func:`copy.replace` by defining + the :meth:`~object.__replace__` method. (Contributed by Serhiy Storchaka in :gh:`108751`.) + +ctypes +------ + +* As a consequence of necessary internal refactoring, initialization of + internal metaclasses now happens in ``__init__`` rather + than in ``__new__``. This affects projects that subclass these internal + metaclasses to provide custom initialization. + Generally: + + - Custom logic that was done in ``__new__`` after calling ``super().__new__`` + should be moved to ``__init__``. + - To create a class, call the metaclass, not only the metaclass's + ``__new__`` method. + + See :gh:`124520` for discussion and links to changes in some affected + projects. + + dbm --- -* Add :meth:`dbm.gnu.gdbm.clear` and :meth:`dbm.ndbm.ndbm.clear` methods that remove all items - from the database. +* Add :mod:`dbm.sqlite3`, a new module which implements an SQLite backend, + and make it the default :mod:`!dbm` backend. + (Contributed by Raymond Hettinger and Erlend E. Aasland in :gh:`100414`.) + +* Allow removing all items from the database through + the new :meth:`.gdbm.clear` and :meth:`.ndbm.clear` methods. (Contributed by Donghee Na in :gh:`107122`.) -* Add new :mod:`dbm.sqlite3` backend, and make it the default :mod:`!dbm` backend. - (Contributed by Raymond Hettinger and Erlend E. Aasland in :gh:`100414`.) dis --- * Change the output of :mod:`dis` module functions to show logical labels for jump targets and exception handlers, rather than offsets. - The offsets can be added with the new ``-O`` command line option or - the ``show_offsets`` parameter. + The offsets can be added with the new + :option:`-O ` command-line option + or the *show_offsets* argument. (Contributed by Irit Katriel in :gh:`112137`.) -* :meth:`~dis.get_instructions` no longer represents cache entries as - separate instructions. Instead, it returns them as part of the - :class:`~dis.Instruction`, in the new *cache_info* field. The - *show_caches* argument to :meth:`~dis.get_instructions` is - deprecated and no longer has any effect. +* :meth:`~dis.get_instructions` no longer represents cache entries + as separate instructions. 
+ Instead, it returns them as part of the :class:`~dis.Instruction`, + in the new *cache_info* field. + The *show_caches* argument to :meth:`~dis.get_instructions` is deprecated + and no longer has any effect. (Contributed by Irit Katriel in :gh:`112962`.) + .. _whatsnew313-doctest: doctest ------- -* Color is added to the output by default. +* :mod:`doctest` output is now colored by default. This can be controlled via the new :envvar:`PYTHON_COLORS` environment - variable as well as the canonical |NO_COLOR|_ and |FORCE_COLOR|_ environment - variables. See also :ref:`using-on-controlling-color`. + variable as well as the canonical |NO_COLOR|_ + and |FORCE_COLOR|_ environment variables. + See also :ref:`using-on-controlling-color`. (Contributed by Hugo van Kemenade in :gh:`117225`.) -* The :meth:`doctest.DocTestRunner.run` method now counts the number of skipped - tests. Add :attr:`doctest.DocTestRunner.skips` and - :attr:`doctest.TestResults.skipped` attributes. +* The :meth:`.DocTestRunner.run` method now counts the number of skipped tests. + Add the :attr:`.DocTestRunner.skips` and :attr:`.TestResults.skipped` attributes. (Contributed by Victor Stinner in :gh:`108794`.) + email ----- -* :func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now return - ``('', '')`` 2-tuples in more situations where invalid email addresses are - encountered instead of potentially inaccurate values. Add optional *strict* - parameter to these two functions: use ``strict=False`` to get the old - behavior, accept malformed inputs. - ``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to - check if the *strict* parameter is available. +* Headers with embedded newlines are now quoted on output. + The :mod:`~email.generator` will now refuse to serialize (write) headers + that are improperly folded or delimited, such that they would be parsed as + multiple headers or joined with adjacent data. + If you need to turn this safety feature off, + set :attr:`~email.policy.Policy.verify_generated_headers`. + (Contributed by Bas Bloemsaat and Petr Viktorin in :gh:`121650`.) + +* :func:`~email.utils.getaddresses` and :func:`~email.utils.parseaddr` now + return ``('', '')`` pairs in more situations where invalid email addresses + are encountered instead of potentially inaccurate values. + The two functions have a new optional *strict* parameter (default ``True``). + To get the old behavior (accepting malformed input), use ``strict=False``. + ``getattr(email.utils, 'supports_strict_parsing', False)`` can be used + to check if the *strict* parameter is available. (Contributed by Thomas Dwyer and Victor Stinner for :gh:`102988` to improve the :cve:`2023-27043` fix.) + fractions --------- -* Formatting for objects of type :class:`fractions.Fraction` now supports - the standard format specification mini-language rules for fill, alignment, - sign handling, minimum width and grouping. (Contributed by Mark Dickinson - in :gh:`111320`.) - -gc --- +* :class:`~fractions.Fraction` objects now support the standard + :ref:`format specification mini-language ` rules + for fill, alignment, sign handling, minimum width, and grouping. + (Contributed by Mark Dickinson in :gh:`111320`.) -* The cyclic garbage collector is now incremental, which changes the meanings - of the results of :meth:`gc.get_threshold` and :meth:`gc.set_threshold` as - well as :meth:`gc.get_count` and :meth:`gc.get_stats`. - - * :meth:`gc.get_threshold` returns a three-item tuple for backwards compatibility. 
- The first value is the threshold for young collections, as before; the second - value determines the rate at which the old collection is scanned (the - default is 10, and higher values mean that the old collection is scanned more slowly). - The third value is meaningless and is always zero. - * :meth:`gc.set_threshold` ignores any items after the second. - * :meth:`gc.get_count` and :meth:`gc.get_stats` - return the same format of results as before. - The only difference is that instead of the results referring to - the young, aging and old generations, the results refer to the - young generation and the aging and collecting spaces of the old generation. - - In summary, code that attempted to manipulate the behavior of the cycle GC may - not work exactly as intended, but it is very unlikely to be harmful. - All other code will work just fine. glob ---- -* Add :func:`glob.translate` function that converts a path specification with - shell-style wildcards to a regular expression. +* Add :func:`~glob.translate`, a function to convert a path specification + with shell-style wildcards to a regular expression. (Contributed by Barney Gale in :gh:`72904`.) + importlib --------- -* Previously deprecated :mod:`importlib.resources` functions are un-deprecated: - - * :func:`~importlib.resources.is_resource()` - * :func:`~importlib.resources.open_binary()` - * :func:`~importlib.resources.open_text()` - * :func:`~importlib.resources.path()` - * :func:`~importlib.resources.read_binary()` - * :func:`~importlib.resources.read_text()` +* The following functions in :mod:`importlib.resources` now allow accessing + a directory (or tree) of resources, using multiple positional arguments + (the *encoding* and *errors* arguments in the text-reading functions + are now keyword-only): - All now allow for a directory (or tree) of resources, using multiple positional - arguments. + * :func:`~importlib.resources.is_resource` + * :func:`~importlib.resources.open_binary` + * :func:`~importlib.resources.open_text` + * :func:`~importlib.resources.path` + * :func:`~importlib.resources.read_binary` + * :func:`~importlib.resources.read_text` - For text-reading functions, the *encoding* and *errors* must now be given as - keyword arguments. + These functions are no longer deprecated and are not scheduled for removal. + (Contributed by Petr Viktorin in :gh:`106532`.) - The :func:`~importlib.resources.contents()` remains deprecated in favor of - the full-featured :class:`~importlib.resources.abc.Traversable` API. +* :func:`~importlib.resources.contents` remains deprecated in favor of + the fully-featured :class:`~importlib.resources.abc.Traversable` API. However, there is now no plan to remove it. - (Contributed by Petr Viktorin in :gh:`106532`.) + io -- -* The :class:`io.IOBase` finalizer now logs the ``close()`` method errors with - :data:`sys.unraisablehook`. Previously, errors were ignored silently by default, - and only logged in :ref:`Python Development Mode ` or on :ref:`Python - built on debug mode `. +* The :class:`~io.IOBase` finalizer now logs any errors raised by + the :meth:`~io.IOBase.close` method with :data:`sys.unraisablehook`. + Previously, errors were ignored silently by default, + and only logged in :ref:`Python Development Mode ` + or when using a :ref:`Python debug build `. (Contributed by Victor Stinner in :gh:`62948`.) + ipaddress --------- -* Add the :attr:`ipaddress.IPv4Address.ipv6_mapped` property, which returns the IPv4-mapped IPv6 address. 
+* Add the :attr:`.IPv4Address.ipv6_mapped` property, + which returns the IPv4-mapped IPv6 address. (Contributed by Charles Machalow in :gh:`109466`.) + * Fix ``is_global`` and ``is_private`` behavior in - :class:`~ipaddress.IPv4Address`, - :class:`~ipaddress.IPv6Address`, - :class:`~ipaddress.IPv4Network` and - :class:`~ipaddress.IPv6Network`. + :class:`~ipaddress.IPv4Address`, :class:`~ipaddress.IPv6Address`, + :class:`~ipaddress.IPv4Network`, and :class:`~ipaddress.IPv6Network`. + (Contributed by Jakub Stasiak in :gh:`113171`.) + itertools --------- -* Added a ``strict`` option to :func:`itertools.batched`. - This raises a :exc:`ValueError` if the final batch is shorter +* :func:`~itertools.batched` has a new *strict* parameter, + which raises a :exc:`ValueError` if the final batch is shorter than the specified batch size. (Contributed by Raymond Hettinger in :gh:`113202`.) + marshal ------- * Add the *allow_code* parameter in module functions. - Passing ``allow_code=False`` prevents serialization and de-serialization of - code objects which are incompatible between Python versions. + Passing ``allow_code=False`` prevents serialization and de-serialization + of code objects which are incompatible between Python versions. (Contributed by Serhiy Storchaka in :gh:`113626`.) + math ---- -* A new function :func:`~math.fma` for fused multiply-add operations has been - added. This function computes ``x * y + z`` with only a single round, and so - avoids any intermediate loss of precision. It wraps the ``fma()`` function - provided by C99, and follows the specification of the IEEE 754 - "fusedMultiplyAdd" operation for special cases. +* The new function :func:`~math.fma` performs fused multiply-add operations. + This computes ``x * y + z`` with only a single round, + and so avoids any intermediate loss of precision. + It wraps the ``fma()`` function provided by C99, + and follows the specification of the IEEE 754 "fusedMultiplyAdd" operation + for special cases. (Contributed by Mark Dickinson and Victor Stinner in :gh:`73468`.) + mimetypes --------- -* Add the :func:`~mimetypes.guess_file_type` function which works with file path. - Passing file path instead of URL in :func:`~mimetypes.guess_type` is :term:`soft deprecated`. +* Add the :func:`~mimetypes.guess_file_type` function to guess a MIME type + from a filesystem path. + Using paths with :func:`~mimetypes.guess_type` is now :term:`soft deprecated`. (Contributed by Serhiy Storchaka in :gh:`66543`.) + mmap ---- -* The :class:`mmap.mmap` class now has an :meth:`~mmap.mmap.seekable` method +* :class:`~mmap.mmap` is now protected from crashing on Windows when the + mapped memory is inaccessible due to file system errors or access violations. + (Contributed by Jannis Weigend in :gh:`118209`.) + +* :class:`~mmap.mmap` has a new :meth:`~mmap.mmap.seekable` method that can be used when a seekable file-like object is required. The :meth:`~mmap.mmap.seek` method now returns the new absolute position. (Contributed by Donghee Na and Sylvie Liberman in :gh:`111835`.) -* :class:`mmap.mmap` now has a *trackfd* parameter on Unix; if it is ``False``, - the file descriptor specified by *fileno* will not be duplicated. + +* The new UNIX-only *trackfd* parameter for :class:`~mmap.mmap` controls + file descriptor duplication; + if false, the file descriptor specified by *fileno* will not be duplicated. (Contributed by Zackery Spytz and Petr Viktorin in :gh:`78502`.) 
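+
+For example, a small sketch of the new :meth:`!seekable` and :meth:`!seek`
+behaviour (the file contents here are purely illustrative):
+
+.. code-block:: python
+
+   import mmap
+   import tempfile
+
+   with tempfile.TemporaryFile() as f:
+       f.write(b"hello world")
+       f.flush()
+       with mmap.mmap(f.fileno(), 0) as m:
+           print(m.seekable())  # True; mmap objects now report seekability
+           print(m.seek(6))     # seek() now returns the new absolute position: 6
+           print(m.read(5))     # b'world'
+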
-* :class:`mmap.mmap` is now protected from crashing on Windows when the mapped memory - is inaccessible due to file system errors or access violations. - (Contributed by Jannis Weigend in :gh:`118209`.) -opcode ------- -* Move ``opcode.ENABLE_SPECIALIZATION`` to ``_opcode.ENABLE_SPECIALIZATION``. - This field was added in 3.12, it was never documented and is not intended for - external usage. (Contributed by Irit Katriel in :gh:`105481`.) +multiprocessing +--------------- + +* The default number of worker threads and processes is now selected using + :func:`os.process_cpu_count` instead of :func:`os.cpu_count`. + (Contributed by Victor Stinner in :gh:`109649`.) -* Removed ``opcode.is_pseudo``, ``opcode.MIN_PSEUDO_OPCODE`` and - ``opcode.MAX_PSEUDO_OPCODE``, which were added in 3.12, were never - documented or exposed through ``dis``, and were not intended to be - used externally. os -- -* Add :func:`os.process_cpu_count` function to get the number of logical CPUs - usable by the calling thread of the current process. +* Add :func:`~os.process_cpu_count` function to get the number + of logical CPU cores usable by the calling thread of the current process. (Contributed by Victor Stinner in :gh:`109649`.) -* Add a low level interface for Linux's timer notification file descriptors - via :func:`os.timerfd_create`, - :func:`os.timerfd_settime`, :func:`os.timerfd_settime_ns`, - :func:`os.timerfd_gettime`, and :func:`os.timerfd_gettime_ns`, - :const:`os.TFD_NONBLOCK`, :const:`os.TFD_CLOEXEC`, - :const:`os.TFD_TIMER_ABSTIME`, and :const:`os.TFD_TIMER_CANCEL_ON_SET` - (Contributed by Masaru Tsuchiyama in :gh:`108277`.) - -* :func:`os.cpu_count` and :func:`os.process_cpu_count` can be overridden through - the new environment variable :envvar:`PYTHON_CPU_COUNT` or the new command-line option - :option:`-X cpu_count <-X>`. This option is useful for users who need to limit - CPU resources of a container system without having to modify the container (application code). +* :func:`~os.cpu_count` and :func:`~os.process_cpu_count` can be overridden + through the new environment variable :envvar:`PYTHON_CPU_COUNT` + or the new command-line option :option:`-X cpu_count <-X>`. + This option is useful for users who need to limit CPU resources + of a container system without having to modify application code + or the container itself. (Contributed by Donghee Na in :gh:`109595`.) -* Add support of :func:`os.lchmod` and the *follow_symlinks* argument - in :func:`os.chmod` on Windows. - Note that the default value of *follow_symlinks* in :func:`!os.lchmod` is - ``False`` on Windows. +* Add a :ref:`low level interface ` to Linux's + :manpage:`timer file descriptors ` + via :func:`~os.timerfd_create`, + :func:`~os.timerfd_settime`, :func:`~os.timerfd_settime_ns`, + :func:`~os.timerfd_gettime`, :func:`~os.timerfd_gettime_ns`, + :const:`~os.TFD_NONBLOCK`, :const:`~os.TFD_CLOEXEC`, + :const:`~os.TFD_TIMER_ABSTIME`, and :const:`~os.TFD_TIMER_CANCEL_ON_SET` + (Contributed by Masaru Tsuchiyama in :gh:`108277`.) + +* :func:`~os.lchmod` and the *follow_symlinks* argument of :func:`~os.chmod` + are both now available on Windows. + Note that the default value of *follow_symlinks* + in :func:`!lchmod` is ``False`` on Windows. (Contributed by Serhiy Storchaka in :gh:`59616`.) -* Add support of :func:`os.fchmod` and a file descriptor - in :func:`os.chmod` on Windows. +* :func:`~os.fchmod` and support for file descriptors in :func:`~os.chmod` + are both now available on Windows. (Contributed by Serhiy Storchaka in :gh:`113191`.) 
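+
+  For instance, a brief sketch of passing a file descriptor straight to
+  :func:`!chmod` (``settings.cfg`` is just an illustrative file name):
+
+  .. code-block:: python
+
+     import os
+     import stat
+
+     fd = os.open("settings.cfg", os.O_RDWR | os.O_CREAT)
+     try:
+         # A file descriptor is now accepted on Windows too,
+         # in addition to the existing Unix support.
+         os.chmod(fd, stat.S_IREAD | stat.S_IWRITE)
+     finally:
+         os.close(fd)
+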
-* :func:`os.posix_spawn` now accepts ``env=None``, which makes the newly spawned - process use the current process environment. +* On Windows, :func:`~os.mkdir` and :func:`~os.makedirs` now support passing + a *mode* value of ``0o700`` to apply access control to the new directory. + This implicitly affects :func:`tempfile.mkdtemp` + and is a mitigation for :cve:`2024-4030`. + Other values for *mode* continue to be ignored. + (Contributed by Steve Dower in :gh:`118486`.) + +* :func:`~os.posix_spawn` now accepts ``None`` for the *env* argument, + which makes the newly spawned process use the current process environment. (Contributed by Jakub Kulik in :gh:`113119`.) -* :func:`os.posix_spawn` gains an :attr:`os.POSIX_SPAWN_CLOSEFROM` attribute for - use in ``file_actions=`` on platforms that support +* :func:`~os.posix_spawn` can now use the :attr:`~os.POSIX_SPAWN_CLOSEFROM` + attribute in the *file_actions* parameter on platforms that support :c:func:`!posix_spawn_file_actions_addclosefrom_np`. (Contributed by Jakub Kulik in :gh:`113117`.) -* :func:`os.mkdir` and :func:`os.makedirs` on Windows now support passing a - *mode* value of ``0o700`` to apply access control to the new directory. This - implicitly affects :func:`tempfile.mkdtemp` and is a mitigation for - :cve:`2024-4030`. Other values for *mode* continue to be ignored. - (Contributed by Steve Dower in :gh:`118486`.) os.path ------- -* Add :func:`os.path.isreserved` to check if a path is reserved on the current - system. This function is only available on Windows. +* Add :func:`~os.path.isreserved` to check if a path is reserved + on the current system. + This function is only available on Windows. (Contributed by Barney Gale in :gh:`88569`.) -* On Windows, :func:`os.path.isabs` no longer considers paths starting with - exactly one (back)slash to be absolute. + +* On Windows, :func:`~os.path.isabs` no longer considers paths + starting with exactly one slash (``\`` or ``/``) to be absolute. (Contributed by Barney Gale and Jon Foster in :gh:`44626`.) -* Add support of *dir_fd* and *follow_symlinks* keyword arguments in - :func:`shutil.chown`. - (Contributed by Berker Peksag and Tahia K in :gh:`62308`) +* :func:`~os.path.realpath` now resolves MS-DOS style file names + even if the file is not accessible. + (Contributed by Moonsik Park in :gh:`82367`.) + pathlib ------- -* Add :exc:`pathlib.UnsupportedOperation`, which is raised instead of +* Add :exc:`~pathlib.UnsupportedOperation`, which is raised instead of :exc:`NotImplementedError` when a path operation isn't supported. (Contributed by Barney Gale in :gh:`89812`.) -* Add :meth:`pathlib.Path.from_uri`, a new constructor to create a :class:`pathlib.Path` - object from a 'file' URI (``file://``). +* Add a new constructor for creating :class:`~pathlib.Path` objects + from 'file' URIs (``file:///``), :meth:`.Path.from_uri`. (Contributed by Barney Gale in :gh:`107465`.) -* Add :meth:`pathlib.PurePath.full_match` for matching paths with +* Add :meth:`.PurePath.full_match` for matching paths with shell-style wildcards, including the recursive wildcard "``**``". (Contributed by Barney Gale in :gh:`73435`.) -* Add :attr:`pathlib.PurePath.parser` class attribute that stores the - implementation of :mod:`os.path` used for low-level path parsing and - joining: either ``posixpath`` or ``ntpath``. +* Add the :attr:`.PurePath.parser` class attribute to store the + implementation of :mod:`os.path` used + for low-level path parsing and joining. 
+ This will be either :mod:`!posixpath` or :mod:`!ntpath`. -* Add *recurse_symlinks* keyword-only argument to :meth:`pathlib.Path.glob` - and :meth:`~pathlib.Path.rglob`. +* Add *recurse_symlinks* keyword-only argument to + :meth:`.Path.glob` and :meth:`~pathlib.Path.rglob`. (Contributed by Barney Gale in :gh:`77609`.) -* Add *follow_symlinks* keyword-only argument to :meth:`~pathlib.Path.is_file`, - :meth:`~pathlib.Path.is_dir`, :meth:`~pathlib.Path.owner`, - :meth:`~pathlib.Path.group`. - (Contributed by Barney Gale in :gh:`105793`, and Kamil Turek in - :gh:`107962`.) - -* Return files and directories from :meth:`pathlib.Path.glob` and - :meth:`~pathlib.Path.rglob` when given a pattern that ends with "``**``". In - earlier versions, only directories were returned. +* :meth:`.Path.glob` and :meth:`~pathlib.Path.rglob` + now return files and directories when given a pattern that ends with "``**``". + Previously, only directories were returned. (Contributed by Barney Gale in :gh:`70303`.) +* Add the *follow_symlinks* keyword-only argument to + :meth:`Path.is_file `, + :meth:`Path.is_dir `, + :meth:`.Path.owner`, and :meth:`.Path.group`. + (Contributed by Barney Gale in :gh:`105793` and Kamil Turek in :gh:`107962`.) + + pdb --- -* Add ability to move between chained exceptions during post mortem debugging in :func:`~pdb.pm` using - the new ``exceptions [exc_number]`` command for Pdb. (Contributed by Matthias - Bussonnier in :gh:`106676`.) - -* Expressions/statements whose prefix is a pdb command are now correctly - identified and executed. - (Contributed by Tian Gao in :gh:`108464`.) +* :func:`breakpoint` and :func:`~pdb.set_trace` now enter the debugger immediately + rather than on the next line of code to be executed. This change prevents the + debugger from breaking outside of the context when :func:`!breakpoint` is positioned + at the end of the context. + (Contributed by Tian Gao in :gh:`118579`.) -* ``sys.path[0]`` will no longer be replaced by the directory of the script - being debugged when ``sys.flags.safe_path`` is set (via the :option:`-P` - command line option or :envvar:`PYTHONSAFEPATH` environment variable). +* ``sys.path[0]`` is no longer replaced by the directory of the script + being debugged when :attr:`sys.flags.safe_path` is set. (Contributed by Tian Gao and Christian Walther in :gh:`111762`.) -* :mod:`zipapp` is supported as a debugging target. +* :mod:`zipapp` is now supported as a debugging target. (Contributed by Tian Gao in :gh:`118501`.) -* ``breakpoint()`` and ``pdb.set_trace()`` now enter the debugger immediately - rather than on the next line of code to be executed. This change prevents the - debugger from breaking outside of the context when ``breakpoint()`` is positioned - at the end of the context. - (Contributed by Tian Gao in :gh:`118579`.) +* Add ability to move between chained exceptions during + post-mortem debugging in :func:`~pdb.pm` using + the new :pdbcmd:`exceptions [exc_number] ` command for Pdb. + (Contributed by Matthias Bussonnier in :gh:`106676`.) + +* Expressions and statements whose prefix is a pdb command are now correctly + identified and executed. + (Contributed by Tian Gao in :gh:`108464`.) + queue ----- -* Add :meth:`queue.Queue.shutdown` (along with :exc:`queue.ShutDown`) for queue - termination. +* Add :meth:`Queue.shutdown ` and :exc:`~queue.ShutDown` + to manage queue termination. (Contributed by Laurie Opperman and Yves Duprat in :gh:`104750`.) + random ------ * Add a :ref:`command-line interface `. 
(Contributed by Hugo van Kemenade in :gh:`118131`.) + re -- -* Rename :exc:`!re.error` to :exc:`re.PatternError` for improved clarity. + +* Rename :exc:`!re.error` to :exc:`~re.PatternError` for improved clarity. :exc:`!re.error` is kept for backward compatibility. -site ----- -* :file:`.pth` files are now decoded by UTF-8 first, and then by the - :term:`locale encoding` if the UTF-8 decoding fails. +shutil +------ + +* Support the *dir_fd* and *follow_symlinks* keyword arguments + in :func:`~shutil.chown`. + (Contributed by Berker Peksag and Tahia K in :gh:`62308`) + + +site +---- + +* :file:`.pth` files are now decoded using UTF-8 first, + and then with the :term:`locale encoding` if UTF-8 decoding fails. (Contributed by Inada Naoki in :gh:`117802`.) + sqlite3 ------- -* A :exc:`ResourceWarning` is now emitted if a :class:`sqlite3.Connection` +* A :exc:`ResourceWarning` is now emitted if a :class:`~sqlite3.Connection` object is not :meth:`closed ` explicitly. (Contributed by Erlend E. Aasland in :gh:`105539`.) -* Add *filter* keyword-only parameter to :meth:`sqlite3.Connection.iterdump` +* Add the *filter* keyword-only parameter to :meth:`.Connection.iterdump` for filtering database objects to dump. (Contributed by Mariusz Felisiak in :gh:`91602`.) + +ssl +--- + +* The :func:`~ssl.create_default_context` API now includes + :data:`~ssl.VERIFY_X509_PARTIAL_CHAIN` and :data:`~ssl.VERIFY_X509_STRICT` + in its default flags. + + .. note:: + + :data:`~ssl.VERIFY_X509_STRICT` may reject pre-:rfc:`5280` + or malformed certificates that the underlying OpenSSL implementation + might otherwise accept. + Whilst disabling this is not recommended, you can do so using: + + .. code-block:: python + + import ssl + + ctx = ssl.create_default_context() + ctx.verify_flags &= ~ssl.VERIFY_X509_STRICT + + (Contributed by William Woodruff in :gh:`112389`.) + + statistics ---------- -* Add :func:`statistics.kde` for kernel density estimation. +* Add :func:`~statistics.kde` for kernel density estimation. This makes it possible to estimate a continuous probability density function - from a fixed number of discrete samples. Also added :func:`statistics.kde_random` - for sampling from the estimated probability density function. + from a fixed number of discrete samples. + (Contributed by Raymond Hettinger in :gh:`115863`.) + +* Add :func:`~statistics.kde_random` for sampling from an + estimated probability density function created by :func:`~statistics.kde`. (Contributed by Raymond Hettinger in :gh:`115863`.) + .. _whatsnew313-subprocess: subprocess ---------- -* The :mod:`subprocess` module now uses the :func:`os.posix_spawn` function in - more situations. Notably in the default case of ``close_fds=True`` on more - recent versions of platforms including Linux, FreeBSD, and Solaris where the - C library provides :c:func:`!posix_spawn_file_actions_addclosefrom_np`. - On Linux this should perform similar to our existing Linux :c:func:`!vfork` - based code. A private control knob :attr:`!subprocess._USE_POSIX_SPAWN` can - be set to ``False`` if you need to force :mod:`subprocess` not to ever use - :func:`os.posix_spawn`. Please report your reason and platform details in - the CPython issue tracker if you set this so that we can improve our API - selection logic for everyone. +* The :mod:`subprocess` module now uses the :func:`~os.posix_spawn` function in + more situations. 
+ + Notably, when *close_fds* is ``True`` (the default), + :func:`~os.posix_spawn` will be used when the C library provides + :c:func:`!posix_spawn_file_actions_addclosefrom_np`, + which includes recent versions of Linux, FreeBSD, and Solaris. + On Linux, this should perform similarly to the existing + Linux :c:func:`!vfork` based code. + + A private control knob :attr:`!subprocess._USE_POSIX_SPAWN` can + be set to ``False`` if you need to force :mod:`subprocess` + to never use :func:`~os.posix_spawn`. + Please report your reason and platform details in + the :ref:`issue tracker ` if you set this + so that we can improve our API selection logic for everyone. (Contributed by Jakub Kulik in :gh:`113117`.) + sys --- -* Add the :func:`sys._is_interned` function to test if the string was interned. +* Add the :func:`~sys._is_interned` function to test if a string was interned. This function is not guaranteed to exist in all implementations of Python. (Contributed by Serhiy Storchaka in :gh:`78573`.) + tempfile -------- * On Windows, the default mode ``0o700`` used by :func:`tempfile.mkdtemp` now - limits access to the new directory due to changes to :func:`os.mkdir`. This - is a mitigation for :cve:`2024-4030`. + limits access to the new directory due to changes to :func:`os.mkdir`. + This is a mitigation for :cve:`2024-4030`. (Contributed by Steve Dower in :gh:`118486`.) + time ---- -* On Windows, :func:`time.monotonic()` now uses the - ``QueryPerformanceCounter()`` clock to have a resolution better than 1 us, - instead of the ``GetTickCount64()`` clock which has a resolution of 15.6 ms. +* On Windows, :func:`~time.monotonic` now uses the + ``QueryPerformanceCounter()`` clock for a resolution of 1 microsecond, + instead of the ``GetTickCount64()`` clock which has + a resolution of 15.6 milliseconds. (Contributed by Victor Stinner in :gh:`88494`.) -* On Windows, :func:`time.time()` now uses the - ``GetSystemTimePreciseAsFileTime()`` clock to have a resolution better - than 1 μs, instead of the ``GetSystemTimeAsFileTime()`` clock which has a - resolution of 15.6 ms. +* On Windows, :func:`~time.time` now uses the + ``GetSystemTimePreciseAsFileTime()`` clock for a resolution of 1 microsecond, + instead of the ``GetSystemTimeAsFileTime()`` clock which has + a resolution of 15.6 milliseconds. (Contributed by Victor Stinner in :gh:`63207`.) @@ -1093,20 +1300,21 @@ tkinter * The :mod:`tkinter` widget method :meth:`!wm_attributes` now accepts the attribute name without the minus prefix to get window attributes, - e.g. ``w.wm_attributes('alpha')`` and allows to specify attributes and - values to set as keyword arguments, e.g. ``w.wm_attributes(alpha=0.5)``. - Add new optional keyword-only parameter *return_python_dict*: calling - ``w.wm_attributes(return_python_dict=True)`` returns the attributes as - a dict instead of a tuple. + for example ``w.wm_attributes('alpha')`` + and allows specifying attributes and values to set as keyword arguments, + for example ``w.wm_attributes(alpha=0.5)``. + (Contributed by Serhiy Storchaka in :gh:`43457`.) + +* :meth:`!wm_attributes` can now return attributes as a :class:`dict`, + by using the new optional keyword-only parameter *return_python_dict*. (Contributed by Serhiy Storchaka in :gh:`43457`.) -* Add new optional keyword-only parameter *return_ints* in - the :meth:`!Text.count` method. - Passing ``return_ints=True`` makes it always returning the single count - as an integer instead of a 1-tuple or ``None``. 
+* :meth:`!Text.count` can now return a simple :class:`int` + when the new optional keyword-only parameter *return_ints* is used. + Otherwise, the single count is returned as a 1-tuple or ``None``. (Contributed by Serhiy Storchaka in :gh:`97928`.) -* Add support of the "vsapi" element type in +* Support the "vsapi" element type in the :meth:`~tkinter.ttk.Style.element_create` method of :class:`tkinter.ttk.Style`. (Contributed by Serhiy Storchaka in :gh:`68166`.) @@ -1114,307 +1322,375 @@ tkinter * Add the :meth:`!after_info` method for Tkinter widgets. (Contributed by Cheryl Sabella in :gh:`77020`.) -* Add the :class:`!PhotoImage` method :meth:`!copy_replace` to copy a region - from one image to other image, possibly with pixel zooming and/or - subsampling. - Add *from_coords* parameter to :class:`!PhotoImage` methods :meth:`!copy()`, - :meth:`!zoom()` and :meth:`!subsample()`. - Add *zoom* and *subsample* parameters to :class:`!PhotoImage` method - :meth:`!copy()`. +* Add a new :meth:`!copy_replace` method to :class:`!PhotoImage` + to copy a region from one image to another, + possibly with pixel zooming, subsampling, or both. (Contributed by Serhiy Storchaka in :gh:`118225`.) -* Add the :class:`!PhotoImage` methods :meth:`!read` to read - an image from a file and :meth:`!data` to get the image data. - Add *background* and *grayscale* parameters to :class:`!PhotoImage` method - :meth:`!write`. +* Add *from_coords* parameter to the :class:`!PhotoImage` methods + :meth:`!copy`, :meth:`!zoom` and :meth:`!subsample`. + Add *zoom* and *subsample* parameters to the :class:`!PhotoImage` method + :meth:`!copy`. + (Contributed by Serhiy Storchaka in :gh:`118225`.) + +* Add the :class:`!PhotoImage` methods + :meth:`!read` to read an image from a file + and :meth:`!data` to get the image data. + Add *background* and *grayscale* parameters to the :meth:`!write` method. (Contributed by Serhiy Storchaka in :gh:`118271`.) + traceback --------- -* Add *show_group* parameter to :func:`traceback.TracebackException.format_exception_only` - to format the nested exceptions of a :exc:`BaseExceptionGroup` instance, recursively. +* Add the :attr:`~traceback.TracebackException.exc_type_str` attribute + to :class:`~traceback.TracebackException`, + which holds a string display of the *exc_type*. + Deprecate the :attr:`~traceback.TracebackException.exc_type` attribute, + which holds the type object itself. + Add parameter *save_exc_type* (default ``True``) + to indicate whether ``exc_type`` should be saved. + (Contributed by Irit Katriel in :gh:`112332`.) + +* Add a new *show_group* keyword-only parameter to + :meth:`.TracebackException.format_exception_only` to (recursively) format + the nested exceptions of a :exc:`BaseExceptionGroup` instance. (Contributed by Irit Katriel in :gh:`105292`.) -* Add the field *exc_type_str* to :class:`~traceback.TracebackException`, which - holds a string display of the *exc_type*. Deprecate the field *exc_type* - which holds the type object itself. Add parameter *save_exc_type* (default - ``True``) to indicate whether ``exc_type`` should be saved. - (Contributed by Irit Katriel in :gh:`112332`.) types ----- -* :class:`~types.SimpleNamespace` constructor now allows specifying initial - values of attributes as a positional argument which must be a mapping or - an iterable of key-value pairs. +* :class:`~types.SimpleNamespace` can now take a single positional argument + to initialise the namespace's arguments. + This argument must either be a mapping or an iterable of key-value pairs. 
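+
+  For example, a tiny sketch (the attribute names are arbitrary):
+
+  .. code-block:: python
+
+     from types import SimpleNamespace
+
+     # The positional argument may be a mapping or an iterable of pairs;
+     # keyword arguments can still be combined with it.
+     ns = SimpleNamespace({"host": "localhost", "port": 8080}, debug=True)
+     print(ns.host, ns.port, ns.debug)  # localhost 8080 True
+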
(Contributed by Serhiy Storchaka in :gh:`108191`.) + typing ------ -* Add :func:`typing.get_protocol_members` to return the set of members - defining a :class:`typing.Protocol`. Add :func:`typing.is_protocol` to - check whether a class is a :class:`typing.Protocol`. (Contributed by Jelle Zijlstra in - :gh:`104873`.) +* :pep:`705`: Add :data:`~typing.ReadOnly`, a special typing construct + to mark a :class:`~typing.TypedDict` item as read-only for type checkers. + +* :pep:`742`: Add :data:`~typing.TypeIs`, a typing construct + that can be used to instruct a type checker how to narrow a type. + +* Add :data:`~typing.NoDefault`, a sentinel object used to represent + the defaults of some parameters in the :mod:`typing` module. + (Contributed by Jelle Zijlstra in :gh:`116126`.) + +* Add :func:`~typing.get_protocol_members` to return the set of members + defining a :class:`typing.Protocol`. + (Contributed by Jelle Zijlstra in :gh:`104873`.) + +* Add :func:`~typing.is_protocol` to check whether a class + is a :class:`~typing.Protocol`. + (Contributed by Jelle Zijlstra in :gh:`104873`.) -* Add :data:`typing.ReadOnly`, a special typing construct to mark - an item of a :class:`typing.TypedDict` as read-only for type checkers. - See :pep:`705` for more details. +* :data:`~typing.ClassVar` can now be nested in :data:`~typing.Final`, + and vice versa. + (Contributed by Mehdi Drissi in :gh:`89547`.) -* Add :data:`typing.NoDefault`, a sentinel object used to represent the defaults - of some parameters in the :mod:`typing` module. (Contributed by Jelle Zijlstra in - :gh:`116126`.) unicodedata ----------- -* The Unicode database has been updated to version 15.1.0. (Contributed by - James Gerity in :gh:`109559`.) +* Update the Unicode database to `version 15.1.0`__. + (Contributed by James Gerity in :gh:`109559`.) + + __ https://www.unicode.org/versions/Unicode15.1.0/ + venv ---- -* Add support for adding source control management (SCM) ignore files to a - virtual environment's directory. By default, Git is supported. This is - implemented as opt-in via the API which can be extended to support other SCMs - (:class:`venv.EnvBuilder` and :func:`venv.create`), and opt-out via the CLI - (using ``--without-scm-ignore-files``). (Contributed by Brett Cannon in - :gh:`108125`.) +* Add support for creating source control management (SCM) ignore files + in a virtual environment's directory. + By default, Git is supported. + This is implemented as opt-in via the API, + which can be extended to support other SCMs + (:class:`~venv.EnvBuilder` and :func:`~venv.create`), + and opt-out via the CLI, using :option:`!--without-scm-ignore-files`. + (Contributed by Brett Cannon in :gh:`108125`.) + warnings -------- -* The new :func:`warnings.deprecated` decorator provides a way to communicate - deprecations to :term:`static type checkers ` and - to warn on usage of deprecated classes and functions. A runtime deprecation - warning may also be emitted when a decorated function or class is used at runtime. - See :pep:`702`. (Contributed by Jelle Zijlstra in :gh:`104003`.) +* :pep:`702`: The new :func:`warnings.deprecated` decorator provides a way to + communicate deprecations to a :term:`static type checker` + and to warn on usage of deprecated classes and functions. + A :exc:`DeprecationWarning` may also be emitted when + a decorated function or class is used at runtime. + (Contributed by Jelle Zijlstra in :gh:`104003`.) 
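+
+  For example, a sketch of the decorator in use
+  (``transmogrify`` and its suggested replacement are made-up names):
+
+  .. code-block:: python
+
+     from warnings import deprecated
+
+     @deprecated("Use transmogrify_v2() instead")
+     def transmogrify(value):
+         return value * 2
+
+     transmogrify(3)  # emits DeprecationWarning; static type checkers flag it too
+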
-xml.etree.ElementTree ---------------------- + +xml +--- + +* Allow controlling Expat >=2.6.0 reparse deferral (:cve:`2023-52425`) + by adding five new methods: + + * :meth:`xml.etree.ElementTree.XMLParser.flush` + * :meth:`xml.etree.ElementTree.XMLPullParser.flush` + * :meth:`xml.parsers.expat.xmlparser.GetReparseDeferralEnabled` + * :meth:`xml.parsers.expat.xmlparser.SetReparseDeferralEnabled` + * :meth:`!xml.sax.expatreader.ExpatParser.flush` + + (Contributed by Sebastian Pipping in :gh:`115623`.) * Add the :meth:`!close` method for the iterator returned by - :func:`~xml.etree.ElementTree.iterparse` for explicit cleaning up. + :func:`~xml.etree.ElementTree.iterparse` for explicit cleanup. (Contributed by Serhiy Storchaka in :gh:`69893`.) + zipimport --------- -* Gains support for ZIP64 format files. Everybody loves huge code right? +* Add support for ZIP64_ format files. + Everybody loves huge data, right? (Contributed by Tim Hatch in :gh:`94146`.) -.. Add improved modules above alphabetically, not here at the end. + .. _ZIP64: https://en.wikipedia.org/wiki/Zip_(file_format)#ZIP64 + Optimizations ============= -* :func:`textwrap.indent` is now ~30% faster than before for large input. +* Several standard library modules have had + their import times significantly improved. + For example, the import time of the :mod:`typing` module + has been reduced by around a third by removing dependencies + on :mod:`re` and :mod:`contextlib`. + Other modules to enjoy import-time speedups include + :mod:`email.utils`, :mod:`enum`, :mod:`functools`, + :mod:`importlib.metadata`, and :mod:`threading`. + (Contributed by Alex Waygood, Shantanu Jain, Adam Turner, Daniel Hollas, + and others in :gh:`109653`.) + +* :func:`textwrap.indent` is now around 30% faster than before for large input. (Contributed by Inada Naoki in :gh:`107369`.) -* The :mod:`subprocess` module uses :func:`os.posix_spawn` in more situations - including the default where ``close_fds=True`` on many modern platforms. This - should provide a noteworthy performance increase launching processes on - FreeBSD and Solaris. See the :ref:`subprocess ` - section above for details. +* The :mod:`subprocess` module now uses the :func:`~os.posix_spawn` function in + more situations, including when *close_fds* is ``True`` (the default) + on many modern platforms. + This should provide a notable performance increase + when launching processes on FreeBSD and Solaris. + See the :ref:`subprocess ` section above for details. (Contributed by Jakub Kulik in :gh:`113117`.) -* Several standard library modules have had their import times significantly - improved. For example, the import time of the :mod:`typing` module has been - reduced by around a third by removing dependencies on :mod:`re` and - :mod:`contextlib`. Other modules to enjoy import-time speedups include - :mod:`importlib.metadata`, :mod:`threading`, :mod:`enum`, :mod:`functools` - and :mod:`email.utils`. - (Contributed by Alex Waygood, Shantanu Jain, Adam Turner, Daniel Hollas and - others in :gh:`109653`.) Removed Modules And APIs ======================== -.. _whatsnew313-pep594: - -PEP 594: dead batteries (and other module removals) ---------------------------------------------------- - -* :pep:`594` removed 19 modules from the standard library, - deprecated in Python 3.11: - - * :mod:`!aifc`. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!audioop`. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!chunk`. - (Contributed by Victor Stinner in :gh:`104773`.) 
- - * :mod:`!cgi` and :mod:`!cgitb`. - - * ``cgi.FieldStorage`` can typically be replaced with - :func:`urllib.parse.parse_qsl` for ``GET`` and ``HEAD`` requests, - and the :mod:`email.message` module or :pypi:`multipart` - PyPI project for ``POST`` and ``PUT``. - - * ``cgi.parse()`` can be replaced by calling :func:`urllib.parse.parse_qs` - directly on the desired query string, except for ``multipart/form-data`` - input, which can be handled as described for ``cgi.parse_multipart()``. - - * ``cgi.parse_header()`` can be replaced with the functionality in the - :mod:`email` package, which implements the same MIME RFCs. For example, - with :class:`email.message.EmailMessage`:: - - from email.message import EmailMessage - msg = EmailMessage() - msg['content-type'] = 'application/json; charset="utf8"' - main, params = msg.get_content_type(), msg['content-type'].params - - * ``cgi.parse_multipart()`` can be replaced with the functionality in the - :mod:`email` package (e.g. :class:`email.message.EmailMessage` and - :class:`email.message.Message`) which implements the same MIME RFCs, or - with the :pypi:`multipart` PyPI project. - - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!crypt` module and its private :mod:`!_crypt` extension. - The :mod:`hashlib` module is a potential replacement for certain use cases. - Otherwise, the following PyPI projects can be used: - - * :pypi:`bcrypt`: - Modern password hashing for your software and your servers. - * :pypi:`passlib`: - Comprehensive password hashing framework supporting over 30 schemes. - * :pypi:`argon2-cffi`: - The secure Argon2 password hashing algorithm. - * :pypi:`legacycrypt`: - :mod:`ctypes` wrapper to the POSIX crypt library call and associated functionality. - * :pypi:`crypt_r`: - Fork of the :mod:`!crypt` module, wrapper to the :manpage:`crypt_r(3)` library - call and associated functionality. - - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!imghdr`: use the projects :pypi:`filetype`, - :pypi:`puremagic`, or :pypi:`python-magic` instead. - The ``puremagic.what()`` function can be used to replace - the ``imghdr.what()`` function for all file formats that - were supported by ``imghdr``. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!mailcap`. - The :mod:`mimetypes` module provides an alternative. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!msilib`. - (Contributed by Zachary Ware in :gh:`104773`.) - - * :mod:`!nis`. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!nntplib`: - the :pypi:`nntplib` PyPI project can be used instead. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!ossaudiodev`: use the - `pygame project `_ for audio playback. - (Contributed by Victor Stinner in :gh:`104780`.) - * :mod:`!pipes`: use the :mod:`subprocess` module instead. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!sndhdr`: use the projects :pypi:`filetype`, - :pypi:`puremagic`, or :pypi:`python-magic` instead. - (Contributed by Victor Stinner in :gh:`104773`.) - - * :mod:`!spwd`: - the :pypi:`python-pam` project can be used instead. - (Contributed by Victor Stinner in :gh:`104773`.) +.. _whatsnew313-pep594: - * :mod:`!sunau`. - (Contributed by Victor Stinner in :gh:`104773`.) 
+PEP 594: Remove "dead batteries" from the standard library
+----------------------------------------------------------
+
+:pep:`594` proposed removing 19 modules from the standard library,
+colloquially referred to as 'dead batteries' due to their
+historic, obsolete, or insecure status.
+All of the following modules were deprecated in Python 3.11,
+and are now removed:
+
+* :mod:`!aifc`
+* :mod:`!audioop`
+* :mod:`!chunk`
+* :mod:`!cgi` and :mod:`!cgitb`
+
+  * :class:`!cgi.FieldStorage` can typically be replaced with
+    :func:`urllib.parse.parse_qsl` for ``GET`` and ``HEAD`` requests,
+    and the :mod:`email.message` module or the :pypi:`multipart` library
+    for ``POST`` and ``PUT`` requests.
+
+  * :func:`!cgi.parse` can be replaced by calling
+    :func:`urllib.parse.parse_qs` directly on the desired query string,
+    unless the input is ``multipart/form-data``,
+    which should be replaced as described below for :func:`!cgi.parse_multipart`.
+
+  * :func:`!cgi.parse_header` can be replaced with the functionality
+    in the :mod:`email` package, which implements the same MIME RFCs.
+    For example, with :class:`email.message.EmailMessage`:
+
+    .. code-block:: python
+
+       from email.message import EmailMessage
+
+       msg = EmailMessage()
+       msg['content-type'] = 'application/json; charset="utf8"'
+       main, params = msg.get_content_type(), msg['content-type'].params
+
+  * :func:`!cgi.parse_multipart` can be replaced with the functionality
+    in the :mod:`email` package (for example, the
+    :class:`email.message.EmailMessage` and :class:`email.message.Message`
+    classes), which implements the same MIME RFCs,
+    or with the :pypi:`multipart` library.
+
+* :mod:`!crypt` and the private :mod:`!_crypt` extension.
+  The :mod:`hashlib` module may be an appropriate replacement
+  when simply hashing a value is required
+  (see the sketch at the end of this section).
+  Otherwise, various third-party libraries on PyPI are available:
+
+  * :pypi:`bcrypt`:
+    Modern password hashing for your software and your servers.
+  * :pypi:`passlib`:
+    Comprehensive password hashing framework supporting over 30 schemes.
+  * :pypi:`argon2-cffi`:
+    The secure Argon2 password hashing algorithm.
+  * :pypi:`legacycrypt`:
+    :mod:`ctypes` wrapper to the POSIX crypt library call
+    and associated functionality.
+  * :pypi:`crypt_r`:
+    Fork of the :mod:`!crypt` module,
+    wrapper to the :manpage:`crypt_r(3)` library call
+    and associated functionality.
+
+* :mod:`!imghdr`:
+  The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries
+  should be used as replacements.
+  For example, the :func:`!puremagic.what` function can be used
+  to replace the :func:`!imghdr.what` function for all file formats
+  that were supported by :mod:`!imghdr`.
+* :mod:`!mailcap`:
+  Use the :mod:`mimetypes` module instead.
+* :mod:`!msilib`
+* :mod:`!nis`
+* :mod:`!nntplib`:
+  Use the :pypi:`pynntp` library from PyPI instead.
+* :mod:`!ossaudiodev`:
+  For audio playback, use the :pypi:`pygame` library from PyPI instead.
+* :mod:`!pipes`:
+  Use the :mod:`subprocess` module instead.
+* :mod:`!sndhdr`:
+  The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries
+  should be used as replacements.
+* :mod:`!spwd`:
+  Use the :pypi:`python-pam` library from PyPI instead.
+* :mod:`!sunau`
+* :mod:`!telnetlib`:
+  Use the :pypi:`telnetlib3` or :pypi:`Exscript` libraries from PyPI instead.
+* :mod:`!uu`:
+  Use the :mod:`base64` module instead, as a modern alternative.
+* :mod:`!xdrlib`
+
+(Contributed by Victor Stinner and Zachary Ware in :gh:`104773` and :gh:`104780`.)
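+
+As a standard-library-only illustration of the :mod:`!crypt` guidance above,
+salted password hashing can be sketched with :func:`hashlib.pbkdf2_hmac`
+(the parameter values below are illustrative, not recommendations):
+
+.. code-block:: python
+
+   import hashlib
+   import hmac
+   import os
+
+   def hash_password(password: str, salt: bytes | None = None) -> tuple[bytes, bytes]:
+       salt = salt if salt is not None else os.urandom(16)
+       digest = hashlib.pbkdf2_hmac("sha256", password.encode(), salt, 600_000)
+       return salt, digest
+
+   salt, stored = hash_password("hunter2")
+   # Use a constant-time comparison when checking a login attempt:
+   print(hmac.compare_digest(stored, hash_password("hunter2", salt)[1]))  # True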
+ + +2to3 +---- - * :mod:`!telnetlib`, use the projects :pypi:`telnetlib3` or - :pypi:`Exscript` instead. - (Contributed by Victor Stinner in :gh:`104773`.) +* Remove the :program:`2to3` program and the :mod:`!lib2to3` module, + previously deprecated in Python 3.11. + (Contributed by Victor Stinner in :gh:`104780`.) - * :mod:`!uu`: the :mod:`base64` module is a modern alternative. - (Contributed by Victor Stinner in :gh:`104773`.) - * :mod:`!xdrlib`. - (Contributed by Victor Stinner in :gh:`104773`.) +builtins +-------- -* Remove the ``2to3`` program and the :mod:`!lib2to3` module, - deprecated in Python 3.11. - (Contributed by Victor Stinner in :gh:`104780`.) +* Remove support for chained :class:`classmethod` descriptors + (introduced in :gh:`63272`). + These can no longer be used to wrap other descriptors, + such as :class:`property`. + The core design of this feature was flawed and led to several problems. + To "pass-through" a :class:`classmethod`, consider using + the :attr:`!__wrapped__` attribute that was added in Python 3.10. + (Contributed by Raymond Hettinger in :gh:`89519`.) + +* Raise a :exc:`RuntimeError` when calling :meth:`frame.clear` + on a suspended frame (as has always been the case for an executing frame). + (Contributed by Irit Katriel in :gh:`79932`.) -* Remove the :mod:`!tkinter.tix` module, deprecated in Python 3.6. The - third-party Tix library which the module wrapped is unmaintained. - (Contributed by Zachary Ware in :gh:`75552`.) configparser ------------ -* Remove the undocumented :class:`!configparser.LegacyInterpolation` class, +* Remove the undocumented :class:`!LegacyInterpolation` class, deprecated in the docstring since Python 3.2, - and with a deprecation warning since Python 3.11. + and at runtime since Python 3.11. (Contributed by Hugo van Kemenade in :gh:`104886`.) -importlib ---------- -* Remove deprecated :meth:`~object.__getitem__` access for - :class:`!importlib.metadata.EntryPoint` objects. +importlib.metadata +------------------ + +* Remove deprecated subscript (:meth:`~object.__getitem__`) access for + :ref:`EntryPoint ` objects. (Contributed by Jason R. Coombs in :gh:`113175`.) + locale ------ -* Remove ``locale.resetlocale()`` function deprecated in Python 3.11: - use ``locale.setlocale(locale.LC_ALL, "")`` instead. +* Remove the :func:`!locale.resetlocale` function, deprecated in Python 3.11. + Use ``locale.setlocale(locale.LC_ALL, "")`` instead. (Contributed by Victor Stinner in :gh:`104783`.) -logging -------- -* :mod:`logging`: Remove undocumented and untested ``Logger.warn()`` and - ``LoggerAdapter.warn()`` methods and ``logging.warn()`` function. Deprecated - since Python 3.3, they were aliases to the :meth:`logging.Logger.warning` - method, :meth:`!logging.LoggerAdapter.warning` method and - :func:`logging.warning` function. - (Contributed by Victor Stinner in :gh:`105376`.) +opcode +------ + +* Move :attr:`!opcode.ENABLE_SPECIALIZATION` to :attr:`!_opcode.ENABLE_SPECIALIZATION`. + This field was added in 3.12, it was never documented, + and is not intended for external use. + (Contributed by Irit Katriel in :gh:`105481`.) + +* Remove :func:`!opcode.is_pseudo`, :attr:`!opcode.MIN_PSEUDO_OPCODE`, + and :attr:`!opcode.MAX_PSEUDO_OPCODE`, which were added in Python 3.12, + but were neither documented nor exposed through :mod:`dis`, + and were not intended to be used externally. + (Contributed by Irit Katriel in :gh:`105481`.) + pathlib ------- -* Remove support for using :class:`pathlib.Path` objects as context managers. 
- This functionality was deprecated and made a no-op in Python 3.9. +* Remove the ability to use :class:`~pathlib.Path` objects as context managers. + This functionality was deprecated and has had no effect since Python 3.9. + (Contributed by Barney Gale in :gh:`83863`.) + re -- -* Remove undocumented, never working, and deprecated ``re.template`` function - and ``re.TEMPLATE`` flag (and ``re.T`` alias). +* Remove the undocumented, deprecated, and broken + :func:`!re.template` function and :attr:`!re.TEMPLATE` / :attr:`!re.T` flag. (Contributed by Serhiy Storchaka and Nikita Sobolev in :gh:`105687`.) +tkinter.tix +----------- + +* Remove the :mod:`!tkinter.tix` module, deprecated in Python 3.6. + The third-party Tix library which the module wrapped is unmaintained. + (Contributed by Zachary Ware in :gh:`75552`.) + + turtle ------ -* Remove the :meth:`!turtle.RawTurtle.settiltangle` method, - deprecated in docs since Python 3.1 - and with a deprecation warning since Python 3.11. +* Remove the :meth:`!RawTurtle.settiltangle` method, + deprecated in the documentation since Python 3.1 + and at runtime since Python 3.11. (Contributed by Hugo van Kemenade in :gh:`104876`.) + typing ------ -* Namespaces ``typing.io`` and ``typing.re``, deprecated in Python 3.8, - are now removed. The items in those namespaces can be imported directly - from :mod:`typing`. (Contributed by Sebastian Rittau in :gh:`92871`.) +* Remove the :mod:`!typing.io` and :mod:`!typing.re` namespaces, + deprecated since Python 3.8. + The items in those namespaces can be imported directly + from the :mod:`typing` module. + (Contributed by Sebastian Rittau in :gh:`92871`.) -* Remove support for the keyword-argument method of creating - :class:`typing.TypedDict` types, deprecated in Python 3.11. +* Remove the keyword-argument method of creating + :class:`~typing.TypedDict` types, deprecated in Python 3.11. (Contributed by Tomas Roun in :gh:`104786`.) + unittest -------- @@ -1426,776 +1702,785 @@ unittest Use :class:`~unittest.TestLoader` methods instead: - * :meth:`unittest.TestLoader.loadTestsFromModule` - * :meth:`unittest.TestLoader.loadTestsFromTestCase` - * :meth:`unittest.TestLoader.getTestCaseNames` + * :meth:`~unittest.TestLoader.loadTestsFromModule` + * :meth:`~unittest.TestLoader.loadTestsFromTestCase` + * :meth:`~unittest.TestLoader.getTestCaseNames` (Contributed by Hugo van Kemenade in :gh:`104835`.) -* Remove the untested and undocumented :meth:`!unittest.TestProgram.usageExit` +* Remove the untested and undocumented :meth:`!TestProgram.usageExit` method, deprecated in Python 3.11. (Contributed by Hugo van Kemenade in :gh:`104992`.) + urllib ------ -* Remove *cafile*, *capath* and *cadefault* parameters of the - :func:`urllib.request.urlopen` function, deprecated in Python 3.6: pass - the *context* parameter instead. Use - :meth:`ssl.SSLContext.load_cert_chain` to load specific certificates, or - let :func:`ssl.create_default_context` select the system's trusted CA - certificates for you. +* Remove the *cafile*, *capath*, and *cadefault* parameters of the + :func:`urllib.request.urlopen` function, deprecated in Python 3.6. + Use the *context* parameter instead with an :class:`~ssl.SSLContext` instance. + The :meth:`ssl.SSLContext.load_cert_chain` function + can be used to load specific certificates, + or let :func:`ssl.create_default_context` select + the operating system's trusted certificate authority (CA) certificates. (Contributed by Victor Stinner in :gh:`105382`.) 
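+
+  For example, a short sketch of the *context*-based replacement
+  (the URL and the ``my-ca.pem`` bundle are illustrative):
+
+  .. code-block:: python
+
+     import ssl
+     import urllib.request
+
+     ctx = ssl.create_default_context()        # trust the system CA store
+     # ctx.load_verify_locations("my-ca.pem")  # ...or trust a specific CA bundle
+     with urllib.request.urlopen("https://example.org/", context=ctx) as response:
+         print(response.status)
+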
+ webbrowser ---------- -* Remove the untested and undocumented :mod:`webbrowser` :class:`!MacOSX` class, +* Remove the untested and undocumented :class:`!MacOSX` class, deprecated in Python 3.11. Use the :class:`!MacOSXOSAScript` class (introduced in Python 3.2) instead. (Contributed by Hugo van Kemenade in :gh:`104804`.) -* Remove deprecated ``webbrowser.MacOSXOSAScript._name`` attribute. - Use :attr:`webbrowser.MacOSXOSAScript.name ` +* Remove the deprecated :attr:`!MacOSXOSAScript._name` attribute. + Use the :attr:`MacOSXOSAScript.name ` attribute instead. (Contributed by Nikita Sobolev in :gh:`105546`.) + New Deprecations ================ -* Removed chained :class:`classmethod` descriptors (introduced in - :gh:`63272`). This can no longer be used to wrap other descriptors - such as :class:`property`. The core design of this feature was flawed - and caused a number of downstream problems. To "pass-through" a - :class:`classmethod`, consider using the :attr:`!__wrapped__` - attribute that was added in Python 3.10. (Contributed by Raymond - Hettinger in :gh:`89519`.) - -* :mod:`array`: :mod:`array`'s ``'u'`` format code, deprecated in docs since Python 3.3, - emits :exc:`DeprecationWarning` since 3.13 - and will be removed in Python 3.16. - Use the ``'w'`` format code instead. - (Contributed by Hugo van Kemenade in :gh:`80480`.) - -* :mod:`ctypes`: Deprecate undocumented :func:`!ctypes.SetPointerType` - and :func:`!ctypes.ARRAY` functions. - Replace ``ctypes.ARRAY(item_type, size)`` with ``item_type * size``. - (Contributed by Victor Stinner in :gh:`105733`.) - -* :mod:`decimal`: Deprecate non-standard format specifier "N" for - :class:`decimal.Decimal`. - It was not documented and only supported in the C implementation. - (Contributed by Serhiy Storchaka in :gh:`89902`.) - -* :mod:`dis`: The ``dis.HAVE_ARGUMENT`` separator is deprecated. Check - membership in :data:`~dis.hasarg` instead. - (Contributed by Irit Katriel in :gh:`109319`.) - -* :ref:`frame-objects`: - Calling :meth:`frame.clear` on a suspended frame raises :exc:`RuntimeError` - (as has always been the case for an executing frame). - (Contributed by Irit Katriel in :gh:`79932`.) +* :ref:`User-defined functions `: -* :mod:`getopt` and :mod:`optparse` modules: They are now - :term:`soft deprecated`: the :mod:`argparse` module should be used for new projects. - Previously, the :mod:`optparse` module was already deprecated, its removal - was not scheduled, and no warnings was emitted: so there is no change in - practice. - (Contributed by Victor Stinner in :gh:`106535`.) - -* :mod:`gettext`: Emit deprecation warning for non-integer numbers in - :mod:`gettext` functions and methods that consider plural forms even if the - translation was not found. - (Contributed by Serhiy Storchaka in :gh:`88434`.) - -* :mod:`glob`: The undocumented :func:`!glob.glob0` and :func:`!glob.glob1` - functions are deprecated. Use :func:`glob.glob` and pass a directory to its - *root_dir* argument instead. - (Contributed by Barney Gale in :gh:`117337`.) - -* :mod:`http.server`: :class:`http.server.CGIHTTPRequestHandler` now emits a - :exc:`DeprecationWarning` as it will be removed in 3.15. Process-based CGI - HTTP servers have been out of favor for a very long time. This code was - outdated, unmaintained, and rarely used. It has a high potential for both - security and functionality bugs. This includes removal of the ``--cgi`` - flag to the ``python -m http.server`` command line in 3.15. 
- -* :mod:`mimetypes`: Passing file path instead of URL in :func:`~mimetypes.guess_type` is - :term:`soft deprecated`. Use :func:`~mimetypes.guess_file_type` instead. - (Contributed by Serhiy Storchaka in :gh:`66543`.) + * Deprecate assignment to a function's :attr:`~function.__code__` attribute, + where the new code object's type does not match the function's type. + The different types are: + plain function, generator, async generator, and coroutine. + (Contributed by Irit Katriel in :gh:`81137`.) -* :mod:`re`: Passing optional arguments *maxsplit*, *count* and *flags* in module-level - functions :func:`re.split`, :func:`re.sub` and :func:`re.subn` as positional - arguments is now deprecated. In future Python versions these parameters will be - :ref:`keyword-only `. - (Contributed by Serhiy Storchaka in :gh:`56166`.) +* :mod:`array`: -* :mod:`pathlib`: - :meth:`pathlib.PurePath.is_reserved` is deprecated and scheduled for - removal in Python 3.15. Use :func:`os.path.isreserved` to detect reserved - paths on Windows. + * Deprecate the ``'u'`` format code (:c:type:`wchar_t`) at runtime. + This format code has been deprecated in documentation since Python 3.3, + and will be removed in Python 3.16. + Use the ``'w'`` format code (:c:type:`Py_UCS4`) + for Unicode characters instead. + (Contributed by Hugo van Kemenade in :gh:`80480`.) -* :mod:`platform`: - :func:`~platform.java_ver` is deprecated and will be removed in 3.15. - It was largely untested, had a confusing API, - and was only useful for Jython support. - (Contributed by Nikita Sobolev in :gh:`116349`.) +* :mod:`ctypes`: + + * Deprecate the undocumented :func:`!SetPointerType` function, + to be removed in Python 3.15. + (Contributed by Victor Stinner in :gh:`105733`.) + + * :term:`Soft-deprecate ` the :func:`~ctypes.ARRAY` + function in favour of ``type * length`` multiplication. + (Contributed by Victor Stinner in :gh:`105733`.) + +* :mod:`decimal`: + + * Deprecate the non-standard and undocumented :class:`~decimal.Decimal` + format specifier ``'N'``, + which is only supported in the :mod:`!decimal` module's C implementation. + (Contributed by Serhiy Storchaka in :gh:`89902`.) + +* :mod:`dis`: + + * Deprecate the :attr:`!HAVE_ARGUMENT` separator. + Check membership in :data:`~dis.hasarg` instead. + (Contributed by Irit Katriel in :gh:`109319`.) -* :mod:`pydoc`: Deprecate undocumented :func:`!pydoc.ispackage` function. - (Contributed by Zackery Spytz in :gh:`64020`.) +* :mod:`getopt` and :mod:`optparse`: -* :mod:`sqlite3`: Passing more than one positional argument to - :func:`sqlite3.connect` and the :class:`sqlite3.Connection` constructor is - deprecated. The remaining parameters will become keyword-only in Python 3.15. + * Both modules are now :term:`soft deprecated`, + with :mod:`argparse` preferred for new projects. + This is a new soft-deprecation for the :mod:`!getopt` module, + whereas the :mod:`!optparse` module was already *de facto* soft deprecated. + (Contributed by Victor Stinner in :gh:`106535`.) - Deprecate passing name, number of arguments, and the callable as keyword - arguments for the following :class:`sqlite3.Connection` APIs: +* :mod:`gettext`: - * :meth:`~sqlite3.Connection.create_function` - * :meth:`~sqlite3.Connection.create_aggregate` + * Deprecate non-integer numbers as arguments to functions and methods + that consider plural forms in the :mod:`!gettext` module, + even if no translation was found. + (Contributed by Serhiy Storchaka in :gh:`88434`.) 
+ +* :mod:`glob`: + + * Deprecate the undocumented :func:`!glob0` and :func:`!glob1` functions. + Use :func:`~glob.glob` and pass a :term:`path-like object` specifying + the root directory to the *root_dir* parameter instead. + (Contributed by Barney Gale in :gh:`117337`.) + +* :mod:`http.server`: + + * Deprecate :class:`~http.server.CGIHTTPRequestHandler`, + to be removed in Python 3.15. + Process-based CGI HTTP servers have been out of favor for a very long time. + This code was outdated, unmaintained, and rarely used. + It has a high potential for both security and functionality bugs. + (Contributed by Gregory P. Smith in :gh:`109096`.) + + * Deprecate the :option:`!--cgi` flag to + the :program:`python -m http.server` command-line interface, + to be removed in Python 3.15. + (Contributed by Gregory P. Smith in :gh:`109096`.) + +* :mod:`mimetypes`: + + * :term:`Soft-deprecate ` file path arguments + to :func:`~mimetypes.guess_type`, + use :func:`~mimetypes.guess_file_type` instead. + (Contributed by Serhiy Storchaka in :gh:`66543`.) + +* :mod:`re`: + + * Deprecate passing the optional *maxsplit*, *count*, or *flags* arguments + as positional arguments to the module-level + :func:`~re.split`, :func:`~re.sub`, and :func:`~re.subn` functions. + These parameters will become :ref:`keyword-only ` + in a future version of Python. + (Contributed by Serhiy Storchaka in :gh:`56166`.) + +* :mod:`pathlib`: + + * Deprecate :meth:`.PurePath.is_reserved`, + to be removed in Python 3.15. + Use :func:`os.path.isreserved` to detect reserved paths on Windows. + (Contributed by Barney Gale in :gh:`88569`.) + +* :mod:`platform`: - Deprecate passing the callback callable by keyword for the following - :class:`sqlite3.Connection` APIs: + * Deprecate :func:`~platform.java_ver`, + to be removed in Python 3.15. + This function is only useful for Jython support, has a confusing API, + and is largely untested. + (Contributed by Nikita Sobolev in :gh:`116349`.) - * :meth:`~sqlite3.Connection.set_authorizer` - * :meth:`~sqlite3.Connection.set_progress_handler` - * :meth:`~sqlite3.Connection.set_trace_callback` +* :mod:`pydoc`: - The affected parameters will become positional-only in Python 3.15. + * Deprecate the undocumented :func:`!ispackage` function. + (Contributed by Zackery Spytz in :gh:`64020`.) - (Contributed by Erlend E. Aasland in :gh:`107948` and :gh:`108278`.) +* :mod:`sqlite3`: -* :mod:`sys`: :func:`sys._enablelegacywindowsfsencoding` function. - Replace it with the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment variable. - (Contributed by Inada Naoki in :gh:`73427`.) + * Deprecate passing more than one positional argument to + the :func:`~sqlite3.connect` function + and the :class:`~sqlite3.Connection` constructor. + The remaining parameters will become keyword-only in Python 3.15. + (Contributed by Erlend E. Aasland in :gh:`107948`.) + + * Deprecate passing name, number of arguments, and the callable as keyword + arguments for :meth:`.Connection.create_function` + and :meth:`.Connection.create_aggregate` + These parameters will become positional-only in Python 3.15. + (Contributed by Erlend E. Aasland in :gh:`108278`.) + + * Deprecate passing the callback callable by keyword for the + :meth:`~sqlite3.Connection.set_authorizer`, + :meth:`~sqlite3.Connection.set_progress_handler`, and + :meth:`~sqlite3.Connection.set_trace_callback` + :class:`~sqlite3.Connection` methods. + The callback callables will become positional-only in Python 3.15. + (Contributed by Erlend E. Aasland in :gh:`108278`.) 
+ +* :mod:`sys`: + + * Deprecate the :func:`~sys._enablelegacywindowsfsencoding` function, + to be removed in Python 3.16. + Use the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment variable instead. + (Contributed by Inada Naoki in :gh:`73427`.) * :mod:`tarfile`: - The undocumented and unused ``tarfile`` attribute of :class:`tarfile.TarFile` - is deprecated and scheduled for removal in Python 3.16. -* :mod:`traceback`: The field *exc_type* of :class:`traceback.TracebackException` - is deprecated. Use *exc_type_str* instead. + * Deprecate the undocumented and unused :attr:`!TarFile.tarfile` attribute, + to be removed in Python 3.16. + (Contributed in :gh:`115256`.) + +* :mod:`traceback`: + + * Deprecate the :attr:`.TracebackException.exc_type` attribute. + Use :attr:`.TracebackException.exc_type_str` instead. + (Contributed by Irit Katriel in :gh:`112332`.) * :mod:`typing`: - * Creating a :class:`typing.NamedTuple` class using keyword arguments to denote - the fields (``NT = NamedTuple("NT", x=int, y=int)``) is deprecated, and will - be disallowed in Python 3.15. Use the class-based syntax or the functional - syntax instead. (Contributed by Alex Waygood in :gh:`105566`.) - - * When using the functional syntax to create a :class:`typing.NamedTuple` - class or a :class:`typing.TypedDict` class, failing to pass a value to the - 'fields' parameter (``NT = NamedTuple("NT")`` or ``TD = TypedDict("TD")``) is - deprecated. Passing ``None`` to the 'fields' parameter - (``NT = NamedTuple("NT", None)`` or ``TD = TypedDict("TD", None)``) is also - deprecated. Both will be disallowed in Python 3.15. To create a NamedTuple - class with 0 fields, use ``class NT(NamedTuple): pass`` or - ``NT = NamedTuple("NT", [])``. To create a TypedDict class with 0 fields, use - ``class TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``. + * Deprecate the undocumented keyword argument syntax for creating + :class:`~typing.NamedTuple` classes + (e.g. ``Point = NamedTuple("Point", x=int, y=int)``), + to be removed in Python 3.15. + Use the class-based syntax or the functional syntax instead. + (Contributed by Alex Waygood in :gh:`105566`.) + + * Deprecate omitting the *fields* parameter when creating + a :class:`~typing.NamedTuple` or :class:`typing.TypedDict` class, + and deprecate passing ``None`` to the *fields* parameter of both types. + Python 3.15 will require a valid sequence for the *fields* parameter. + To create a NamedTuple class with zero fields, + use ``class NT(NamedTuple): pass`` or ``NT = NamedTuple("NT", ())``. + To create a TypedDict class with zero fields, + use ``class TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``. (Contributed by Alex Waygood in :gh:`105566` and :gh:`105570`.) - * :func:`typing.no_type_check_decorator` is deprecated, and scheduled for - removal in Python 3.15. After eight years in the :mod:`typing` module, it - has yet to be supported by any major type checkers. + * Deprecate the :func:`typing.no_type_check_decorator` decorator function, + to be removed in in Python 3.15. + After eight years in the :mod:`typing` module, + it has yet to be supported by any major type checker. (Contributed by Alex Waygood in :gh:`106309`.) - * :data:`typing.AnyStr` is deprecated. In Python 3.16, it will be removed from - ``typing.__all__``, and a :exc:`DeprecationWarning` will be emitted when it - is imported or accessed. It will be removed entirely in Python 3.18. Use - the new :ref:`type parameter syntax ` instead. + * Deprecate :data:`typing.AnyStr`. 
+ In Python 3.16, it will be removed from ``typing.__all__``, + and a :exc:`DeprecationWarning` will be emitted at runtime + when it is imported or accessed. + It will be removed entirely in Python 3.18. + Use the new :ref:`type parameter syntax ` instead. (Contributed by Michael The in :gh:`107116`.) -* :ref:`user-defined-funcs`: - Assignment to a function's :attr:`~function.__code__` attribute where the new code - object's type does not match the function's type, is deprecated. The - different types are: plain function, generator, async generator and - coroutine. - (Contributed by Irit Katriel in :gh:`81137`.) +* :mod:`wave`: -* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()`` - methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes. - They will be removed in Python 3.15. - (Contributed by Victor Stinner in :gh:`105096`.) + * Deprecate the :meth:`~wave.Wave_read.getmark`, :meth:`!setmark`, + and :meth:`~wave.Wave_read.getmarkers` methods of + the :class:`~wave.Wave_read` and :class:`~wave.Wave_write` classes, + to be removed in Python 3.15. + (Contributed by Victor Stinner in :gh:`105096`.) .. Add deprecations above alphabetically, not here at the end. -Pending Removal in Python 3.14 ------------------------------- +.. include:: ../deprecations/pending-removal-in-3.14.rst -* :mod:`argparse`: The *type*, *choices*, and *metavar* parameters - of :class:`!argparse.BooleanOptionalAction` are deprecated - and will be removed in 3.14. - (Contributed by Nikita Sobolev in :gh:`92248`.) +.. include:: ../deprecations/pending-removal-in-3.15.rst -* :mod:`ast`: The following features have been deprecated in documentation - since Python 3.8, now cause a :exc:`DeprecationWarning` to be emitted at - runtime when they are accessed or used, and will be removed in Python 3.14: +.. include:: ../deprecations/pending-removal-in-3.16.rst - * :class:`!ast.Num` - * :class:`!ast.Str` - * :class:`!ast.Bytes` - * :class:`!ast.NameConstant` - * :class:`!ast.Ellipsis` +.. include:: ../deprecations/pending-removal-in-future.rst - Use :class:`ast.Constant` instead. - (Contributed by Serhiy Storchaka in :gh:`90953`.) +CPython Bytecode Changes +======================== -* :mod:`collections.abc`: Deprecated :class:`~collections.abc.ByteString`. - Prefer :class:`!Sequence` or :class:`~collections.abc.Buffer`. - For use in typing, prefer a union, like ``bytes | bytearray``, - or :class:`collections.abc.Buffer`. - (Contributed by Shantanu Jain in :gh:`91896`.) +* The oparg of :opcode:`YIELD_VALUE` is now + ``1`` if the yield is part of a yield-from or await, and ``0`` otherwise. + The oparg of :opcode:`RESUME` was changed to add a bit indicating + if the except-depth is 1, which is needed to optimize closing of generators. + (Contributed by Irit Katriel in :gh:`111354`.) -* :mod:`email`: Deprecated the *isdst* parameter in :func:`email.utils.localtime`. - (Contributed by Alan Williams in :gh:`72346`.) -* :mod:`importlib`: ``__package__`` and ``__cached__`` will cease to be set or - taken into consideration by the import system (:gh:`97879`). 
+C API Changes +============= -* :mod:`importlib.abc` deprecated classes: +New Features +------------ - * :class:`!importlib.abc.ResourceReader` - * :class:`!importlib.abc.Traversable` - * :class:`!importlib.abc.TraversableResources` +* Add the :ref:`PyMonitoring C API ` + for generating :pep:`669` monitoring events: + + * :c:type:`PyMonitoringState` + * :c:func:`PyMonitoring_FirePyStartEvent` + * :c:func:`PyMonitoring_FirePyResumeEvent` + * :c:func:`PyMonitoring_FirePyReturnEvent` + * :c:func:`PyMonitoring_FirePyYieldEvent` + * :c:func:`PyMonitoring_FireCallEvent` + * :c:func:`PyMonitoring_FireLineEvent` + * :c:func:`PyMonitoring_FireJumpEvent` + * :c:func:`PyMonitoring_FireBranchEvent` + * :c:func:`PyMonitoring_FireCReturnEvent` + * :c:func:`PyMonitoring_FirePyThrowEvent` + * :c:func:`PyMonitoring_FireRaiseEvent` + * :c:func:`PyMonitoring_FireCRaiseEvent` + * :c:func:`PyMonitoring_FireReraiseEvent` + * :c:func:`PyMonitoring_FireExceptionHandledEvent` + * :c:func:`PyMonitoring_FirePyUnwindEvent` + * :c:func:`PyMonitoring_FireStopIterationEvent` + * :c:func:`PyMonitoring_EnterScope` + * :c:func:`PyMonitoring_ExitScope` + + (Contributed by Irit Katriel in :gh:`111997`). + +* Add :c:type:`PyMutex`, a lightweight mutex that occupies a single byte, + and the new :c:func:`PyMutex_Lock` and :c:func:`PyMutex_Unlock` functions. + :c:func:`!PyMutex_Lock` will release the :term:`GIL` (if currently held) + if the operation needs to block. + (Contributed by Sam Gross in :gh:`108724`.) - Use :mod:`importlib.resources.abc` classes instead: +* Add the :ref:`PyTime C API ` to provide access to system clocks: - * :class:`importlib.resources.abc.Traversable` - * :class:`importlib.resources.abc.TraversableResources` + * :c:type:`PyTime_t`. + * :c:var:`PyTime_MIN` and :c:var:`PyTime_MAX`. + * :c:func:`PyTime_AsSecondsDouble`. + * :c:func:`PyTime_Monotonic`. + * :c:func:`PyTime_MonotonicRaw`. + * :c:func:`PyTime_PerfCounter`. + * :c:func:`PyTime_PerfCounterRaw`. + * :c:func:`PyTime_Time`. + * :c:func:`PyTime_TimeRaw`. - (Contributed by Jason R. Coombs and Hugo van Kemenade in :gh:`93963`.) + (Contributed by Victor Stinner and Petr Viktorin in :gh:`110850`.) -* :mod:`itertools` had undocumented, inefficient, historically buggy, - and inconsistent support for copy, deepcopy, and pickle operations. - This will be removed in 3.14 for a significant reduction in code - volume and maintenance burden. - (Contributed by Raymond Hettinger in :gh:`101588`.) +* Add the :c:func:`PyDict_ContainsString` function + with the same behavior as :c:func:`PyDict_Contains`, + but *key* is specified as a :c:expr:`const char*` UTF-8 encoded bytes string, + rather than a :c:expr:`PyObject*`. + (Contributed by Victor Stinner in :gh:`108314`.) -* :mod:`multiprocessing`: The default start method will change to a safer one on - Linux, BSDs, and other non-macOS POSIX platforms where ``'fork'`` is currently - the default (:gh:`84559`). Adding a runtime warning about this was deemed too - disruptive as the majority of code is not expected to care. Use the - :func:`~multiprocessing.get_context` or - :func:`~multiprocessing.set_start_method` APIs to explicitly specify when - your code *requires* ``'fork'``. See :ref:`multiprocessing-start-methods`. +* Add the :c:func:`PyDict_GetItemRef` and :c:func:`PyDict_GetItemStringRef` + functions, + which behave similarly to :c:func:`PyDict_GetItemWithError`, + but return a :term:`strong reference` instead of a :term:`borrowed reference`. 
+ Moreover, these functions return ``-1`` on error, + removing the need to check :c:func:`!PyErr_Occurred`. + (Contributed by Victor Stinner in :gh:`106004`.) -* :mod:`pathlib`: :meth:`~pathlib.PurePath.is_relative_to` and - :meth:`~pathlib.PurePath.relative_to`: passing additional arguments is - deprecated. +* Add the :c:func:`PyDict_SetDefaultRef` function, + which behaves similarly to :c:func:`PyDict_SetDefault`, + but returns a :term:`strong reference` instead of a :term:`borrowed reference`. + This function returns ``-1`` on error, + ``0`` on insertion, + and ``1`` if the key was already present in the dictionary. + (Contributed by Sam Gross in :gh:`112066`.) -* :mod:`pkgutil`: :func:`~pkgutil.find_loader` and :func:`~pkgutil.get_loader` - now raise :exc:`DeprecationWarning`; - use :func:`importlib.util.find_spec` instead. - (Contributed by Nikita Sobolev in :gh:`97850`.) +* Add the :c:func:`PyDict_Pop` and :c:func:`PyDict_PopString` functions + to remove a key from a dictionary and optionally return the removed value. + This is similar to :meth:`dict.pop`, + though there is no default value, + and :exc:`KeyError` is not raised for missing keys. + (Contributed by Stefan Behnel and Victor Stinner in :gh:`111262`.) -* :mod:`pty`: +* Add the :c:func:`PyMapping_GetOptionalItem` + and :c:func:`PyMapping_GetOptionalItemString` functions + as alternatives to :c:func:`PyObject_GetItem` + and :c:func:`PyMapping_GetItemString` respectively. + The new functions do not raise :exc:`KeyError` + if the requested key is missing from the mapping. + These variants are more convenient and faster + if a missing key should not be treated as a failure. + (Contributed by Serhiy Storchaka in :gh:`106307`.) - * ``master_open()``: use :func:`pty.openpty`. - * ``slave_open()``: use :func:`pty.openpty`. +* Add the :c:func:`PyObject_GetOptionalAttr` + and :c:func:`PyObject_GetOptionalAttrString` functions + as alternatives to :c:func:`PyObject_GetAttr` + and :c:func:`PyObject_GetAttrString` respectively. + The new functions do not raise :exc:`AttributeError` + if the requested attribute is not found on the object. + These variants are more convenient and faster + if the missing attribute should not be treated as a failure. + (Contributed by Serhiy Storchaka in :gh:`106521`.) -* :mod:`sqlite3`: +* Add the :c:func:`PyErr_FormatUnraisable` function + as an extension to :c:func:`PyErr_WriteUnraisable` + that allows customizing the warning message. + (Contributed by Serhiy Storchaka in :gh:`108082`.) - * :data:`~sqlite3.version` and :data:`~sqlite3.version_info`. +* Add new functions that return a :term:`strong reference` instead of + a :term:`borrowed reference` for frame locals, globals, and builtins, + as part of :ref:`PEP 667 `: - * :meth:`~sqlite3.Cursor.execute` and :meth:`~sqlite3.Cursor.executemany` - if :ref:`named placeholders ` are used and - *parameters* is a sequence instead of a :class:`dict`. + * :c:func:`PyEval_GetFrameBuiltins` replaces :c:func:`PyEval_GetBuiltins` + * :c:func:`PyEval_GetFrameGlobals` replaces :c:func:`PyEval_GetGlobals` + * :c:func:`PyEval_GetFrameLocals` replaces :c:func:`PyEval_GetLocals` - * date and datetime adapter, date and timestamp converter: - see the :mod:`sqlite3` documentation for suggested replacement recipes. + (Contributed by Mark Shannon and Tian Gao in :gh:`74929`.) 
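+
+  As a minimal, illustrative sketch (not taken from CPython itself), the new
+  strong-reference lookup functions compose naturally with
+  :c:func:`PyEval_GetFrameLocals`; the variable name ``"greeting"`` and the
+  helper ``fetch_greeting()`` are hypothetical:
+
+  .. code-block:: c
+
+     static PyObject *
+     fetch_greeting(void)
+     {
+         /* New reference; NULL if it fails or if no Python frame
+            is currently executing. */
+         PyObject *locals = PyEval_GetFrameLocals();
+         if (locals == NULL) {
+             return NULL;
+         }
+         PyObject *value;
+         /* Returns -1 on error, 0 if the key is missing, 1 if found;
+            on success, 'value' holds a strong reference. */
+         int rc = PyDict_GetItemStringRef(locals, "greeting", &value);
+         Py_DECREF(locals);
+         if (rc < 0) {
+             return NULL;
+         }
+         if (rc == 0) {
+             Py_RETURN_NONE;
+         }
+         return value;
+     }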
-* :class:`types.CodeType`: Accessing :attr:`~codeobject.co_lnotab` was - deprecated in :pep:`626` - since 3.10 and was planned to be removed in 3.12, - but it only got a proper :exc:`DeprecationWarning` in 3.12. - May be removed in 3.14. - (Contributed by Nikita Sobolev in :gh:`101866`.) +* Add the :c:func:`Py_GetConstant` and :c:func:`Py_GetConstantBorrowed` + functions to get :term:`strong ` + or :term:`borrowed ` references to constants. + For example, ``Py_GetConstant(Py_CONSTANT_ZERO)`` returns a strong reference + to the constant zero. + (Contributed by Victor Stinner in :gh:`115754`.) -* :mod:`typing`: :class:`~typing.ByteString`, deprecated since Python 3.9, - now causes a :exc:`DeprecationWarning` to be emitted when it is used. +* Add the :c:func:`PyImport_AddModuleRef` function + as a replacement for :c:func:`PyImport_AddModule` + that returns a :term:`strong reference` instead of a :term:`borrowed reference`. + (Contributed by Victor Stinner in :gh:`105922`.) -* :mod:`urllib`: - :class:`!urllib.parse.Quoter` is deprecated: it was not intended to be a - public API. - (Contributed by Gregory P. Smith in :gh:`88168`.) +* Add the :c:func:`Py_IsFinalizing` function to check + whether the main Python interpreter is + :term:`shutting down `. + (Contributed by Victor Stinner in :gh:`108014`.) -Pending Removal in Python 3.15 ------------------------------- +* Add the :c:func:`PyList_GetItemRef` function + as a replacement for :c:func:`PyList_GetItem` + that returns a :term:`strong reference` instead of a :term:`borrowed reference`. + (Contributed by Sam Gross in :gh:`114329`.) -* :class:`http.server.CGIHTTPRequestHandler` will be removed along with its - related ``--cgi`` flag to ``python -m http.server``. It was obsolete and - rarely used. No direct replacement exists. *Anything* is better than CGI - to interface a web server with a request handler. +* Add the :c:func:`PyList_Extend` and :c:func:`PyList_Clear` functions, + mirroring the Python :meth:`!list.extend` and :meth:`!list.clear` methods. + (Contributed by Victor Stinner in :gh:`111138`.) -* :class:`locale`: :func:`locale.getdefaultlocale` was deprecated in Python 3.11 - and originally planned for removal in Python 3.13 (:gh:`90817`), - but removal has been postponed to Python 3.15. - Use :func:`locale.setlocale()`, :func:`locale.getencoding()` and - :func:`locale.getlocale()` instead. - (Contributed by Hugo van Kemenade in :gh:`111187`.) +* Add the :c:func:`PyLong_AsInt` function. + It behaves similarly to :c:func:`PyLong_AsLong`, + but stores the result in a C :c:expr:`int` instead of a C :c:expr:`long`. + (Contributed by Victor Stinner in :gh:`108014`.) -* :mod:`pathlib`: - :meth:`pathlib.PurePath.is_reserved` is deprecated and scheduled for - removal in Python 3.15. Use :func:`os.path.isreserved` to detect reserved - paths on Windows. +* Add the :c:func:`PyLong_AsNativeBytes`, :c:func:`PyLong_FromNativeBytes`, + and :c:func:`PyLong_FromUnsignedNativeBytes` functions + to simplify converting between native integer types + and Python :class:`int` objects. + (Contributed by Steve Dower in :gh:`111140`.) -* :mod:`platform`: - :func:`~platform.java_ver` is deprecated and will be removed in 3.15. - It was largely untested, had a confusing API, - and was only useful for Jython support. - (Contributed by Nikita Sobolev in :gh:`116349`.) - -* :mod:`threading`: - Passing any arguments to :func:`threading.RLock` is now deprecated. - C version allows any numbers of args and kwargs, - but they are just ignored. 
Python version does not allow any arguments. - All arguments will be removed from :func:`threading.RLock` in Python 3.15. - (Contributed by Nikita Sobolev in :gh:`102029`.) - -* :class:`typing.NamedTuple`: - - * The undocumented keyword argument syntax for creating :class:`!NamedTuple` classes - (``NT = NamedTuple("NT", x=int)``) is deprecated, and will be disallowed in - 3.15. Use the class-based syntax or the functional syntax instead. - - * When using the functional syntax to create a :class:`!NamedTuple` class, failing to - pass a value to the *fields* parameter (``NT = NamedTuple("NT")``) is - deprecated. Passing ``None`` to the *fields* parameter - (``NT = NamedTuple("NT", None)``) is also deprecated. Both will be - disallowed in Python 3.15. To create a :class:`!NamedTuple` class with 0 fields, use - ``class NT(NamedTuple): pass`` or ``NT = NamedTuple("NT", [])``. - -* :class:`typing.TypedDict`: When using the functional syntax to create a - :class:`!TypedDict` class, failing to pass a value to the *fields* parameter (``TD = - TypedDict("TD")``) is deprecated. Passing ``None`` to the *fields* parameter - (``TD = TypedDict("TD", None)``) is also deprecated. Both will be disallowed - in Python 3.15. To create a :class:`!TypedDict` class with 0 fields, use ``class - TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``. - -* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()`` - methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes. - They will be removed in Python 3.15. - (Contributed by Victor Stinner in :gh:`105096`.) - -Pending Removal in Python 3.16 ------------------------------- - -* :class:`array.array` ``'u'`` type (:c:type:`wchar_t`): - use the ``'w'`` type instead (``Py_UCS4``). - -Pending Removal in Future Versions ----------------------------------- - -The following APIs were deprecated in earlier Python versions and will be removed, -although there is currently no date scheduled for their removal. - -* :mod:`argparse`: Nesting argument groups and nesting mutually exclusive - groups are deprecated. - -* :mod:`builtins`: - - * ``~bool``, bitwise inversion on bool. - * ``bool(NotImplemented)``. - * Generators: ``throw(type, exc, tb)`` and ``athrow(type, exc, tb)`` - signature is deprecated: use ``throw(exc)`` and ``athrow(exc)`` instead, - the single argument signature. - * Currently Python accepts numeric literals immediately followed by keywords, - for example ``0in x``, ``1or x``, ``0if 1else 2``. It allows confusing and - ambiguous expressions like ``[0x1for x in y]`` (which can be interpreted as - ``[0x1 for x in y]`` or ``[0x1f or x in y]``). A syntax warning is raised - if the numeric literal is immediately followed by one of keywords - :keyword:`and`, :keyword:`else`, :keyword:`for`, :keyword:`if`, - :keyword:`in`, :keyword:`is` and :keyword:`or`. In a future release it - will be changed to a syntax error. (:gh:`87999`) - * Support for ``__index__()`` and ``__int__()`` method returning non-int type: - these methods will be required to return an instance of a strict subclass of - :class:`int`. - * Support for ``__float__()`` method returning a strict subclass of - :class:`float`: these methods will be required to return an instance of - :class:`float`. - * Support for ``__complex__()`` method returning a strict subclass of - :class:`complex`: these methods will be required to return an instance of - :class:`complex`. - * Delegation of ``int()`` to ``__trunc__()`` method. 
- -* :mod:`calendar`: ``calendar.January`` and ``calendar.February`` constants are - deprecated and replaced by :data:`calendar.JANUARY` and - :data:`calendar.FEBRUARY`. - (Contributed by Prince Roshan in :gh:`103636`.) - -* :attr:`codeobject.co_lnotab`: use the :meth:`codeobject.co_lines` method - instead. - -* :mod:`datetime`: - - * :meth:`~datetime.datetime.utcnow`: - use ``datetime.datetime.now(tz=datetime.UTC)``. - * :meth:`~datetime.datetime.utcfromtimestamp`: - use ``datetime.datetime.fromtimestamp(timestamp, tz=datetime.UTC)``. - -* :mod:`gettext`: Plural value must be an integer. - -* :mod:`importlib`: - - * ``load_module()`` method: use ``exec_module()`` instead. - * :func:`~importlib.util.cache_from_source` *debug_override* parameter is - deprecated: use the *optimization* parameter instead. - -* :mod:`importlib.metadata`: - - * ``EntryPoints`` tuple interface. - * Implicit ``None`` on return values. - -* :mod:`mailbox`: Use of StringIO input and text mode is deprecated, use - BytesIO and binary mode instead. - -* :mod:`os`: Calling :func:`os.register_at_fork` in multi-threaded process. - -* :class:`!pydoc.ErrorDuringImport`: A tuple value for *exc_info* parameter is - deprecated, use an exception instance. - -* :mod:`re`: More strict rules are now applied for numerical group references - and group names in regular expressions. Only sequence of ASCII digits is now - accepted as a numerical reference. The group name in bytes patterns and - replacement strings can now only contain ASCII letters and digits and - underscore. - (Contributed by Serhiy Storchaka in :gh:`91760`.) - -* :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse` modules. - -* :mod:`shutil`: :func:`~shutil.rmtree`'s *onerror* parameter is deprecated in - Python 3.12; use the *onexc* parameter instead. - -* :mod:`ssl` options and protocols: - - * :class:`ssl.SSLContext` without protocol argument is deprecated. - * :class:`ssl.SSLContext`: :meth:`~ssl.SSLContext.set_npn_protocols` and - :meth:`!selected_npn_protocol` are deprecated: use ALPN - instead. - * ``ssl.OP_NO_SSL*`` options - * ``ssl.OP_NO_TLS*`` options - * ``ssl.PROTOCOL_SSLv3`` - * ``ssl.PROTOCOL_TLS`` - * ``ssl.PROTOCOL_TLSv1`` - * ``ssl.PROTOCOL_TLSv1_1`` - * ``ssl.PROTOCOL_TLSv1_2`` - * ``ssl.TLSVersion.SSLv3`` - * ``ssl.TLSVersion.TLSv1`` - * ``ssl.TLSVersion.TLSv1_1`` - -* :func:`sysconfig.is_python_build` *check_home* parameter is deprecated and - ignored. - -* :mod:`threading` methods: - - * :meth:`!threading.Condition.notifyAll`: use :meth:`~threading.Condition.notify_all`. - * :meth:`!threading.Event.isSet`: use :meth:`~threading.Event.is_set`. - * :meth:`!threading.Thread.isDaemon`, :meth:`threading.Thread.setDaemon`: - use :attr:`threading.Thread.daemon` attribute. - * :meth:`!threading.Thread.getName`, :meth:`threading.Thread.setName`: - use :attr:`threading.Thread.name` attribute. - * :meth:`!threading.currentThread`: use :meth:`threading.current_thread`. - * :meth:`!threading.activeCount`: use :meth:`threading.active_count`. - -* :class:`typing.Text` (:gh:`92332`). - -* :class:`unittest.IsolatedAsyncioTestCase`: it is deprecated to return a value - that is not ``None`` from a test case. 
-
-* :mod:`urllib.parse` deprecated functions: :func:`~urllib.parse.urlparse` instead
-
-  * ``splitattr()``
-  * ``splithost()``
-  * ``splitnport()``
-  * ``splitpasswd()``
-  * ``splitport()``
-  * ``splitquery()``
-  * ``splittag()``
-  * ``splittype()``
-  * ``splituser()``
-  * ``splitvalue()``
-  * ``to_bytes()``
+* Add the :c:func:`PyModule_Add` function, which is similar to
+  :c:func:`PyModule_AddObjectRef` and :c:func:`PyModule_AddObject`,
+  but always steals a reference to the value.
+  (Contributed by Serhiy Storchaka in :gh:`86493`.)

-* :mod:`urllib.request`: :class:`~urllib.request.URLopener` and
-  :class:`~urllib.request.FancyURLopener` style of invoking requests is
-  deprecated. Use newer :func:`~urllib.request.urlopen` functions and methods.
+* Add the :c:func:`PyObject_GenericHash` function
+  that implements the default hashing function of a Python object.
+  (Contributed by Serhiy Storchaka in :gh:`113024`.)

-* :mod:`wsgiref`: ``SimpleHandler.stdout.write()`` should not do partial
-  writes.
+* Add the :c:func:`Py_HashPointer` function to hash a raw pointer.
+  (Contributed by Victor Stinner in :gh:`111545`.)

-* :mod:`xml.etree.ElementTree`: Testing the truth value of an
-  :class:`~xml.etree.ElementTree.Element` is deprecated. In a future release it
-  it will always return ``True``. Prefer explicit ``len(elem)`` or
-  ``elem is not None`` tests instead.
+* Add the :c:func:`PyObject_VisitManagedDict` and
+  :c:func:`PyObject_ClearManagedDict` functions,
+  which must be called by the traverse and clear functions of a type using
+  the :c:macro:`Py_TPFLAGS_MANAGED_DICT` flag.
+  The `pythoncapi-compat project`_ can be used to
+  get these functions on Python 3.11 and 3.12.
+  (Contributed by Victor Stinner in :gh:`107073`.)

-* :meth:`zipimport.zipimporter.load_module` is deprecated:
-  use :meth:`~zipimport.zipimporter.exec_module` instead.
+* Add the :c:func:`PyRefTracer_SetTracer`
+  and :c:func:`PyRefTracer_GetTracer` functions,
+  which enable tracking object creation and destruction
+  in the same way that the :mod:`tracemalloc` module does.
+  (Contributed by Pablo Galindo in :gh:`93502`.)
+
+* Add the :c:func:`PySys_AuditTuple` function
+  as an alternative to :c:func:`PySys_Audit`
+  that takes event arguments as a Python :class:`tuple` object.
+  (Contributed by Victor Stinner in :gh:`85283`.)

-CPython Bytecode Changes
-========================
+* Add the :c:func:`PyThreadState_GetUnchecked()` function
+  as an alternative to :c:func:`PyThreadState_Get()`
+  that doesn't kill the process with a fatal error
+  if the current thread state is ``NULL``.
+  The caller is responsible for checking if the result is ``NULL``.
+  (Contributed by Victor Stinner in :gh:`108867`.)

-* The oparg of ``YIELD_VALUE`` is now ``1`` if the yield is part of a
-  yield-from or await, and ``0`` otherwise.  The oparg of ``RESUME`` was
-  changed to add a bit indicating whether the except-depth is 1, which
-  is needed to optimize closing of generators.
-  (Contributed by Irit Katriel in :gh:`111354`.)
+* Add the :c:func:`PyType_GetFullyQualifiedName` function
+  to get the type's fully qualified name.
+  The module name is prepended if :attr:`type.__module__` is
+  a string and is not equal to either ``'builtins'`` or ``'__main__'``.
+  (Contributed by Victor Stinner in :gh:`111696`.)
+
+* Add the :c:func:`PyType_GetModuleName` function
+  to get the type's module name. This is equivalent to getting the
+  :attr:`type.__module__` attribute.
+  (Contributed by Eric Snow and Victor Stinner in :gh:`111696`.)
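+
+As a hedged illustration of the two new name helpers above (the
+``reject_value()`` function is invented for this example and is not part of
+the C API), a fully qualified type name can be used directly in an error
+message:
+
+.. code-block:: c
+
+   static int
+   reject_value(PyObject *obj)
+   {
+       /* New reference, e.g. "collections.OrderedDict" or "int". */
+       PyObject *name = PyType_GetFullyQualifiedName(Py_TYPE(obj));
+       if (name == NULL) {
+           return -1;
+       }
+       PyErr_Format(PyExc_TypeError, "unsupported value of type %U", name);
+       Py_DECREF(name);
+       return -1;
+   }
+
+:c:func:`PyType_GetModuleName` can be used in the same way when only the
+module part of the name is needed.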
-C API Changes
-=============
+* Add the :c:func:`PyUnicode_EqualToUTF8AndSize`
+  and :c:func:`PyUnicode_EqualToUTF8` functions
+  to compare a Unicode object with a :c:expr:`const char*` UTF-8 encoded string
+  and return ``1`` if they are equal, or ``0`` otherwise.
+  These functions do not raise exceptions.
+  (Contributed by Serhiy Storchaka in :gh:`110289`.)

-New Features
-------------
+* Add the :c:func:`PyWeakref_GetRef` function
+  as an alternative to :c:func:`PyWeakref_GetObject`
+  that returns a :term:`strong reference`
+  or ``NULL`` if the referent is no longer live.
+  (Contributed by Victor Stinner in :gh:`105927`.)
+
+* Add fixed variants of functions which silently ignore errors:
+
+  * :c:func:`PyObject_HasAttrWithError` replaces :c:func:`PyObject_HasAttr`.
+  * :c:func:`PyObject_HasAttrStringWithError`
+    replaces :c:func:`PyObject_HasAttrString`.
+  * :c:func:`PyMapping_HasKeyWithError` replaces :c:func:`PyMapping_HasKey`.
+  * :c:func:`PyMapping_HasKeyStringWithError`
+    replaces :c:func:`PyMapping_HasKeyString`.
+
+  The new functions return ``-1`` for errors
+  and the standard ``1`` for true and ``0`` for false.
+
+  (Contributed by Serhiy Storchaka in :gh:`108511`.)
+
+
+Changed C APIs
+--------------

-* You no longer have to define the ``PY_SSIZE_T_CLEAN`` macro before including
-  :file:`Python.h` when using ``#`` formats in
+* The *keywords* parameter of :c:func:`PyArg_ParseTupleAndKeywords`
+  and :c:func:`PyArg_VaParseTupleAndKeywords`
+  now has type :c:expr:`char * const *` in C
+  and :c:expr:`const char * const *` in C++,
+  instead of :c:expr:`char **`.
+  In C++, this makes these functions compatible with arguments
+  of type :c:expr:`const char * const *`, :c:expr:`const char **`,
+  or :c:expr:`char * const *` without an explicit type cast.
+  In C, the functions only support arguments of type :c:expr:`char * const *`.
+  This can be overridden with the :c:macro:`PY_CXX_CONST` macro.
+  (Contributed by Serhiy Storchaka in :gh:`65210`.)
+
+* :c:func:`PyArg_ParseTupleAndKeywords` now supports
+  non-ASCII keyword parameter names.
+  (Contributed by Serhiy Storchaka in :gh:`110815`.)
+
+* The :c:func:`!PyCode_GetFirstFree` function is now an unstable API
+  and has been renamed to :c:func:`PyUnstable_Code_GetFirstFree`.
+  (Contributed by Bogdan Romanyuk in :gh:`115781`.)
+
+* The :c:func:`PyDict_GetItem`, :c:func:`PyDict_GetItemString`,
+  :c:func:`PyMapping_HasKey`, :c:func:`PyMapping_HasKeyString`,
+  :c:func:`PyObject_HasAttr`, :c:func:`PyObject_HasAttrString`,
+  and :c:func:`PySys_GetObject` functions,
+  each of which clears all errors which occurred when calling them,
+  now report these errors using :func:`sys.unraisablehook`.
+  You may replace them with other functions as recommended in the documentation.
+  (Contributed by Serhiy Storchaka in :gh:`106672`.)
+
+* Add support for the ``%T``, ``%#T``, ``%N`` and ``%#N`` formats
+  to :c:func:`PyUnicode_FromFormat`:
+
+  * ``%T``: Get the fully qualified name of an object type
+  * ``%#T``: As above, but use a colon as the separator
+  * ``%N``: Get the fully qualified name of a type
+  * ``%#N``: As above, but use a colon as the separator
+
+  See :pep:`737` for more information.
+  (Contributed by Victor Stinner in :gh:`111696`.)
+
+* You no longer have to define the ``PY_SSIZE_T_CLEAN`` macro before
+  including :file:`Python.h` when using ``#`` formats in
+  :ref:`format codes <arg-parsing-string-and-buffers>`.
+  APIs accepting the format codes always use ``Py_ssize_t`` for ``#`` formats.
+  (Contributed by Inada Naoki in :gh:`104922`.)
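+
+  For illustration only (the ``example_digest()`` function and its ``data``
+  parameter are invented, not part of any module), a keyword-parsing helper
+  that relies on the behaviour described above, receiving a ``Py_ssize_t``
+  length for the ``s#`` format without defining ``PY_SSIZE_T_CLEAN``:
+
+  .. code-block:: c
+
+     static PyObject *
+     example_digest(PyObject *module, PyObject *args, PyObject *kwargs)
+     {
+         /* The classic char* keyword array still matches the new
+            char * const * parameter type without a cast. */
+         static char *kwlist[] = {"data", NULL};
+         const char *data;
+         Py_ssize_t size;  /* '#' lengths are always Py_ssize_t */
+         if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#", kwlist,
+                                          &data, &size)) {
+             return NULL;
+         }
+         return PyLong_FromSsize_t(size);
+     }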
-* The *keywords* parameter of :c:func:`PyArg_ParseTupleAndKeywords` and - :c:func:`PyArg_VaParseTupleAndKeywords` now has type :c:expr:`char * const *` - in C and :c:expr:`const char * const *` in C++, instead of :c:expr:`char **`. - It makes these functions compatible with arguments of type - :c:expr:`const char * const *`, :c:expr:`const char **` or - :c:expr:`char * const *` in C++ and :c:expr:`char * const *` in C - without an explicit type cast. - This can be overridden with the :c:macro:`PY_CXX_CONST` macro. - (Contributed by Serhiy Storchaka in :gh:`65210`.) +* If Python is built in :ref:`debug mode ` + or :option:`with assertions <--with-assertions>`, + :c:func:`PyTuple_SET_ITEM` and :c:func:`PyList_SET_ITEM` + now check the index argument with an assertion. + (Contributed by Victor Stinner in :gh:`106168`.) -* Add :c:func:`PyImport_AddModuleRef`: similar to - :c:func:`PyImport_AddModule`, but return a :term:`strong reference` instead - of a :term:`borrowed reference`. - (Contributed by Victor Stinner in :gh:`105922`.) -* Add :c:func:`PyWeakref_GetRef` function: similar to - :c:func:`PyWeakref_GetObject` but returns a :term:`strong reference`, or - ``NULL`` if the referent is no longer live. - (Contributed by Victor Stinner in :gh:`105927`.) +Limited C API Changes +--------------------- -* Add :c:func:`PyObject_GetOptionalAttr` and - :c:func:`PyObject_GetOptionalAttrString`, variants of - :c:func:`PyObject_GetAttr` and :c:func:`PyObject_GetAttrString` which - don't raise :exc:`AttributeError` if the attribute is not found. - These variants are more convenient and faster if the missing attribute - should not be treated as a failure. - (Contributed by Serhiy Storchaka in :gh:`106521`.) +* The following functions are now included in the Limited C API: -* Add :c:func:`PyMapping_GetOptionalItem` and - :c:func:`PyMapping_GetOptionalItemString`: variants of - :c:func:`PyObject_GetItem` and :c:func:`PyMapping_GetItemString` which don't - raise :exc:`KeyError` if the key is not found. - These variants are more convenient and faster if the missing key should not - be treated as a failure. - (Contributed by Serhiy Storchaka in :gh:`106307`.) + * :c:func:`PyMem_RawMalloc` + * :c:func:`PyMem_RawCalloc` + * :c:func:`PyMem_RawRealloc` + * :c:func:`PyMem_RawFree` + * :c:func:`PySys_Audit` + * :c:func:`PySys_AuditTuple` + * :c:func:`PyType_GetModuleByDef` -* Add fixed variants of functions which silently ignore errors: + (Contributed by Victor Stinner in :gh:`85283`, :gh:`85283`, and :gh:`116936`.) - - :c:func:`PyObject_HasAttrWithError` replaces :c:func:`PyObject_HasAttr`. - - :c:func:`PyObject_HasAttrStringWithError` replaces :c:func:`PyObject_HasAttrString`. - - :c:func:`PyMapping_HasKeyWithError` replaces :c:func:`PyMapping_HasKey`. - - :c:func:`PyMapping_HasKeyStringWithError` replaces :c:func:`PyMapping_HasKeyString`. +* Python built with :option:`--with-trace-refs` (tracing references) + now supports the :ref:`Limited API `. + (Contributed by Victor Stinner in :gh:`108634`.) - New functions return not only ``1`` for true and ``0`` for false, but also - ``-1`` for error. - (Contributed by Serhiy Storchaka in :gh:`108511`.) +Removed C APIs +-------------- -* If Python is built in :ref:`debug mode ` or :option:`with - assertions <--with-assertions>`, :c:func:`PyTuple_SET_ITEM` and - :c:func:`PyList_SET_ITEM` now check the index argument with an assertion. - (Contributed by Victor Stinner in :gh:`106168`.) 
+* Remove several functions, macros, variables, etc + with names prefixed by ``_Py`` or ``_PY`` (which are considered private). + If your project is affected by one of these removals + and you believe that the removed API should remain available, + please :ref:`open a new issue ` to request a public C API + and add ``cc: @vstinner`` to the issue to notify Victor Stinner. + (Contributed by Victor Stinner in :gh:`106320`.) -* Add :c:func:`PyModule_Add` function: similar to - :c:func:`PyModule_AddObjectRef` and :c:func:`PyModule_AddObject` but - always steals a reference to the value. - (Contributed by Serhiy Storchaka in :gh:`86493`.) +* Remove old buffer protocols deprecated in Python 3.0. + Use :ref:`bufferobjects` instead. -* Add :c:func:`PyDict_GetItemRef` and :c:func:`PyDict_GetItemStringRef` - functions: similar to :c:func:`PyDict_GetItemWithError` but returning a - :term:`strong reference` instead of a :term:`borrowed reference`. Moreover, - these functions return -1 on error and so checking ``PyErr_Occurred()`` is - not needed. - (Contributed by Victor Stinner in :gh:`106004`.) + * :c:func:`!PyObject_CheckReadBuffer`: + Use :c:func:`PyObject_CheckBuffer` to test + whether the object supports the buffer protocol. + Note that :c:func:`PyObject_CheckBuffer` doesn't guarantee + that :c:func:`PyObject_GetBuffer` will succeed. + To test if the object is actually readable, + see the next example of :c:func:`PyObject_GetBuffer`. -* Added :c:func:`PyDict_SetDefaultRef`, which is similar to - :c:func:`PyDict_SetDefault` but returns a :term:`strong reference` instead of - a :term:`borrowed reference`. This function returns ``-1`` on error, ``0`` on - insertion, and ``1`` if the key was already present in the dictionary. - (Contributed by Sam Gross in :gh:`112066`.) + * :c:func:`!PyObject_AsCharBuffer`, :c:func:`!PyObject_AsReadBuffer`: + Use :c:func:`PyObject_GetBuffer` and :c:func:`PyBuffer_Release` instead: -* Add :c:func:`PyDict_ContainsString` function: same as - :c:func:`PyDict_Contains`, but *key* is specified as a :c:expr:`const char*` - UTF-8 encoded bytes string, rather than a :c:expr:`PyObject*`. - (Contributed by Victor Stinner in :gh:`108314`.) + .. code-block:: c -* Added :c:func:`PyList_GetItemRef` function: similar to - :c:func:`PyList_GetItem` but returns a :term:`strong reference` instead of - a :term:`borrowed reference`. + Py_buffer view; + if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) < 0) { + return NULL; + } + // Use `view.buf` and `view.len` to read from the buffer. + // You may need to cast buf as `(const char*)view.buf`. + PyBuffer_Release(&view); -* Add :c:func:`Py_IsFinalizing` function: check if the main Python interpreter is - :term:`shutting down `. - (Contributed by Victor Stinner in :gh:`108014`.) + * :c:func:`!PyObject_AsWriteBuffer`: + Use :c:func:`PyObject_GetBuffer` and :c:func:`PyBuffer_Release` instead: -* Add :c:func:`PyLong_AsInt` function: similar to :c:func:`PyLong_AsLong`, but - store the result in a C :c:expr:`int` instead of a C :c:expr:`long`. - Previously, it was known as the private function :c:func:`!_PyLong_AsInt` - (with an underscore prefix). - (Contributed by Victor Stinner in :gh:`108014`.) + .. code-block:: c -* Python built with :file:`configure` :option:`--with-trace-refs` (tracing - references) now supports the :ref:`Limited API `. - (Contributed by Victor Stinner in :gh:`108634`.) + Py_buffer view; + if (PyObject_GetBuffer(obj, &view, PyBUF_WRITABLE) < 0) { + return NULL; + } + // Use `view.buf` and `view.len` to write to the buffer. 
+ PyBuffer_Release(&view); -* Add :c:func:`PyObject_VisitManagedDict` and - :c:func:`PyObject_ClearManagedDict` functions which must be called by the - traverse and clear functions of a type using - :c:macro:`Py_TPFLAGS_MANAGED_DICT` flag. The `pythoncapi-compat project - `__ can be used to get these - functions on Python 3.11 and 3.12. - (Contributed by Victor Stinner in :gh:`107073`.) + (Contributed by Inada Naoki in :gh:`85275`.) -* Add :c:func:`PyUnicode_EqualToUTF8AndSize` and :c:func:`PyUnicode_EqualToUTF8` - functions: compare Unicode object with a :c:expr:`const char*` UTF-8 encoded - string and return true (``1``) if they are equal, or false (``0``) otherwise. - These functions do not raise exceptions. - (Contributed by Serhiy Storchaka in :gh:`110289`.) +* Remove various functions deprecated in Python 3.9: -* Add :c:func:`PyThreadState_GetUnchecked()` function: similar to - :c:func:`PyThreadState_Get()`, but don't kill the process with a fatal error - if it is NULL. The caller is responsible to check if the result is NULL. - Previously, the function was private and known as - ``_PyThreadState_UncheckedGet()``. - (Contributed by Victor Stinner in :gh:`108867`.) + * :c:func:`!PyEval_CallObject`, :c:func:`!PyEval_CallObjectWithKeywords`: + Use :c:func:`PyObject_CallNoArgs` or :c:func:`PyObject_Call` instead. -* Add :c:func:`PySys_AuditTuple` function: similar to :c:func:`PySys_Audit`, - but pass event arguments as a Python :class:`tuple` object. - (Contributed by Victor Stinner in :gh:`85283`.) + .. warning:: -* :c:func:`PyArg_ParseTupleAndKeywords` now supports non-ASCII keyword - parameter names. - (Contributed by Serhiy Storchaka in :gh:`110815`.) + In :c:func:`PyObject_Call`, positional arguments must be a :class:`tuple` + and must not be ``NULL``, + and keyword arguments must be a :class:`dict` or ``NULL``, + whereas the removed functions checked argument types + and accepted ``NULL`` positional and keyword arguments. + To replace ``PyEval_CallObjectWithKeywords(func, NULL, kwargs)`` with + :c:func:`PyObject_Call`, + pass an empty tuple as positional arguments using + :c:func:`PyTuple_New(0) `. -* Add :c:func:`PyMem_RawMalloc`, :c:func:`PyMem_RawCalloc`, - :c:func:`PyMem_RawRealloc` and :c:func:`PyMem_RawFree` to the limited C API - (version 3.13). - (Contributed by Victor Stinner in :gh:`85283`.) + * :c:func:`!PyEval_CallFunction`: + Use :c:func:`PyObject_CallFunction` instead. + * :c:func:`!PyEval_CallMethod`: + Use :c:func:`PyObject_CallMethod` instead. + * :c:func:`!PyCFunction_Call`: + Use :c:func:`PyObject_Call` instead. -* Add :c:func:`PySys_Audit` and :c:func:`PySys_AuditTuple` functions to the - limited C API. - (Contributed by Victor Stinner in :gh:`85283`.) + (Contributed by Victor Stinner in :gh:`105107`.) -* Add :c:func:`PyErr_FormatUnraisable` function: similar to - :c:func:`PyErr_WriteUnraisable`, but allow customizing the warning message. - (Contributed by Serhiy Storchaka in :gh:`108082`.) +* Remove the following old functions to configure the Python initialization, + deprecated in Python 3.11: -* Add :c:func:`PyList_Extend` and :c:func:`PyList_Clear` functions: similar to - Python ``list.extend()`` and ``list.clear()`` methods. - (Contributed by Victor Stinner in :gh:`111138`.) + * :c:func:`!PySys_AddWarnOptionUnicode`: + Use :c:member:`PyConfig.warnoptions` instead. + * :c:func:`!PySys_AddWarnOption`: + Use :c:member:`PyConfig.warnoptions` instead. + * :c:func:`!PySys_AddXOption`: + Use :c:member:`PyConfig.xoptions` instead. 
+  * :c:func:`!PySys_HasWarnOptions`:
+    Use :c:member:`PyConfig.warnoptions` instead.
+  * :c:func:`!PySys_SetPath`:
+    Set :c:member:`PyConfig.module_search_paths` instead.
+  * :c:func:`!Py_SetPath`:
+    Set :c:member:`PyConfig.module_search_paths` instead.
+  * :c:func:`!Py_SetStandardStreamEncoding`:
+    Set :c:member:`PyConfig.stdio_encoding` instead,
+    and, if needed, also set :c:member:`PyConfig.legacy_windows_stdio` (on Windows).
+  * :c:func:`!_Py_SetProgramFullPath`:
+    Set :c:member:`PyConfig.executable` instead.

-* Add :c:func:`PyDict_Pop` and :c:func:`PyDict_PopString` functions: remove a
-  key from a dictionary and optionally return the removed value. This is
-  similar to :meth:`dict.pop`, but without the default value and not raising
-  :exc:`KeyError` if the key is missing.
-  (Contributed by Stefan Behnel and Victor Stinner in :gh:`111262`.)
+  Use the new :c:type:`PyConfig` API of the :ref:`Python Initialization
+  Configuration <init-config>` instead (:pep:`587`), added to Python 3.8.
+  (Contributed by Victor Stinner in :gh:`105145`.)

-* Add :c:func:`Py_HashPointer` function to hash a pointer.
-  (Contributed by Victor Stinner in :gh:`111545`.)
+* Remove the :c:func:`!PyEval_AcquireLock` and :c:func:`!PyEval_ReleaseLock` functions,
+  deprecated in Python 3.2.
+  They didn't update the current thread state.
+  They can be replaced with:

-* Add :c:func:`PyObject_GenericHash` function that implements the default
-  hashing function of a Python object.
-  (Contributed by Serhiy Storchaka in :gh:`113024`.)
+  * :c:func:`PyEval_SaveThread` and :c:func:`PyEval_RestoreThread`;
+  * low-level :c:func:`PyEval_AcquireThread` and :c:func:`PyEval_ReleaseThread`;
+  * or :c:func:`PyGILState_Ensure` and :c:func:`PyGILState_Release`.

-* Add PyTime C API:
+  (Contributed by Victor Stinner in :gh:`105182`.)

-  * :c:type:`PyTime_t` type.
-  * :c:var:`PyTime_MIN` and :c:var:`PyTime_MAX` constants.
-  * Add functions:
+* Remove the :c:func:`!PyEval_ThreadsInitialized` function,
+  deprecated in Python 3.9.
+  Since Python 3.7, :c:func:`!Py_Initialize` always creates the GIL:
+  calling :c:func:`!PyEval_InitThreads` does nothing and
+  :c:func:`!PyEval_ThreadsInitialized` always returns non-zero.
+  (Contributed by Victor Stinner in :gh:`105182`.)

-  * :c:func:`PyTime_AsSecondsDouble`.
-  * :c:func:`PyTime_Monotonic`.
-  * :c:func:`PyTime_MonotonicRaw`.
-  * :c:func:`PyTime_PerfCounter`.
-  * :c:func:`PyTime_PerfCounterRaw`.
-  * :c:func:`PyTime_Time`.
-  * :c:func:`PyTime_TimeRaw`.
+* Remove the :c:func:`!_PyInterpreterState_Get` alias to
+  :c:func:`PyInterpreterState_Get()`,
+  which was kept for backward compatibility with Python 3.8.
+  The `pythoncapi-compat project`_ can be used to get
+  :c:func:`PyInterpreterState_Get()` on Python 3.8 and older.
+  (Contributed by Victor Stinner in :gh:`106320`.)

-  (Contributed by Victor Stinner and Petr Viktorin in :gh:`110850`.)
+* Remove the private :c:func:`!_PyObject_FastCall` function:
+  use :c:func:`!PyObject_Vectorcall`, which is available since Python 3.8
+  (:pep:`590`).
+  (Contributed by Victor Stinner in :gh:`106023`.)

-* Add :c:func:`PyLong_AsNativeBytes`, :c:func:`PyLong_FromNativeBytes` and
-  :c:func:`PyLong_FromUnsignedNativeBytes` functions to simplify converting
-  between native integer types and Python :class:`int` objects.
-  (Contributed by Steve Dower in :gh:`111140`.)
+* Remove the ``cpython/pytime.h`` header file,
+  which only contained private functions.
+  (Contributed by Victor Stinner in :gh:`106316`.)
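+
+As an illustrative sketch of migrating off the removed
+:c:func:`!PyEval_AcquireLock` and :c:func:`!PyEval_ReleaseLock` functions
+listed above, the GIL-state API both acquires the GIL and manages the
+thread state; the ``notify_callback()`` helper is hypothetical:
+
+.. code-block:: c
+
+   static void
+   notify_callback(PyObject *callback)
+   {
+       /* Safe to call from a thread that has no Python thread state yet. */
+       PyGILState_STATE gstate = PyGILState_Ensure();
+
+       PyObject *result = PyObject_CallNoArgs(callback);
+       if (result == NULL) {
+           PyErr_WriteUnraisable(callback);
+       }
+       else {
+           Py_DECREF(result);
+       }
+
+       PyGILState_Release(gstate);
+   }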
-* Add :c:func:`PyType_GetFullyQualifiedName` function to get the type's fully - qualified name. Equivalent to ``f"{type.__module__}.{type.__qualname__}"``, - or ``type.__qualname__`` if ``type.__module__`` is not a string or is equal - to ``"builtins"``. - (Contributed by Victor Stinner in :gh:`111696`.) +* Remove the undocumented ``PY_TIMEOUT_MAX`` constant from the limited C API. + (Contributed by Victor Stinner in :gh:`110014`.) -* Add :c:func:`PyType_GetModuleName` function to get the type's module name. - Equivalent to getting the ``type.__module__`` attribute. - (Contributed by Eric Snow and Victor Stinner in :gh:`111696`.) +* Remove the old trashcan macros ``Py_TRASHCAN_SAFE_BEGIN`` + and ``Py_TRASHCAN_SAFE_END``. + Replace both with the new macros ``Py_TRASHCAN_BEGIN`` + and ``Py_TRASHCAN_END``. + (Contributed by Irit Katriel in :gh:`105111`.) -* Add support for ``%T``, ``%#T``, ``%N`` and ``%#N`` formats to - :c:func:`PyUnicode_FromFormat`: format the fully qualified name of an object - type and of a type: call :c:func:`PyType_GetModuleName`. See :pep:`737` for - more information. - (Contributed by Victor Stinner in :gh:`111696`.) +Deprecated C APIs +----------------- -* Add :c:func:`Py_GetConstant` and :c:func:`Py_GetConstantBorrowed` functions - to get constants. For example, ``Py_GetConstant(Py_CONSTANT_ZERO)`` returns a - :term:`strong reference` to the constant zero. - (Contributed by Victor Stinner in :gh:`115754`.) +* Deprecate old Python initialization functions: -* Add :c:func:`PyType_GetModuleByDef` to the limited C API - (Contributed by Victor Stinner in :gh:`116936`.) + * :c:func:`PySys_ResetWarnOptions`: + Clear :data:`sys.warnoptions` and :data:`!warnings.filters` instead. + * :c:func:`Py_GetExecPrefix`: + Get :data:`sys.exec_prefix` instead. + * :c:func:`Py_GetPath`: + Get :data:`sys.path` instead. + * :c:func:`Py_GetPrefix`: + Get :data:`sys.prefix` instead. + * :c:func:`Py_GetProgramFullPath`: + Get :data:`sys.executable` instead. + * :c:func:`Py_GetProgramName`: + Get :data:`sys.executable` instead. + * :c:func:`Py_GetPythonHome`: + Get :c:member:`PyConfig.home` + or the :envvar:`PYTHONHOME` environment variable instead. -* Add two new functions to the C-API, :c:func:`PyRefTracer_SetTracer` and - :c:func:`PyRefTracer_GetTracer`, that allows to track object creation and - destruction the same way the :mod:`tracemalloc` module does. (Contributed - by Pablo Galindo in :gh:`93502`.) + (Contributed by Victor Stinner in :gh:`105145`.) -* Add :c:func:`PyEval_GetFrameBuiltins`, :c:func:`PyEval_GetFrameGlobals`, and - :c:func:`PyEval_GetFrameLocals` to the C API. These replacements for - :c:func:`PyEval_GetBuiltins`, :c:func:`PyEval_GetGlobals`, and - :c:func:`PyEval_GetLocals` return :term:`strong references ` - rather than borrowed references. (Added as part of :pep:`667`.) +* :term:`Soft deprecate ` the + :c:func:`PyEval_GetBuiltins`, :c:func:`PyEval_GetGlobals`, + and :c:func:`PyEval_GetLocals` functions, + which return a :term:`borrowed reference`. + (Soft deprecated as part of :pep:`667`.) -* Add :c:type:`PyMutex` API, a lightweight mutex that occupies a single byte. - The :c:func:`PyMutex_Lock` function will release the GIL (if currently held) - if the operation needs to block. - (Contributed by Sam Gross in :gh:`108724`.) +* Deprecate the :c:func:`PyImport_ImportModuleNoBlock` function, + which is just an alias to :c:func:`PyImport_ImportModule` since Python 3.3. + (Contributed by Victor Stinner in :gh:`105396`.) 
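+
+For the deprecated path getters above (for example
+:c:func:`Py_GetProgramFullPath`), these notes point to the corresponding
+:mod:`sys` attributes; the following is a hedged sketch of one way to read
+them from C, not a prescribed recipe:
+
+.. code-block:: c
+
+   /* sys.executable instead of the deprecated Py_GetProgramFullPath().
+      PySys_GetObject() returns a borrowed reference, or NULL if the
+      attribute is missing. */
+   PyObject *executable = PySys_GetObject("executable");
+   if (executable == NULL) {
+       /* sys.executable is unset; fall back as appropriate. */
+   }
+   else {
+       Py_INCREF(executable);  /* keep a strong reference while using it */
+       /* ... use the path ... */
+       Py_DECREF(executable);
+   }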
-Build Changes -============= +* :term:`Soft deprecate ` the + :c:func:`PyModule_AddObject` function. + It should be replaced with :c:func:`PyModule_Add` + or :c:func:`PyModule_AddObjectRef`. + (Contributed by Serhiy Storchaka in :gh:`86493`.) -* The :file:`configure` option :option:`--with-system-libmpdec` now defaults - to ``yes``. The bundled copy of ``libmpdecimal`` will be removed in Python - 3.15. +* Deprecate the old ``Py_UNICODE`` and ``PY_UNICODE_TYPE`` types + and the :c:macro:`!Py_UNICODE_WIDE` define. + Use the :c:type:`wchar_t` type directly instead. + Since Python 3.3, ``Py_UNICODE`` and ``PY_UNICODE_TYPE`` + are just aliases to :c:type:`!wchar_t`. + (Contributed by Victor Stinner in :gh:`105156`.) -* Autoconf 2.71 and aclocal 1.16.4 are now required to regenerate - the :file:`configure` script. - (Contributed by Christian Heimes in :gh:`89886`.) +* Deprecate the :c:func:`PyWeakref_GetObject` and + :c:func:`PyWeakref_GET_OBJECT` functions, + which return a :term:`borrowed reference`. + Replace them with the new :c:func:`PyWeakref_GetRef` function, + which returns a :term:`strong reference`. + The `pythoncapi-compat project`_ can be used to get + :c:func:`PyWeakref_GetRef` on Python 3.12 and older. + (Contributed by Victor Stinner in :gh:`105927`.) -* SQLite 3.15.2 or newer is required to build the :mod:`sqlite3` extension module. - (Contributed by Erlend Aasland in :gh:`105875`.) +.. Add deprecations above alphabetically, not here at the end. -* Python built with :file:`configure` :option:`--with-trace-refs` (tracing - references) is now ABI compatible with the Python release build and - :ref:`debug build `. - (Contributed by Victor Stinner in :gh:`108634`.) +.. include:: ../deprecations/c-api-pending-removal-in-3.14.rst -* Building CPython now requires a compiler with support for the C11 atomic - library, GCC built-in atomic functions, or MSVC interlocked intrinsics. +.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst -* The ``errno``, ``fcntl``, ``grp``, ``md5``, ``pwd``, ``resource``, - ``termios``, ``winsound``, - ``_ctypes_test``, ``_multiprocessing.posixshmem``, ``_scproxy``, ``_stat``, - ``_statistics``, ``_testconsole``, ``_testimportmultiple`` and ``_uuid`` - C extensions are now built with the :ref:`limited C API `. - (Contributed by Victor Stinner in :gh:`85283`.) +.. include:: ../deprecations/c-api-pending-removal-in-future.rst + +.. _pythoncapi-compat project: https://github.com/python/pythoncapi-compat/ + +Build Changes +============= + +* ``arm64-apple-ios`` and ``arm64-apple-ios-simulator`` are both + now :pep:`11` tier 3 platforms. + (:ref:`PEP 730 ` written + and implementation contributed by Russell Keith-Magee in :gh:`114099`.) + +* ``aarch64-linux-android`` and ``x86_64-linux-android`` are both + now :pep:`11` tier 3 platforms. + (:ref:`PEP 738 ` written + and implementation contributed by Malcolm Smith in :gh:`116622`.) * ``wasm32-wasi`` is now a :pep:`11` tier 2 platform. (Contributed by Brett Cannon in :gh:`115192`.) @@ -2203,15 +2488,45 @@ Build Changes * ``wasm32-emscripten`` is no longer a :pep:`11` supported platform. (Contributed by Brett Cannon in :gh:`115192`.) -* Python now bundles the `mimalloc library `__. - It is licensed under the MIT license; see :ref:`mimalloc license `. +* Building CPython now requires a compiler with support for the C11 atomic + library, GCC built-in atomic functions, or MSVC interlocked intrinsics. + +* Autoconf 2.71 and aclocal 1.16.4 are now required to regenerate + the :file:`configure` script. 
+ (Contributed by Christian Heimes in :gh:`89886`.) + +* SQLite 3.15.2 or newer is required to build + the :mod:`sqlite3` extension module. + (Contributed by Erlend Aasland in :gh:`105875`.) + +* CPython now bundles the `mimalloc library`_ by default. + It is licensed under the MIT license; + see :ref:`mimalloc license `. The bundled mimalloc has custom changes, see :gh:`113141` for details. (Contributed by Dino Viehland in :gh:`109914`.) + .. _mimalloc library: https://github.com/microsoft/mimalloc/ + +* The :file:`configure` option :option:`--with-system-libmpdec` + now defaults to ``yes``. + The bundled copy of ``libmpdecimal`` will be removed in Python 3.15. + +* Python built with :file:`configure` :option:`--with-trace-refs` + (tracing references) is now ABI compatible with the Python release build + and :ref:`debug build `. + (Contributed by Victor Stinner in :gh:`108634`.) + * On POSIX systems, the pkg-config (``.pc``) filenames now include the ABI flags. For example, the free-threaded build generates ``python-3.13t.pc`` and the debug build generates ``python-3.13d.pc``. +* The ``errno``, ``fcntl``, ``grp``, ``md5``, ``pwd``, ``resource``, + ``termios``, ``winsound``, + ``_ctypes_test``, ``_multiprocessing.posixshmem``, ``_scproxy``, ``_stat``, + ``_statistics``, ``_testconsole``, ``_testimportmultiple`` and ``_uuid`` + C extensions are now built with the :ref:`limited C API `. + (Contributed by Victor Stinner in :gh:`85283`.) + Porting to Python 3.13 ====================== @@ -2222,84 +2537,89 @@ that may require changes to your code. Changes in the Python API ------------------------- -* An :exc:`OSError` is now raised by :func:`getpass.getuser` for any failure to - retrieve a username, instead of :exc:`ImportError` on non-Unix platforms or - :exc:`KeyError` on Unix platforms where the password database is empty. +.. _pep667-porting-notes-py: + +* :ref:`PEP 667 ` introduces several changes + to the semantics of :func:`locals` and :attr:`f_locals `: + + * Calling :func:`locals` in an :term:`optimized scope` now produces an + independent snapshot on each call, and hence no longer implicitly updates + previously returned references. Obtaining the legacy CPython behavior now + requires explicit calls to update the initially returned dictionary with the + results of subsequent calls to :func:`!locals`. Code execution functions that + implicitly target :func:`!locals` (such as ``exec`` and ``eval``) must be + passed an explicit namespace to access their results in an optimized scope. + (Changed as part of :pep:`667`.) + + * Calling :func:`locals` from a comprehension at module or class scope + (including via ``exec`` or ``eval``) once more behaves as if the comprehension + were running as an independent nested function (i.e. the local variables from + the containing scope are not included). In Python 3.12, this had changed + to include the local variables from the containing scope when implementing + :pep:`709`. (Changed as part of :pep:`667`.) + + * Accessing :attr:`FrameType.f_locals ` in an + :term:`optimized scope` now returns a write-through proxy rather than a + snapshot that gets updated at ill-specified times. If a snapshot is desired, + it must be created explicitly with ``dict`` or the proxy's ``.copy()`` method. + (Changed as part of :pep:`667`.) + +* :class:`functools.partial` now emits a :exc:`FutureWarning` + when used as a method. + The behavior will change in future Python versions. + Wrap it in :func:`staticmethod` if you want to preserve the old behavior. 
+ (Contributed by Serhiy Storchaka in :gh:`121027`.) -* The :mod:`threading` module now expects the :mod:`!_thread` module to have - an ``_is_main_interpreter`` attribute. It is a function with no - arguments that returns ``True`` if the current interpreter is the - main interpreter. +* An :exc:`OSError` is now raised by :func:`getpass.getuser` + for any failure to retrieve a username, + instead of :exc:`ImportError` on non-Unix platforms + or :exc:`KeyError` on Unix platforms where the password database is empty. - Any library or application that provides a custom ``_thread`` module - must provide ``_is_main_interpreter()``, just like the module's - other "private" attributes. - (See :gh:`112826`.) +* The value of the :attr:`!mode` attribute of :class:`gzip.GzipFile` + is now a string (``'rb'`` or ``'wb'``) instead of an integer (``1`` or ``2``). + The value of the :attr:`!mode` attribute of the readable file-like object + returned by :meth:`zipfile.ZipFile.open` is now ``'rb'`` instead of ``'r'``. + (Contributed by Serhiy Storchaka in :gh:`115961`.) -* :class:`mailbox.Maildir` now ignores files with a leading dot. +* :class:`mailbox.Maildir` now ignores files with a leading dot (``.``). (Contributed by Zackery Spytz in :gh:`65559`.) * :meth:`pathlib.Path.glob` and :meth:`~pathlib.Path.rglob` now return both - files and directories if a pattern that ends with "``**``" is given, rather - than directories only. Users may add a trailing slash to match only - directories. + files and directories if a pattern that ends with "``**``" is given, + rather than directories only. + Add a trailing slash to keep the previous behavior and only match directories. -* The value of the :attr:`!mode` attribute of :class:`gzip.GzipFile` was - changed from integer (``1`` or ``2``) to string (``'rb'`` or ``'wb'``). - The value of the :attr:`!mode` attribute of the readable file-like object - returned by :meth:`zipfile.ZipFile.open` was changed from ``'r'`` to ``'rb'``. - (Contributed by Serhiy Storchaka in :gh:`115961`.) - -* :class:`functools.partial` now emits a :exc:`FutureWarning` when it is - used as a method. - Its behavior will be changed in future Python versions. - Wrap it in :func:`staticmethod` if you want to preserve the old behavior. - (Contributed by Serhiy Storchaka in :gh:`121027`.) +* The :mod:`threading` module now expects the :mod:`!_thread` module + to have an :func:`!_is_main_interpreter` function. + This function takes no arguments and returns ``True`` + if the current interpreter is the main interpreter. -.. _pep667-porting-notes-py: + Any library or application that provides a custom :mod:`!_thread` module + must provide :func:`!_is_main_interpreter`, + just like the module's other "private" attributes. + (:gh:`112826`.) -* Calling :func:`locals` in an :term:`optimized scope` now produces an - independent snapshot on each call, and hence no longer implicitly updates - previously returned references. Obtaining the legacy CPython behaviour now - requires explicit calls to update the initially returned dictionary with the - results of subsequent calls to ``locals()``. Code execution functions that - implicitly target ``locals()`` (such as ``exec`` and ``eval``) must be - passed an explicit namespace to access their results in an optimized scope. - (Changed as part of :pep:`667`.) - -* Calling :func:`locals` from a comprehension at module or class scope - (including via ``exec`` or ``eval``) once more behaves as if the comprehension - were running as an independent nested function (i.e. 
the local variables from - the containing scope are not included). In Python 3.12, this had changed - to include the local variables from the containing scope when implementing - :pep:`709`. (Changed as part of :pep:`667`.) - -* Accessing :attr:`FrameType.f_locals ` in an - :term:`optimized scope` now returns a write-through proxy rather than a - snapshot that gets updated at ill-specified times. If a snapshot is desired, - it must be created explicitly with ``dict`` or the proxy's ``.copy()`` method. - (Changed as part of :pep:`667`.) Changes in the C API -------------------- * ``Python.h`` no longer includes the ```` standard header. It was - included for the ``finite()`` function which is now provided by the + included for the :c:func:`!finite` function which is now provided by the ```` header. It should now be included explicitly if needed. Remove also the ``HAVE_IEEEFP_H`` macro. (Contributed by Victor Stinner in :gh:`108765`.) * ``Python.h`` no longer includes these standard header files: ````, ```` and ````. If needed, they should now be - included explicitly. For example, ```` provides the ``clock()`` and - ``gmtime()`` functions, ```` provides the ``select()`` - function, and ```` provides the ``futimes()``, ``gettimeofday()`` - and ``setitimer()`` functions. + included explicitly. For example, ```` provides the :c:func:`!clock` and + :c:func:`!gmtime` functions, ```` provides the :c:func:`!select` + function, and ```` provides the :c:func:`!futimes`, :c:func:`!gettimeofday` + and :c:func:`!setitimer` functions. (Contributed by Victor Stinner in :gh:`108765`.) * On Windows, ``Python.h`` no longer includes the ```` standard header file. If needed, it should now be included explicitly. For example, it - provides ``offsetof()`` function, and ``size_t`` and ``ptrdiff_t`` types. + provides :c:func:`!offsetof` function, and ``size_t`` and ``ptrdiff_t`` types. Including ```` explicitly was already needed by all other platforms, the ``HAVE_STDDEF_H`` macro is only defined on Windows. (Contributed by Victor Stinner in :gh:`108765`.) @@ -2340,303 +2660,39 @@ Changes in the C API added in Python 3.8 and the old macros were deprecated in Python 3.11. (Contributed by Irit Katriel in :gh:`105111`.) -* Functions :c:func:`PyDict_GetItem`, :c:func:`PyDict_GetItemString`, - :c:func:`PyMapping_HasKey`, :c:func:`PyMapping_HasKeyString`, - :c:func:`PyObject_HasAttr`, :c:func:`PyObject_HasAttrString`, and - :c:func:`PySys_GetObject`, which clear all errors which occurred when calling - them, now report them using :func:`sys.unraisablehook`. - You may replace them with other functions as - recommended in the documentation. - (Contributed by Serhiy Storchaka in :gh:`106672`.) - -* :c:func:`!PyCode_GetFirstFree` is an unstable API now and has been renamed - to :c:func:`PyUnstable_Code_GetFirstFree`. - (Contributed by Bogdan Romanyuk in :gh:`115781`.) - .. _pep667-porting-notes-c: -* The effects of mutating the dictionary returned from :c:func:`PyEval_GetLocals` in an - :term:`optimized scope` have changed. New dict entries added this way will now *only* be - visible to subsequent :c:func:`PyEval_GetLocals` calls in that frame, as - :c:func:`PyFrame_GetLocals`, :func:`locals`, and - :attr:`FrameType.f_locals ` no longer access the same underlying cached - dictionary. Changes made to entries for actual variable names and names added via the - write-through proxy interfaces will be overwritten on subsequent calls to - :c:func:`PyEval_GetLocals` in that frame. 
The recommended code update depends on how the - function was being used, so refer to the deprecation notice on the function for details. - (Changed as part of :pep:`667`.) - -* Calling :c:func:`PyFrame_GetLocals` in an :term:`optimized scope` now returns a - write-through proxy rather than a snapshot that gets updated at ill-specified times. - If a snapshot is desired, it must be created explicitly (e.g. with :c:func:`PyDict_Copy`) - or by calling the new :c:func:`PyEval_GetFrameLocals` API. (Changed as part of :pep:`667`.) - -* :c:func:`!PyFrame_FastToLocals` and :c:func:`!PyFrame_FastToLocalsWithError` - no longer have any effect. Calling these functions has been redundant since - Python 3.11, when :c:func:`PyFrame_GetLocals` was first introduced. - (Changed as part of :pep:`667`.) - -* :c:func:`!PyFrame_LocalsToFast` no longer has any effect. Calling this function - is redundant now that :c:func:`PyFrame_GetLocals` returns a write-through proxy - for :term:`optimized scopes `. (Changed as part of :pep:`667`.) - -Removed C APIs --------------- - -* Remove many APIs (functions, macros, variables) with names prefixed by - ``_Py`` or ``_PY`` (considered as private API). If your project is affected - by one of these removals and you consider that the removed API should remain - available, please open a new issue to request a public C API and - add ``cc @vstinner`` to the issue to notify Victor Stinner. - (Contributed by Victor Stinner in :gh:`106320`.) - -* Remove functions deprecated in Python 3.9: - - * ``PyEval_CallObject()``, ``PyEval_CallObjectWithKeywords()``: use - :c:func:`PyObject_CallNoArgs` or :c:func:`PyObject_Call` instead. - Warning: :c:func:`PyObject_Call` positional arguments must be a - :class:`tuple` and must not be ``NULL``, keyword arguments must be a - :class:`dict` or ``NULL``, whereas removed functions checked arguments type - and accepted ``NULL`` positional and keyword arguments. - To replace ``PyEval_CallObjectWithKeywords(func, NULL, kwargs)`` with - :c:func:`PyObject_Call`, pass an empty tuple as positional arguments using - :c:func:`PyTuple_New(0) `. - * ``PyEval_CallFunction()``: use :c:func:`PyObject_CallFunction` instead. - * ``PyEval_CallMethod()``: use :c:func:`PyObject_CallMethod` instead. - * ``PyCFunction_Call()``: use :c:func:`PyObject_Call` instead. - - (Contributed by Victor Stinner in :gh:`105107`.) - -* Remove old buffer protocols deprecated in Python 3.0. Use :ref:`bufferobjects` instead. - - * :c:func:`!PyObject_CheckReadBuffer`: Use :c:func:`PyObject_CheckBuffer` to - test if the object supports the buffer protocol. - Note that :c:func:`PyObject_CheckBuffer` doesn't guarantee that - :c:func:`PyObject_GetBuffer` will succeed. - To test if the object is actually readable, see the next example - of :c:func:`PyObject_GetBuffer`. - - * :c:func:`!PyObject_AsCharBuffer`, :c:func:`!PyObject_AsReadBuffer`: - :c:func:`PyObject_GetBuffer` and :c:func:`PyBuffer_Release` instead: - - .. code-block:: c - - Py_buffer view; - if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) < 0) { - return NULL; - } - // Use `view.buf` and `view.len` to read from the buffer. - // You may need to cast buf as `(const char*)view.buf`. - PyBuffer_Release(&view); - - * :c:func:`!PyObject_AsWriteBuffer`: Use - :c:func:`PyObject_GetBuffer` and :c:func:`PyBuffer_Release` instead: - - .. code-block:: c - - Py_buffer view; - if (PyObject_GetBuffer(obj, &view, PyBUF_WRITABLE) < 0) { - return NULL; - } - // Use `view.buf` and `view.len` to write to the buffer. 
- PyBuffer_Release(&view); - - (Contributed by Inada Naoki in :gh:`85275`.) - -* Remove the following old functions to configure the Python initialization, - deprecated in Python 3.11: - - * ``PySys_AddWarnOptionUnicode()``: use :c:member:`PyConfig.warnoptions` instead. - * ``PySys_AddWarnOption()``: use :c:member:`PyConfig.warnoptions` instead. - * ``PySys_AddXOption()``: use :c:member:`PyConfig.xoptions` instead. - * ``PySys_HasWarnOptions()``: use :c:member:`PyConfig.xoptions` instead. - * ``PySys_SetPath()``: set :c:member:`PyConfig.module_search_paths` instead. - * ``Py_SetPath()``: set :c:member:`PyConfig.module_search_paths` instead. - * ``Py_SetStandardStreamEncoding()``: set :c:member:`PyConfig.stdio_encoding` - instead, and set also maybe :c:member:`PyConfig.legacy_windows_stdio` (on - Windows). - * ``_Py_SetProgramFullPath()``: set :c:member:`PyConfig.executable` instead. - - Use the new :c:type:`PyConfig` API of the :ref:`Python Initialization - Configuration ` instead (:pep:`587`), added to Python 3.8. - (Contributed by Victor Stinner in :gh:`105145`.) - -* Remove ``PyEval_ThreadsInitialized()`` - function, deprecated in Python 3.9. Since Python 3.7, ``Py_Initialize()`` - always creates the GIL: calling ``PyEval_InitThreads()`` does nothing and - ``PyEval_ThreadsInitialized()`` always returned non-zero. - (Contributed by Victor Stinner in :gh:`105182`.) - -* Remove ``PyEval_AcquireLock()`` and ``PyEval_ReleaseLock()`` functions, - deprecated in Python 3.2. They didn't update the current thread state. - They can be replaced with: - - * :c:func:`PyEval_SaveThread` and :c:func:`PyEval_RestoreThread`; - * low-level :c:func:`PyEval_AcquireThread` and :c:func:`PyEval_RestoreThread`; - * or :c:func:`PyGILState_Ensure` and :c:func:`PyGILState_Release`. - - (Contributed by Victor Stinner in :gh:`105182`.) - -* Remove private ``_PyObject_FastCall()`` function: - use ``PyObject_Vectorcall()`` which is available since Python 3.8 - (:pep:`590`). - (Contributed by Victor Stinner in :gh:`106023`.) - -* Remove ``cpython/pytime.h`` header file: it only contained private functions. - (Contributed by Victor Stinner in :gh:`106316`.) - -* Remove ``_PyInterpreterState_Get()`` alias to - :c:func:`PyInterpreterState_Get()` which was kept for backward compatibility - with Python 3.8. The `pythoncapi-compat project - `__ can be used to get - :c:func:`PyInterpreterState_Get()` on Python 3.8 and older. - (Contributed by Victor Stinner in :gh:`106320`.) - -* The :c:func:`PyModule_AddObject` function is now :term:`soft deprecated`: - :c:func:`PyModule_Add` or :c:func:`PyModule_AddObjectRef` functions should - be used instead. - (Contributed by Serhiy Storchaka in :gh:`86493`.) - -Deprecated C APIs ------------------ - -* Deprecate the old ``Py_UNICODE`` and ``PY_UNICODE_TYPE`` types: use directly - the :c:type:`wchar_t` type instead. Since Python 3.3, ``Py_UNICODE`` and - ``PY_UNICODE_TYPE`` are just aliases to :c:type:`wchar_t`. - (Contributed by Victor Stinner in :gh:`105156`.) - -* Deprecate old Python initialization functions: - - * :c:func:`PySys_ResetWarnOptions`: - clear :data:`sys.warnoptions` and :data:`!warnings.filters` instead. - * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` instead. - * :c:func:`Py_GetPath`: get :data:`sys.path` instead. - * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` instead. - * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` instead. - * :c:func:`Py_GetProgramName`: get :data:`sys.executable` instead. 
- * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or - :envvar:`PYTHONHOME` environment variable instead. - - Functions scheduled for removal in Python 3.15. - (Contributed by Victor Stinner in :gh:`105145`.) - -* Deprecate the :c:func:`PyImport_ImportModuleNoBlock` function which is just - an alias to :c:func:`PyImport_ImportModule` since Python 3.3. - Scheduled for removal in Python 3.15. - (Contributed by Victor Stinner in :gh:`105396`.) - -* Deprecate the :c:func:`PyWeakref_GetObject` and - :c:func:`PyWeakref_GET_OBJECT` functions, which return a :term:`borrowed - reference`: use the new :c:func:`PyWeakref_GetRef` function instead, it - returns a :term:`strong reference`. The `pythoncapi-compat project - `__ can be used to get - :c:func:`PyWeakref_GetRef` on Python 3.12 and older. - (Contributed by Victor Stinner in :gh:`105927`.) - -* Deprecate the :c:func:`PyEval_GetBuiltins`, :c:func:`PyEval_GetGlobals`, and - :c:func:`PyEval_GetLocals` functions, which return a :term:`borrowed reference`. - Refer to the deprecation notices on each function for their recommended replacements. - (Soft deprecated as part of :pep:`667`.) - -Pending Removal in Python 3.14 ------------------------------- - -* Creating immutable types (:c:macro:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable - bases using the C API. - -* Functions to configure the Python initialization, deprecated in Python 3.11: - - * ``PySys_SetArgvEx()``: set :c:member:`PyConfig.argv` instead. - * ``PySys_SetArgv()``: set :c:member:`PyConfig.argv` instead. - * ``Py_SetProgramName()``: set :c:member:`PyConfig.program_name` instead. - * ``Py_SetPythonHome()``: set :c:member:`PyConfig.home` instead. - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. 
- -* Global configuration variables: - - * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` - * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` - * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` - * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` - * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` - * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` - * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` - * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` - * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` - * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` - * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` - * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` - * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` - * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` - and :c:member:`PyConfig.hash_seed` - * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` - * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` - * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` - * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` - * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`) - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. - -Pending Removal in Python 3.15 ------------------------------- - -* The bundled copy of ``libmpdecimal``. -* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule`. -* :c:func:`PyWeakref_GET_OBJECT`: use :c:func:`PyWeakref_GetRef` instead. -* :c:func:`PyWeakref_GetObject`: use :c:func:`PyWeakref_GetRef` instead. -* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` instead. -* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` instead. -* Python initialization functions: - - * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and - :data:`!warnings.filters` instead. - * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` instead. - * :c:func:`Py_GetPath`: get :data:`sys.path` instead. - * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` instead. - * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` instead. - * :c:func:`Py_GetProgramName`: get :data:`sys.executable` instead. - * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or - :envvar:`PYTHONHOME` environment variable instead. - -Pending Removal in Future Versions ----------------------------------- - -The following APIs were deprecated in earlier Python versions and will be -removed, although there is currently no date scheduled for their removal. - -* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: no needed since Python 3.8. -* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException`. -* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException`. -* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException`. -* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject`. -* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child()`. -* :c:func:`PySlice_GetIndicesEx`. 
-* :c:func:`!PyUnicode_AsDecodedObject`. -* :c:func:`!PyUnicode_AsDecodedUnicode`. -* :c:func:`!PyUnicode_AsEncodedObject`. -* :c:func:`!PyUnicode_AsEncodedUnicode`. -* :c:func:`PyUnicode_READY`: not needed since Python 3.12. -* :c:func:`!_PyErr_ChainExceptions`. -* :c:member:`!PyBytesObject.ob_shash` member: - call :c:func:`PyObject_Hash` instead. -* :c:member:`!PyDictObject.ma_version_tag` member. -* TLS API: - - * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc`. - * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free`. - * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set`. - * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get`. - * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete`. - * :c:func:`PyThread_ReInitTLS`: no longer needed. - -* Remove undocumented ``PY_TIMEOUT_MAX`` constant from the limited C API. - (Contributed by Victor Stinner in :gh:`110014`.) - +* :ref:`PEP 667 ` introduces several changes + to frame-related functions: + + * The effects of mutating the dictionary returned from + :c:func:`PyEval_GetLocals` in an :term:`optimized scope` have changed. + New dict entries added this way will now *only* be visible to + subsequent :c:func:`PyEval_GetLocals` calls in that frame, + as :c:func:`PyFrame_GetLocals`, :func:`locals`, + and :attr:`FrameType.f_locals ` no longer access + the same underlying cached dictionary. + Changes made to entries for actual variable names and names added via + the write-through proxy interfaces will be overwritten on subsequent calls + to :c:func:`PyEval_GetLocals` in that frame. + The recommended code update depends on how the function was being used, + so refer to the deprecation notice on the function for details. + + * Calling :c:func:`PyFrame_GetLocals` in an :term:`optimized scope` + now returns a write-through proxy rather than a snapshot + that gets updated at ill-specified times. + If a snapshot is desired, it must be created explicitly + (e.g. with :c:func:`PyDict_Copy`), + or by calling the new :c:func:`PyEval_GetFrameLocals` API. + + * :c:func:`!PyFrame_FastToLocals` and :c:func:`!PyFrame_FastToLocalsWithError` + no longer have any effect. + Calling these functions has been redundant since Python 3.11, + when :c:func:`PyFrame_GetLocals` was first introduced. + + * :c:func:`!PyFrame_LocalsToFast` no longer has any effect. + Calling this function is redundant now that :c:func:`PyFrame_GetLocals` + returns a write-through proxy for :term:`optimized scopes `. Regression Test Changes ======================= diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst index a6b38207b70..c09fa839886 100644 --- a/Doc/whatsnew/3.2.rst +++ b/Doc/whatsnew/3.2.rst @@ -531,7 +531,7 @@ Some smaller changes made to the core Python language are: (Proposed and implemented by Mark Dickinson; :issue:`9337`.) -* :class:`memoryview` objects now have a :meth:`~memoryview.release()` method +* :class:`memoryview` objects now have a :meth:`~memoryview.release` method and they also now support the context management protocol. This allows timely release of any resources that were acquired when requesting a buffer from the original object. @@ -1312,7 +1312,7 @@ An early decision to limit the interoperability of various numeric types has been relaxed. It is still unsupported (and ill-advised) to have implicit mixing in arithmetic expressions such as ``Decimal('1.1') + float('1.1')`` because the latter loses information in the process of constructing the binary -float. 
However, since existing floating point value can be converted losslessly +float. However, since existing floating-point value can be converted losslessly to either a decimal or rational representation, it makes sense to add them to the constructor and to support mixed-type comparisons. @@ -1325,7 +1325,7 @@ the constructor and to support mixed-type comparisons. and :class:`fractions.Fraction` (:issue:`2531` and :issue:`8188`). Similar changes were made to :class:`fractions.Fraction` so that the -:meth:`~fractions.Fraction.from_float()` and :meth:`~fractions.Fraction.from_decimal` +:meth:`~fractions.Fraction.from_float` and :meth:`~fractions.Fraction.from_decimal` methods are no longer needed (:issue:`8294`): >>> from decimal import Decimal @@ -1622,7 +1622,7 @@ socket The :mod:`socket` module has two new improvements. -* Socket objects now have a :meth:`~socket.socket.detach()` method which puts +* Socket objects now have a :meth:`~socket.socket.detach` method which puts the socket into closed state without actually closing the underlying file descriptor. The latter can then be reused for other purposes. (Added by Antoine Pitrou; :issue:`8524`.) @@ -1650,7 +1650,7 @@ for secure (encrypted, authenticated) internet connections: * The :func:`ssl.wrap_socket() ` constructor function now takes a *ciphers* argument. The *ciphers* string lists the allowed encryption algorithms using the format described in the `OpenSSL documentation - `__. + `__. * When linked against recent versions of OpenSSL, the :mod:`ssl` module now supports the Server Name Indication extension to the TLS protocol, allowing @@ -1859,11 +1859,11 @@ asyncore -------- :class:`!asyncore.dispatcher` now provides a -:meth:`!handle_accepted()` method +:meth:`!handle_accepted` method returning a ``(sock, addr)`` pair which is called when a connection has actually been established with a new remote endpoint. This is supposed to be used as a -replacement for old :meth:`!handle_accept()` and avoids -the user to call :meth:`!accept()` directly. +replacement for old :meth:`!handle_accept` and avoids +the user to call :meth:`!accept` directly. (Contributed by Giampaolo Rodolà; :issue:`6706`.) @@ -2321,7 +2321,7 @@ Multi-threading intervals and reduced overhead due to lock contention and the number of ensuing system calls. The notion of a "check interval" to allow thread switches has been abandoned and replaced by an absolute duration expressed in - seconds. This parameter is tunable through :func:`sys.setswitchinterval()`. + seconds. This parameter is tunable through :func:`sys.setswitchinterval`. It currently defaults to 5 milliseconds. Additional details about the implementation can be read from a `python-dev diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 29b4034e328..f814c4e90d5 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -549,9 +549,11 @@ separation of binary and text data). PEP 3155: Qualified name for classes and functions ================================================== -Functions and class objects have a new ``__qualname__`` attribute representing +Functions and class objects have a new :attr:`~definition.__qualname__` +attribute representing the "path" from the module top-level to their definition. For global functions -and classes, this is the same as ``__name__``. For other functions and classes, +and classes, this is the same as :attr:`~definition.__name__`. 
+For other functions and classes, it provides better information about where they were actually defined, and how they might be accessible from the global scope. @@ -779,8 +781,8 @@ Other Language Changes Some smaller changes made to the core Python language are: * Added support for Unicode name aliases and named sequences. - Both :func:`unicodedata.lookup()` and ``'\N{...}'`` now resolve name aliases, - and :func:`unicodedata.lookup()` resolves named sequences too. + Both :func:`unicodedata.lookup` and ``'\N{...}'`` now resolve name aliases, + and :func:`unicodedata.lookup` resolves named sequences too. (Contributed by Ezio Melotti in :issue:`12753`.) @@ -1097,12 +1099,12 @@ decimal C-module and libmpdec written by Stefan Krah. The new C version of the decimal module integrates the high speed libmpdec -library for arbitrary precision correctly rounded decimal floating point +library for arbitrary precision correctly rounded decimal floating-point arithmetic. libmpdec conforms to IBM's General Decimal Arithmetic Specification. Performance gains range from 10x for database applications to 100x for numerically intensive applications. These numbers are expected gains -for standard precisions used in decimal floating point arithmetic. Since +for standard precisions used in decimal floating-point arithmetic. Since the precision is user configurable, the exact figures may vary. For example, in integer bignum arithmetic the differences can be significantly higher. diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 8aef0f5ac26..71425120c37 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -215,13 +215,12 @@ automatic ``PATH`` modifications to have ``pip`` available from the command line by default, otherwise it can still be accessed through the Python launcher for Windows as ``py -m pip``. -As `discussed in the PEP`__, platform packagers may choose not to install +As :pep:`discussed in the PEP <0453#recommendations-for-downstream-distributors>` +platform packagers may choose not to install these commands by default, as long as, when invoked, they provide clear and simple directions on how to install them on that platform (usually using the system package manager). -__ https://peps.python.org/pep-0453/#recommendations-for-downstream-distributors - .. note:: To avoid conflicts between parallel Python 2 and Python 3 installations, @@ -1495,7 +1494,7 @@ The dictionary returned by :meth:`.SSLSocket.getpeercert` contains additional stat ---- -The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C +The :mod:`stat` module is now backed by a C implementation in :mod:`!_stat`. A C implementation is required as most of the values aren't standardized and are platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) @@ -1963,11 +1962,11 @@ Other Improvements `_ will build python, run the test suite, and generate an HTML coverage report for the C codebase using ``gcov`` and `lcov - `_. + `_. * The ``-R`` option to the :ref:`python regression test suite ` now also checks for memory allocation leaks, using - :func:`sys.getallocatedblocks()`. (Contributed by Antoine Pitrou in + :func:`sys.getallocatedblocks`. (Contributed by Antoine Pitrou in :issue:`13390`.) * ``python -m`` now works with namespace packages. 
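A minimal sketch of the kind of before/after comparison that leak checking with :func:`sys.getallocatedblocks` relies on; the helper name and the sample workload below are illustrative only, not part of the patch:

.. code-block:: python

   import sys

   def allocated_delta(func, runs=1000):
       # Compare the number of live memory blocks before and after calling
       # *func* many times; a delta that keeps growing across repeated
       # measurements hints at a reference or memory leak.
       before = sys.getallocatedblocks()
       for _ in range(runs):
           func()
       after = sys.getallocatedblocks()
       return after - before

   # Small fluctuations are normal (caches, interned objects, free lists),
   # which is why the real ``-R`` machinery repeats tests before reporting.
   print(allocated_delta(lambda: str(12345) * 2))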
diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index cd8a903327c..d4ae6f1f45d 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -1667,7 +1667,7 @@ Both the :class:`!SMTPServer` and :class:`!SMTPChannel` classes now accept a *decode_data* keyword argument to determine if the ``DATA`` portion of the SMTP transaction is decoded using the ``"utf-8"`` codec or is instead provided to the -:meth:`!SMTPServer.process_message()` +:meth:`!SMTPServer.process_message` method as a byte string. The default is ``True`` for backward compatibility reasons, but will change to ``False`` in Python 3.6. If *decode_data* is set to ``False``, the ``process_message`` method must be prepared to accept keyword @@ -1677,14 +1677,14 @@ arguments. The :class:`!SMTPServer` class now advertises the ``8BITMIME`` extension (:rfc:`6152`) if *decode_data* has been set ``True``. If the client specifies ``BODY=8BITMIME`` on the ``MAIL`` command, it is passed to -:meth:`!SMTPServer.process_message()` +:meth:`!SMTPServer.process_message` via the *mail_options* keyword. (Contributed by Milan Oberkirch and R. David Murray in :issue:`21795`.) The :class:`!SMTPServer` class now also supports the ``SMTPUTF8`` extension (:rfc:`6531`: Internationalized Email). If the client specified ``SMTPUTF8 BODY=8BITMIME`` on the ``MAIL`` command, they are passed to -:meth:`!SMTPServer.process_message()` +:meth:`!SMTPServer.process_message` via the *mail_options* keyword. It is the responsibility of the ``process_message`` method to correctly handle the ``SMTPUTF8`` data. (Contributed by Milan Oberkirch in :issue:`21725`.) @@ -1935,8 +1935,8 @@ specifying the namespace in which the code will be running. tkinter ------- -The :mod:`tkinter._fix` module used for setting up the Tcl/Tk environment -on Windows has been replaced by a private function in the :mod:`_tkinter` +The :mod:`!tkinter._fix` module used for setting up the Tcl/Tk environment +on Windows has been replaced by a private function in the :mod:`!_tkinter` module which makes no permanent changes to environment variables. (Contributed by Zachary Ware in :issue:`20035`.) @@ -2405,7 +2405,7 @@ Changes in the Python API error-prone and has been removed in Python 3.5. See :issue:`13936` for full details. -* The :meth:`ssl.SSLSocket.send()` method now raises either +* The :meth:`ssl.SSLSocket.send` method now raises either :exc:`ssl.SSLWantReadError` or :exc:`ssl.SSLWantWriteError` on a non-blocking socket if the operation would block. Previously, it would return ``0``. (Contributed by Nikolaus Rath in :issue:`20951`.) @@ -2526,9 +2526,9 @@ Changes in the C API to format the :func:`repr` of the object. (Contributed by Serhiy Storchaka in :issue:`22453`.) -* Because the lack of the :attr:`__module__` attribute breaks pickling and +* Because the lack of the :attr:`~type.__module__` attribute breaks pickling and introspection, a deprecation warning is now raised for builtin types without - the :attr:`__module__` attribute. This would be an AttributeError in + the :attr:`~type.__module__` attribute. This will be an :exc:`AttributeError` in the future. (Contributed by Serhiy Storchaka in :issue:`20204`.) diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst index 68ab43462b7..2276fed60c8 100644 --- a/Doc/whatsnew/3.6.rst +++ b/Doc/whatsnew/3.6.rst @@ -261,7 +261,7 @@ allowed. 
The :ref:`string formatting ` language also now has support for the ``'_'`` option to signal the use of an underscore for a thousands -separator for floating point presentation types and for integer +separator for floating-point presentation types and for integer presentation type ``'d'``. For integer presentation types ``'b'``, ``'o'``, ``'x'``, and ``'X'``, underscores will be inserted every 4 digits:: @@ -511,10 +511,10 @@ correct. Prior to Python 3.6, data loss could result when using bytes paths on Windows. With this change, using bytes to represent paths is now supported on Windows, provided those bytes are encoded with the encoding returned by -:func:`sys.getfilesystemencoding()`, which now defaults to ``'utf-8'``. +:func:`sys.getfilesystemencoding`, which now defaults to ``'utf-8'``. Applications that do not use str to represent paths should use -:func:`os.fsencode()` and :func:`os.fsdecode()` to ensure their bytes are +:func:`os.fsencode` and :func:`os.fsdecode` to ensure their bytes are correctly encoded. To revert to the previous behaviour, set :envvar:`PYTHONLEGACYWINDOWSFSENCODING` or call :func:`sys._enablelegacywindowsfsencoding`. @@ -549,7 +549,7 @@ PEP 520: Preserving Class Attribute Definition Order Attributes in a class definition body have a natural ordering: the same order in which the names appear in the source. This order is now -preserved in the new class's :attr:`~object.__dict__` attribute. +preserved in the new class's :attr:`~type.__dict__` attribute. Also, the effective default class *execution* namespace (returned from :ref:`type.__prepare__() `) is now an insertion-order-preserving @@ -780,7 +780,7 @@ for managing secrets, such as account authentication, tokens, and similar. Note that the pseudo-random generators in the :mod:`random` module should *NOT* be used for security purposes. Use :mod:`secrets` - on Python 3.6+ and :func:`os.urandom()` on Python 3.5 and earlier. + on Python 3.6+ and :func:`os.urandom` on Python 3.5 and earlier. .. seealso:: @@ -934,7 +934,7 @@ asynchronous generators. The :func:`~collections.namedtuple` function now accepts an optional keyword argument *module*, which, when specified, is used for -the ``__module__`` attribute of the returned named tuple class. +the :attr:`~type.__module__` attribute of the returned named tuple class. (Contributed by Raymond Hettinger in :issue:`17941`.) The *verbose* and *rename* arguments for @@ -1316,7 +1316,7 @@ Storchaka in :issue:`24164`.) pickletools ----------- -:func:`pickletools.dis()` now outputs the implicit memo index for the +:func:`pickletools.dis` now outputs the implicit memo index for the ``MEMOIZE`` opcode. (Contributed by Serhiy Storchaka in :issue:`25382`.) diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst index 69d043bcf7e..2d433ef4759 100644 --- a/Doc/whatsnew/3.7.rst +++ b/Doc/whatsnew/3.7.rst @@ -339,7 +339,7 @@ PEP 564: New Time Functions With Nanosecond Resolution ------------------------------------------------------ The resolution of clocks in modern systems can exceed the limited precision -of a floating point number returned by the :func:`time.time` function +of a floating-point number returned by the :func:`time.time` function and its variants. To avoid loss of precision, :pep:`564` adds six new "nanosecond" variants of the existing timer functions to the :mod:`time` module: @@ -353,7 +353,7 @@ module: The new functions return the number of nanoseconds as an integer value. 
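A minimal sketch of the difference between the float-returning clock and its nanosecond variant, assuming nothing beyond the :mod:`time` module (the printed values are placeholders):

.. code-block:: python

   import time

   # time.time() returns seconds since the epoch as a float; the float
   # mantissa cannot represent every nanosecond of a present-day timestamp.
   seconds = time.time()

   # time.time_ns() returns an integer count of nanoseconds since the epoch,
   # so nothing is lost to floating-point rounding.
   nanoseconds = time.time_ns()

   print(seconds)                      # e.g. 1718031415.1234567
   print(nanoseconds)                  # e.g. 1718031415123456789
   print(nanoseconds / 1_000_000_000)  # convert back to float seconds if needed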
-`Measurements `_ +:pep:`Measurements <0564#annex-clocks-resolution-in-python>` show that on Linux and Windows the resolution of :func:`time.time_ns` is approximately 3 times better than that of :func:`time.time`. @@ -603,7 +603,7 @@ The new :mod:`importlib.resources` module provides several new APIs and one new ABC for access to, opening, and reading *resources* inside packages. Resources are roughly similar to files inside packages, but they needn't be actual files on the physical file system. Module loaders can provide a -:meth:`get_resource_reader()` function which returns +:meth:`get_resource_reader` function which returns a :class:`importlib.abc.ResourceReader` instance to support this new API. Built-in file path loaders and zip file loaders both support this. @@ -1133,7 +1133,7 @@ The MIME type of .bmp has been changed from ``'image/x-ms-bmp'`` to msilib ------ -The new :meth:`!Database.Close()` method can be used +The new :meth:`!Database.Close` method can be used to close the :abbr:`MSI` database. (Contributed by Berker Peksag in :issue:`20486`.) @@ -2017,11 +2017,11 @@ importlib --------- Methods -:meth:`!MetaPathFinder.find_module()` +:meth:`!MetaPathFinder.find_module` (replaced by :meth:`MetaPathFinder.find_spec() `) and -:meth:`!PathEntryFinder.find_loader()` +:meth:`!PathEntryFinder.find_loader` (replaced by :meth:`PathEntryFinder.find_spec() `) both deprecated in Python 3.4 now emit :exc:`DeprecationWarning`. @@ -2048,7 +2048,7 @@ The :mod:`macpath` is now deprecated and will be removed in Python 3.8. threading --------- -:mod:`dummy_threading` and :mod:`_dummy_thread` have been deprecated. It is +:mod:`!dummy_threading` and :mod:`!_dummy_thread` have been deprecated. It is no longer possible to build Python with threading disabled. Use :mod:`threading` instead. (Contributed by Antoine Pitrou in :issue:`31370`.) @@ -2184,7 +2184,7 @@ The following features and APIs have been removed from Python 3.7: ``socket.socketpair`` on Python 3.5 and newer. * :mod:`asyncio` no longer exports the :mod:`selectors` and - :mod:`_overlapped` modules as ``asyncio.selectors`` and + :mod:`!_overlapped` modules as ``asyncio.selectors`` and ``asyncio._overlapped``. Replace ``from asyncio import selectors`` with ``import selectors``. @@ -2366,7 +2366,7 @@ Changes in the Python API positions 2--3. To match only blank lines, the pattern should be rewritten as ``r'(?m)^[^\S\n]*$'``. - :func:`re.sub()` now replaces empty matches adjacent to a previous + :func:`re.sub` now replaces empty matches adjacent to a previous non-empty match. For example ``re.sub('x*', '-', 'abxd')`` returns now ``'-a-b--d-'`` instead of ``'-a-b-d-'`` (the first minus between 'b' and 'd' replaces 'x', and the second minus replaces an empty string between @@ -2425,7 +2425,7 @@ Changes in the Python API to :meth:`ArgumentParser.add_subparsers() `. (Contributed by Anthony Sottile in :issue:`26510`.) -* :meth:`ast.literal_eval()` is now stricter. Addition and subtraction of +* :meth:`ast.literal_eval` is now stricter. Addition and subtraction of arbitrary numbers are no longer allowed. (Contributed by Serhiy Storchaka in :issue:`31778`.) diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 1356f24547b..d0e60bc280a 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -936,7 +936,7 @@ Add option ``--json-lines`` to parse every input line as a separate JSON object. 
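A rough Python equivalent of what the ``--json-lines`` option does, assuming each input line holds one JSON document; this is an illustrative sketch, not the module's actual implementation:

.. code-block:: python

   import json
   import sys

   # Mirror `python -m json.tool --json-lines`: parse every input line as an
   # independent JSON document instead of reading the stream as one value.
   for line in sys.stdin:
       line = line.strip()
       if not line:
           continue
       json.dump(json.loads(line), sys.stdout, indent=4)
       sys.stdout.write("\n")

For example, piping two one-line objects through ``python -m json.tool --json-lines`` pretty-prints each of them separately rather than failing on the second document.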
logging ------- -Added a *force* keyword argument to :func:`logging.basicConfig()` +Added a *force* keyword argument to :func:`logging.basicConfig` When set to true, any existing handlers attached to the root logger are removed and closed before carrying out the configuration specified by the other arguments. @@ -1077,16 +1077,16 @@ pathlib ------- :mod:`pathlib.Path` methods that return a boolean result like -:meth:`~pathlib.Path.exists()`, :meth:`~pathlib.Path.is_dir()`, -:meth:`~pathlib.Path.is_file()`, :meth:`~pathlib.Path.is_mount()`, -:meth:`~pathlib.Path.is_symlink()`, :meth:`~pathlib.Path.is_block_device()`, -:meth:`~pathlib.Path.is_char_device()`, :meth:`~pathlib.Path.is_fifo()`, -:meth:`~pathlib.Path.is_socket()` now return ``False`` instead of raising +:meth:`~pathlib.Path.exists`, :meth:`~pathlib.Path.is_dir`, +:meth:`~pathlib.Path.is_file`, :meth:`~pathlib.Path.is_mount`, +:meth:`~pathlib.Path.is_symlink`, :meth:`~pathlib.Path.is_block_device`, +:meth:`~pathlib.Path.is_char_device`, :meth:`~pathlib.Path.is_fifo`, +:meth:`~pathlib.Path.is_socket` now return ``False`` instead of raising :exc:`ValueError` or its subclass :exc:`UnicodeEncodeError` for paths that contain characters unrepresentable at the OS level. (Contributed by Serhiy Storchaka in :issue:`33721`.) -Added :meth:`!pathlib.Path.link_to()` which creates a hard link pointing +Added :meth:`!pathlib.Path.link_to` which creates a hard link pointing to a path. (Contributed by Joannah Nanjekye in :issue:`26978`) Note that ``link_to`` was deprecated in 3.10 and removed in 3.12 in @@ -1170,13 +1170,13 @@ recursively removing their contents first. socket ------ -Added :meth:`~socket.create_server()` and :meth:`~socket.has_dualstack_ipv6()` +Added :meth:`~socket.create_server` and :meth:`~socket.has_dualstack_ipv6` convenience functions to automate the necessary tasks usually involved when creating a server socket, including accepting both IPv4 and IPv6 connections on the same socket. (Contributed by Giampaolo Rodolà in :issue:`17561`.) -The :func:`socket.if_nameindex()`, :func:`socket.if_nametoindex()`, and -:func:`socket.if_indextoname()` functions have been implemented on Windows. +The :func:`socket.if_nameindex`, :func:`socket.if_nametoindex`, and +:func:`socket.if_indextoname` functions have been implemented on Windows. (Contributed by Zackery Spytz in :issue:`37007`.) @@ -1192,11 +1192,11 @@ post-handshake authentication. statistics ---------- -Added :func:`statistics.fmean` as a faster, floating point variant of -:func:`statistics.mean()`. (Contributed by Raymond Hettinger and +Added :func:`statistics.fmean` as a faster, floating-point variant of +:func:`statistics.mean`. (Contributed by Raymond Hettinger and Steven D'Aprano in :issue:`35904`.) -Added :func:`statistics.geometric_mean()` +Added :func:`statistics.geometric_mean` (Contributed by Raymond Hettinger in :issue:`27181`.) Added :func:`statistics.multimode` that returns a list of the most @@ -1367,10 +1367,10 @@ Added :class:`~unittest.mock.AsyncMock` to support an asynchronous version of have been added as well. (Contributed by Lisa Roach in :issue:`26467`). -Added :func:`~unittest.addModuleCleanup()` and -:meth:`~unittest.TestCase.addClassCleanup()` to unittest to support -cleanups for :func:`~unittest.setUpModule()` and -:meth:`~unittest.TestCase.setUpClass()`. +Added :func:`~unittest.addModuleCleanup` and +:meth:`~unittest.TestCase.addClassCleanup` to unittest to support +cleanups for :func:`~unittest.setUpModule` and +:meth:`~unittest.TestCase.setUpClass`. 
(Contributed by Lisa Roach in :issue:`24412`.) Several mock assert functions now also print a list of actual calls upon @@ -1432,7 +1432,7 @@ and ``{namespace}*`` which returns all tags in the given namespace. (Contributed by Stefan Behnel in :issue:`28238`.) The :mod:`xml.etree.ElementTree` module provides a new function -:func:`–xml.etree.ElementTree.canonicalize()` that implements C14N 2.0. +:func:`–xml.etree.ElementTree.canonicalize` that implements C14N 2.0. (Contributed by Stefan Behnel in :issue:`13611`.) The target object of :class:`xml.etree.ElementTree.XMLParser` can @@ -1712,7 +1712,7 @@ Deprecated the ``l*gettext()`` functions. (Contributed by Serhiy Storchaka in :issue:`33710`.) -* The :meth:`~threading.Thread.isAlive()` method of :class:`threading.Thread` +* The :meth:`~threading.Thread.isAlive` method of :class:`threading.Thread` has been deprecated. (Contributed by Donghee Na in :issue:`35283`.) @@ -1755,7 +1755,7 @@ The following features and APIs have been removed from Python 3.8: * Starting with Python 3.3, importing ABCs from :mod:`collections` was deprecated, and importing should be done from :mod:`collections.abc`. Being able to import from collections was marked for removal in 3.8, but has been - delayed to 3.9. (See :issue:`36952`.) + delayed to 3.9. (See :gh:`81134`.) * The :mod:`macpath` module, deprecated in Python 3.7, has been removed. (Contributed by Victor Stinner in :issue:`35471`.) diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 90bdcf95416..ab1fb8c582c 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -408,8 +408,8 @@ Added :func:`curses.get_escdelay`, :func:`curses.set_escdelay`, datetime -------- -The :meth:`~datetime.date.isocalendar()` of :class:`datetime.date` -and :meth:`~datetime.datetime.isocalendar()` of :class:`datetime.datetime` +The :meth:`~datetime.date.isocalendar` of :class:`datetime.date` +and :meth:`~datetime.datetime.isocalendar` of :class:`datetime.datetime` methods now returns a :func:`~collections.namedtuple` instead of a :class:`tuple`. (Contributed by Donghee Na in :issue:`24416`.) @@ -610,7 +610,7 @@ convert a wait status to an exit code. pathlib ------- -Added :meth:`pathlib.Path.readlink()` which acts similarly to +Added :meth:`pathlib.Path.readlink` which acts similarly to :func:`os.readlink`. (Contributed by Girts Folkmanis in :issue:`30618`) @@ -637,7 +637,8 @@ pydoc ----- The documentation string is now shown not only for class, function, -method etc, but for any object that has its own ``__doc__`` attribute. +method etc, but for any object that has its own :attr:`~definition.__doc__` +attribute. (Contributed by Serhiy Storchaka in :issue:`40257`.) random @@ -983,13 +984,13 @@ Removed (Contributed by Victor Stinner in :issue:`37312`.) * ``aifc.openfp()`` alias to ``aifc.open()``, ``sunau.openfp()`` alias to - ``sunau.open()``, and ``wave.openfp()`` alias to :func:`wave.open()` have been + ``sunau.open()``, and ``wave.openfp()`` alias to :func:`wave.open` have been removed. They were deprecated since Python 3.7. (Contributed by Victor Stinner in :issue:`37320`.) -* The :meth:`!isAlive()` method of :class:`threading.Thread` +* The :meth:`!isAlive` method of :class:`threading.Thread` has been removed. It was deprecated since Python 3.8. - Use :meth:`~threading.Thread.is_alive()` instead. + Use :meth:`~threading.Thread.is_alive` instead. (Contributed by Donghee Na in :issue:`37804`.) 
* Methods ``getchildren()`` and ``getiterator()`` of classes diff --git a/Include/Python.h b/Include/Python.h index 882b96b73a7..fb2d32d7110 100644 --- a/Include/Python.h +++ b/Include/Python.h @@ -55,6 +55,10 @@ # include // __readgsqword() #endif +#if defined(Py_GIL_DISABLED) && defined(__MINGW32__) +# include // __readgsqword() +#endif + // Include Python header files #include "pyport.h" #include "pymacro.h" diff --git a/Include/cpython/bytesobject.h b/Include/cpython/bytesobject.h index 816823716e9..41537210b74 100644 --- a/Include/cpython/bytesobject.h +++ b/Include/cpython/bytesobject.h @@ -31,3 +31,7 @@ static inline Py_ssize_t PyBytes_GET_SIZE(PyObject *op) { return Py_SIZE(self); } #define PyBytes_GET_SIZE(self) PyBytes_GET_SIZE(_PyObject_CAST(self)) + +/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*, + x must be an iterable object. */ +PyAPI_FUNC(PyObject*) _PyBytes_Join(PyObject *sep, PyObject *x); diff --git a/Include/cpython/code.h b/Include/cpython/code.h index 07ed5200a59..58d93fcfc10 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -194,9 +194,6 @@ Py_DEPRECATED(3.13) static inline int PyCode_GetFirstFree(PyCodeObject *op) { return PyUnstable_Code_GetFirstFree(op); } -#define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive) -#define _PyCode_NBYTES(CO) (Py_SIZE(CO) * (Py_ssize_t)sizeof(_Py_CODEUNIT)) - /* Unstable public interface */ PyAPI_FUNC(PyCodeObject *) PyUnstable_Code_New( int, int, int, int, int, PyObject *, PyObject *, diff --git a/Include/cpython/longobject.h b/Include/cpython/longobject.h index 96815938c82..0d49242ff68 100644 --- a/Include/cpython/longobject.h +++ b/Include/cpython/longobject.h @@ -10,6 +10,7 @@ PyAPI_FUNC(PyObject*) PyLong_FromUnicodeObject(PyObject *u, int base); #define Py_ASNATIVEBYTES_NATIVE_ENDIAN 3 #define Py_ASNATIVEBYTES_UNSIGNED_BUFFER 4 #define Py_ASNATIVEBYTES_REJECT_NEGATIVE 8 +#define Py_ASNATIVEBYTES_ALLOW_INDEX 16 /* PyLong_AsNativeBytes: Copy the integer value to a native variable. buffer points to the first byte of the variable. @@ -20,8 +21,10 @@ PyAPI_FUNC(PyObject*) PyLong_FromUnicodeObject(PyObject *u, int base); * 2 - native endian * 4 - unsigned destination (e.g. don't reject copying 255 into one byte) * 8 - raise an exception for negative inputs - If flags is -1 (all bits set), native endian is used and value truncation - behaves most like C (allows negative inputs and allow MSB set). + * 16 - call __index__ on non-int types + If flags is -1 (all bits set), native endian is used, value truncation + behaves most like C (allows negative inputs and allow MSB set), and non-int + objects will raise a TypeError. Big endian mode will write the most significant byte into the address directly referenced by buffer; little endian will write the least significant byte into that address. diff --git a/Include/cpython/modsupport.h b/Include/cpython/modsupport.h new file mode 100644 index 00000000000..d3b88f58c82 --- /dev/null +++ b/Include/cpython/modsupport.h @@ -0,0 +1,26 @@ +#ifndef Py_CPYTHON_MODSUPPORT_H +# error "this header file must not be included directly" +#endif + +// A data structure that can be used to run initialization code once in a +// thread-safe manner. The C++11 equivalent is std::call_once. 
+typedef struct { + uint8_t v; +} _PyOnceFlag; + +typedef struct _PyArg_Parser { + const char *format; + const char * const *keywords; + const char *fname; + const char *custom_msg; + _PyOnceFlag once; /* atomic one-time initialization flag */ + int is_kwtuple_owned; /* does this parser own the kwtuple object? */ + int pos; /* number of positional-only arguments */ + int min; /* minimal number of arguments */ + int max; /* maximal number of positional arguments */ + PyObject *kwtuple; /* tuple of keyword parameter names */ + struct _PyArg_Parser *next; +} _PyArg_Parser; + +PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *, + struct _PyArg_Parser *, ...); diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 55a139bb915..4ecef4f56ed 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -510,6 +510,9 @@ _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj); // See https://en.cppreference.com/w/cpp/atomic/atomic_thread_fence static inline void _Py_atomic_fence_seq_cst(void); +// Acquire fence +static inline void _Py_atomic_fence_acquire(void); + // Release fence static inline void _Py_atomic_fence_release(void); diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index f2ebdeeb552..ef09954d53a 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -542,6 +542,10 @@ static inline void _Py_atomic_fence_seq_cst(void) { __atomic_thread_fence(__ATOMIC_SEQ_CST); } + static inline void +_Py_atomic_fence_acquire(void) +{ __atomic_thread_fence(__ATOMIC_ACQUIRE); } + static inline void _Py_atomic_fence_release(void) { __atomic_thread_fence(__ATOMIC_RELEASE); } diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index f32995c1f57..84da21bdcbf 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -1066,6 +1066,18 @@ _Py_atomic_fence_seq_cst(void) #else # error "no implementation of _Py_atomic_fence_seq_cst" #endif +} + + static inline void +_Py_atomic_fence_acquire(void) +{ +#if defined(_M_ARM64) + __dmb(_ARM64_BARRIER_ISHLD); +#elif defined(_M_X64) || defined(_M_IX86) + _ReadBarrier(); +#else +# error "no implementation of _Py_atomic_fence_acquire" +#endif } static inline void diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index 0cdce4e6dd3..7c71e94c68f 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -961,6 +961,13 @@ _Py_atomic_fence_seq_cst(void) atomic_thread_fence(memory_order_seq_cst); } + static inline void +_Py_atomic_fence_acquire(void) +{ + _Py_USING_STD; + atomic_thread_fence(memory_order_acquire); +} + static inline void _Py_atomic_fence_release(void) { diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index 42b4b03b10c..b36b4681f5d 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -100,7 +100,7 @@ PyAPI_FUNC(PyObject*) PyUnstable_Exc_PrepReraiseStar( /* In signalmodule.c */ -int PySignal_SetWakeupFd(int fd); +PyAPI_FUNC(int) PySignal_SetWakeupFd(int fd); /* Support for adding program text to SyntaxErrors */ diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index bb2af78a376..f005729fff1 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -192,6 +192,14 @@ struct _ts { PyObject *previous_executor; uint64_t dict_global_version; + + /* Used to store/retrieve `threading.local` keys/values for this thread */ + PyObject *threading_local_key; + + /* Used by `threading.local`s to be 
remove keys/values for dying threads. + The PyThreadObject must hold the only reference to this value. + */ + PyObject *threading_local_sentinel; }; #ifdef Py_DEBUG diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h index 38480a4f6cd..c4480758f48 100644 --- a/Include/cpython/pystats.h +++ b/Include/cpython/pystats.h @@ -19,12 +19,12 @@ // Define _PY_INTERPRETER macro to increment interpreter_increfs and // interpreter_decrefs. Otherwise, increment increfs and decrefs. -#include "pycore_uop_ids.h" - #ifndef Py_CPYTHON_PYSTATS_H # error "this header file must not be included directly" #endif +#define PYSTATS_MAX_UOP_ID 512 + #define SPECIALIZATION_FAILURE_KINDS 36 /* Stats for determining who is calling PyEval_EvalFrame */ @@ -100,7 +100,7 @@ typedef struct _gc_stats { typedef struct _uop_stats { uint64_t execution_count; uint64_t miss; - uint64_t pair_count[MAX_UOP_ID + 1]; + uint64_t pair_count[PYSTATS_MAX_UOP_ID + 1]; } UOpStats; #define _Py_UOP_HIST_SIZE 32 @@ -118,7 +118,7 @@ typedef struct _optimization_stats { uint64_t recursive_call; uint64_t low_confidence; uint64_t executors_invalidated; - UOpStats opcode[MAX_UOP_ID+1]; + UOpStats opcode[PYSTATS_MAX_UOP_ID + 1]; uint64_t unsupported_opcode[256]; uint64_t trace_length_hist[_Py_UOP_HIST_SIZE]; uint64_t trace_run_length_hist[_Py_UOP_HIST_SIZE]; @@ -128,7 +128,7 @@ typedef struct _optimization_stats { uint64_t optimizer_failure_reason_no_memory; uint64_t remove_globals_builtins_changed; uint64_t remove_globals_incorrect_keys; - uint64_t error_in_opcode[MAX_UOP_ID+1]; + uint64_t error_in_opcode[PYSTATS_MAX_UOP_ID + 1]; } OptimizationStats; typedef struct _rare_event_stats { diff --git a/Include/floatobject.h b/Include/floatobject.h index 999441ac536..8963c16832a 100644 --- a/Include/floatobject.h +++ b/Include/floatobject.h @@ -2,7 +2,7 @@ /* Float object interface */ /* -PyFloatObject represents a (double precision) floating point number. +PyFloatObject represents a (double precision) floating-point number. */ #ifndef Py_FLOATOBJECT_H diff --git a/Include/internal/mimalloc/mimalloc/atomic.h b/Include/internal/mimalloc/mimalloc/atomic.h index eb8478ceed6..1093c540864 100644 --- a/Include/internal/mimalloc/mimalloc/atomic.h +++ b/Include/internal/mimalloc/mimalloc/atomic.h @@ -23,7 +23,9 @@ terms of the MIT license. A copy of the license can be found in the file #define _Atomic(tp) std::atomic #define mi_atomic(name) std::atomic_##name #define mi_memory_order(name) std::memory_order_##name -#if !defined(ATOMIC_VAR_INIT) || (__cplusplus >= 202002L) // c++20, see issue #571 +#if (__cplusplus >= 202002L) // c++20, see issue #571 + #define MI_ATOMIC_VAR_INIT(x) x +#elif !defined(ATOMIC_VAR_INIT) #define MI_ATOMIC_VAR_INIT(x) x #else #define MI_ATOMIC_VAR_INIT(x) ATOMIC_VAR_INIT(x) @@ -39,7 +41,9 @@ terms of the MIT license. 
A copy of the license can be found in the file #include #define mi_atomic(name) atomic_##name #define mi_memory_order(name) memory_order_##name -#if !defined(ATOMIC_VAR_INIT) || (__STDC_VERSION__ >= 201710L) // c17, see issue #735 +#if (__STDC_VERSION__ >= 201710L) // c17, see issue #735 + #define MI_ATOMIC_VAR_INIT(x) x +#elif !defined(ATOMIC_VAR_INIT) #define MI_ATOMIC_VAR_INIT(x) x #else #define MI_ATOMIC_VAR_INIT(x) ATOMIC_VAR_INIT(x) diff --git a/Include/internal/pycore_bytesobject.h b/Include/internal/pycore_bytesobject.h index 94d421a9eb7..300e7f4896a 100644 --- a/Include/internal/pycore_bytesobject.h +++ b/Include/internal/pycore_bytesobject.h @@ -23,10 +23,6 @@ extern PyObject* _PyBytes_FromHex( PyAPI_FUNC(PyObject*) _PyBytes_DecodeEscape(const char *, Py_ssize_t, const char *, const char **); -/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*, - x must be an iterable object. */ -extern PyObject* _PyBytes_Join(PyObject *sep, PyObject *x); - // Substring Search. // diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 26ede31b190..043f5957d48 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -255,6 +255,7 @@ PyAPI_FUNC(void) _PyEval_FormatKwargsError(PyThreadState *tstate, PyObject *func PyAPI_FUNC(PyObject *)_PyEval_MatchClass(PyThreadState *tstate, PyObject *subject, PyObject *type, Py_ssize_t nargs, PyObject *kwargs); PyAPI_FUNC(PyObject *)_PyEval_MatchKeys(PyThreadState *tstate, PyObject *map, PyObject *keys); PyAPI_FUNC(int) _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v, int argcnt, int argcntafter, PyObject **sp); +PyAPI_FUNC(void) _PyEval_MonitorRaise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); PyAPI_FUNC(void) _PyEval_FrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame); diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index 48ff2701c84..1fb8cc473c6 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -30,6 +30,9 @@ typedef union { _Py_BackoffCounter counter; // First cache entry of specializable op } _Py_CODEUNIT; +#define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive) +#define _PyCode_NBYTES(CO) (Py_SIZE(CO) * (Py_ssize_t)sizeof(_Py_CODEUNIT)) + /* These macros only remain defined for compatibility. 
*/ #define _Py_OPCODE(word) ((word).op.code) @@ -534,8 +537,9 @@ write_location_entry_start(uint8_t *ptr, int code, int length) #define ADAPTIVE_COOLDOWN_BACKOFF 0 // Can't assert this in pycore_backoff.h because of header order dependencies -static_assert(COLD_EXIT_INITIAL_VALUE > ADAPTIVE_COOLDOWN_VALUE, - "Cold exit value should be larger than adaptive cooldown value"); +#if COLD_EXIT_INITIAL_VALUE <= ADAPTIVE_COOLDOWN_VALUE +# error "Cold exit value should be larger than adaptive cooldown value" +#endif static inline _Py_BackoffCounter adaptive_counter_bits(uint16_t value, uint16_t backoff) { diff --git a/Include/internal/pycore_context.h b/Include/internal/pycore_context.h index ae5c47f195e..10c1f1e52be 100644 --- a/Include/internal/pycore_context.h +++ b/Include/internal/pycore_context.h @@ -35,9 +35,11 @@ struct _pycontextvarobject { PyObject_HEAD PyObject *var_name; PyObject *var_default; +#ifndef Py_GIL_DISABLED PyObject *var_cached; uint64_t var_cached_tsid; uint64_t var_cached_tsver; +#endif Py_hash_t var_hash; }; diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 8d8d3748eda..36da498db2c 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -16,8 +16,12 @@ extern "C" { // Unsafe flavor of PyDict_GetItemWithError(): no error checking extern PyObject* _PyDict_GetItemWithError(PyObject *dp, PyObject *key); -extern int _PyDict_DelItemIf(PyObject *mp, PyObject *key, - int (*predicate)(PyObject *value)); +// Delete an item from a dict if a predicate is true +// Returns -1 on error, 1 if the item was deleted, 0 otherwise +// Export for '_asyncio' shared extension +PyAPI_FUNC(int) _PyDict_DelItemIf(PyObject *mp, PyObject *key, + int (*predicate)(PyObject *value, void *arg), + void *arg); // "KnownHash" variants // Export for '_asyncio' shared extension @@ -106,8 +110,13 @@ PyAPI_FUNC(PyObject *)_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObjec /* Consumes references to key and value */ PyAPI_FUNC(int) _PyDict_SetItem_Take2(PyDictObject *op, PyObject *key, PyObject *value); extern int _PyDict_SetItem_LockHeld(PyDictObject *dict, PyObject *name, PyObject *value); -extern int _PyDict_GetItemRef_Unicode_LockHeld(PyDictObject *op, PyObject *key, PyObject **result); +// Export for '_asyncio' shared extension +PyAPI_FUNC(int) _PyDict_SetItem_KnownHash_LockHeld(PyDictObject *mp, PyObject *key, + PyObject *value, Py_hash_t hash); +// Export for '_asyncio' shared extension +PyAPI_FUNC(int) _PyDict_GetItemRef_KnownHash_LockHeld(PyDictObject *op, PyObject *key, Py_hash_t hash, PyObject **result); extern int _PyDict_GetItemRef_KnownHash(PyDictObject *op, PyObject *key, Py_hash_t hash, PyObject **result); +extern int _PyDict_GetItemRef_Unicode_LockHeld(PyDictObject *op, PyObject *key, PyObject **result); extern int _PyObjectDict_SetItem(PyTypeObject *tp, PyObject *obj, PyObject **dictptr, PyObject *name, PyObject *value); extern int _PyDict_Pop_KnownHash( @@ -323,6 +332,8 @@ _PyInlineValuesSize(PyTypeObject *tp) int _PyDict_DetachFromObject(PyDictObject *dict, PyObject *obj); +PyDictObject *_PyObject_MaterializeManagedDict_LockHeld(PyObject *); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_dtoa.h b/Include/internal/pycore_dtoa.h index c5cfdf4ce8f..e4222c5267d 100644 --- a/Include/internal/pycore_dtoa.h +++ b/Include/internal/pycore_dtoa.h @@ -11,8 +11,6 @@ extern "C" { #include "pycore_pymath.h" // _PY_SHORT_FLOAT_REPR -#if _PY_SHORT_FLOAT_REPR == 1 - typedef uint32_t ULong; struct @@ -22,15 +20,15 @@ Bigint 
{ ULong x[1]; }; -#ifdef Py_USING_MEMORY_DEBUGGER +#if defined(Py_USING_MEMORY_DEBUGGER) || _PY_SHORT_FLOAT_REPR == 0 struct _dtoa_state { int _not_used; }; -#define _dtoa_interp_state_INIT(INTERP) \ +#define _dtoa_state_INIT(INTERP) \ {0} -#else // !Py_USING_MEMORY_DEBUGGER +#else // !Py_USING_MEMORY_DEBUGGER && _PY_SHORT_FLOAT_REPR != 0 /* The size of the Bigint freelist */ #define Bigint_Kmax 7 @@ -66,8 +64,6 @@ extern char* _Py_dg_dtoa(double d, int mode, int ndigits, int *decpt, int *sign, char **rve); extern void _Py_dg_freedtoa(char *s); -#endif // _PY_SHORT_FLOAT_REPR == 1 - extern PyStatus _PyDtoa_Init(PyInterpreterState *interp); extern void _PyDtoa_Fini(PyInterpreterState *interp); diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index e4eb893263c..af181e3760d 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -26,6 +26,10 @@ struct _frame { char f_trace_lines; /* Emit per-line trace events? */ char f_trace_opcodes; /* Emit per-opcode trace events? */ PyObject *f_extra_locals; /* Dict for locals set by users using f_locals, could be NULL */ + /* This is purely for backwards compatibility for PyEval_GetLocals. + PyEval_GetLocals requires a borrowed reference so the actual reference + is stored here */ + PyObject *f_locals_cache; /* The frame data, if this frame object owns the frame */ PyObject *_f_frame_data[1]; }; diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 28e34d38096..357177bcd6f 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -142,26 +142,11 @@ static inline void _PyObject_GC_SET_SHARED_INLINE(PyObject *op) { /* Bit flags for _gc_prev */ /* Bit 0 is set when tp_finalize is called */ -#define _PyGC_PREV_MASK_FINALIZED 1 +#define _PyGC_PREV_MASK_FINALIZED (1) /* Bit 1 is set when the object is in generation which is GCed currently. */ -#define _PyGC_PREV_MASK_COLLECTING 2 - -/* Bit 0 in _gc_next is the old space bit. - * It is set as follows: - * Young: gcstate->visited_space - * old[0]: 0 - * old[1]: 1 - * permanent: 0 - * - * During a collection all objects handled should have the bit set to - * gcstate->visited_space, as objects are moved from the young gen - * and the increment into old[gcstate->visited_space]. - * When object are moved from the pending space, old[gcstate->visited_space^1] - * into the increment, the old space bit is flipped. -*/ -#define _PyGC_NEXT_MASK_OLD_SPACE_1 1 - -#define _PyGC_PREV_SHIFT 2 +#define _PyGC_PREV_MASK_COLLECTING (2) +/* The (N-2) most significant bits contain the real address. */ +#define _PyGC_PREV_SHIFT (2) #define _PyGC_PREV_MASK (((uintptr_t) -1) << _PyGC_PREV_SHIFT) /* set for debugging information */ @@ -187,13 +172,11 @@ typedef enum { // Lowest bit of _gc_next is used for flags only in GC. // But it is always 0 for normal code. static inline PyGC_Head* _PyGCHead_NEXT(PyGC_Head *gc) { - uintptr_t next = gc->_gc_next & _PyGC_PREV_MASK; + uintptr_t next = gc->_gc_next; return (PyGC_Head*)next; } static inline void _PyGCHead_SET_NEXT(PyGC_Head *gc, PyGC_Head *next) { - uintptr_t unext = (uintptr_t)next; - assert((unext & ~_PyGC_PREV_MASK) == 0); - gc->_gc_next = (gc->_gc_next & ~_PyGC_PREV_MASK) | unext; + gc->_gc_next = (uintptr_t)next; } // Lowest two bits of _gc_prev is used for _PyGC_PREV_MASK_* flags. 
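As a rough illustration of the tagging scheme in the `pycore_gc.h` hunk above: the `_gc_prev` word doubles as a tagged pointer, with the two low bits holding the FINALIZED/COLLECTING flags and `_PyGC_PREV_MASK` stripping them off to recover the real address. The sketch below is not CPython code; the helper names are invented and it only mirrors the masking arithmetic shown in the macros.

```python
# Illustration only: packing two flag bits into the low bits of an aligned
# address, mirroring _PyGC_PREV_MASK_FINALIZED, _PyGC_PREV_MASK_COLLECTING
# and _PyGC_PREV_MASK above.
PREV_SHIFT = 2                      # low two bits are reserved for flags
PREV_MASK = ~0 << PREV_SHIFT        # high bits: the real PyGC_Head address
FLAG_FINALIZED = 1
FLAG_COLLECTING = 2

def pack(addr: int, flags: int) -> int:
    assert addr & ~PREV_MASK == 0   # the address must be at least 4-byte aligned
    return addr | flags

def unpack(word: int) -> tuple[int, int]:
    return word & PREV_MASK, word & ~PREV_MASK

word = pack(0x7F00_0000_1000, FLAG_COLLECTING)
assert unpack(word) == (0x7F00_0000_1000, FLAG_COLLECTING)
```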
@@ -201,7 +184,6 @@ static inline PyGC_Head* _PyGCHead_PREV(PyGC_Head *gc) { uintptr_t prev = (gc->_gc_prev & _PyGC_PREV_MASK); return (PyGC_Head*)prev; } - static inline void _PyGCHead_SET_PREV(PyGC_Head *gc, PyGC_Head *prev) { uintptr_t uprev = (uintptr_t)prev; assert((uprev & ~_PyGC_PREV_MASK) == 0); @@ -287,13 +269,6 @@ struct gc_generation { generations */ }; -struct gc_collection_stats { - /* number of collected objects */ - Py_ssize_t collected; - /* total number of uncollectable objects (put into gc.garbage) */ - Py_ssize_t uncollectable; -}; - /* Running stats per generation */ struct gc_generation_stats { /* total number of collections */ @@ -315,8 +290,8 @@ struct _gc_runtime_state { int enabled; int debug; /* linked lists of container objects */ - struct gc_generation young; - struct gc_generation old[2]; + struct gc_generation generations[NUM_GENERATIONS]; + PyGC_Head *generation0; /* a permanent generation which won't be collected */ struct gc_generation permanent_generation; struct gc_generation_stats generation_stats[NUM_GENERATIONS]; @@ -327,12 +302,6 @@ struct _gc_runtime_state { /* a list of callbacks to be invoked when collection is performed */ PyObject *callbacks; - Py_ssize_t heap_size; - Py_ssize_t work_to_do; - /* Which of the old spaces is the visited space */ - int visited_space; - -#ifdef Py_GIL_DISABLED /* This is the number of objects that survived the last full collection. It approximates the number of long lived objects tracked by the GC. @@ -345,6 +314,7 @@ struct _gc_runtime_state { the first time. */ Py_ssize_t long_lived_pending; +#ifdef Py_GIL_DISABLED /* gh-117783: Deferred reference counting is not fully implemented yet, so as a temporary measure we treat objects using deferred reference counting as immortal. The value may be zero, one, or a negative number: @@ -365,7 +335,8 @@ struct _gc_thread_state { extern void _PyGC_InitState(struct _gc_runtime_state *); -extern Py_ssize_t _PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason); +extern Py_ssize_t _PyGC_Collect(PyThreadState *tstate, int generation, + _PyGC_Reason reason); extern void _PyGC_CollectNoFail(PyThreadState *tstate); /* Freeze objects tracked by the GC and ignore them in future collections. 
*/ diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index 16cb4793ad1..006dd911b5a 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -586,7 +586,6 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__anext__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__annotations__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__args__)); - _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__asyncio_running_event_loop__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__await__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bases__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bool__)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index 1e768532971..a5436b6d689 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -75,7 +75,6 @@ struct _Py_global_strings { STRUCT_FOR_ID(__anext__) STRUCT_FOR_ID(__annotations__) STRUCT_FOR_ID(__args__) - STRUCT_FOR_ID(__asyncio_running_event_loop__) STRUCT_FOR_ID(__await__) STRUCT_FOR_ID(__bases__) STRUCT_FOR_ID(__bool__) diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h index 80d609897b4..2a18bb76447 100644 --- a/Include/internal/pycore_lock.h +++ b/Include/internal/pycore_lock.h @@ -128,12 +128,6 @@ _PyRawMutex_Unlock(_PyRawMutex *m) _PyRawMutex_UnlockSlow(m); } -// A data structure that can be used to run initialization code once in a -// thread-safe manner. The C++11 equivalent is std::call_once. -typedef struct { - uint8_t v; -} _PyOnceFlag; - // Type signature for one-time initialization functions. The function should // return 0 on success and -1 on failure. typedef int _Py_once_fn_t(void *arg); @@ -234,12 +228,12 @@ PyAPI_FUNC(void) _PySeqLock_AbandonWrite(_PySeqLock *seqlock); PyAPI_FUNC(uint32_t) _PySeqLock_BeginRead(_PySeqLock *seqlock); // End the read operation and confirm that the sequence number has not changed. -// Returns 1 if the read was successful or 0 if the read should be re-tried. -PyAPI_FUNC(uint32_t) _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous); +// Returns 1 if the read was successful or 0 if the read should be retried. +PyAPI_FUNC(int) _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous); // Check if the lock was held during a fork and clear the lock. Returns 1 -// if the lock was held and any associated datat should be cleared. -PyAPI_FUNC(uint32_t) _PySeqLock_AfterFork(_PySeqLock *seqlock); +// if the lock was held and any associated data should be cleared. +PyAPI_FUNC(int) _PySeqLock_AfterFork(_PySeqLock *seqlock); #ifdef __cplusplus } diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index 8513695c22e..ff7d9afc03a 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -178,8 +178,12 @@ PyAPI_FUNC(int) _PyLong_Size_t_Converter(PyObject *, void *); * we define them to the numbers in both places and then assert that * they're the same. 
*/ -static_assert(SIGN_MASK == _PyLong_SIGN_MASK, "SIGN_MASK does not match _PyLong_SIGN_MASK"); -static_assert(NON_SIZE_BITS == _PyLong_NON_SIZE_BITS, "NON_SIZE_BITS does not match _PyLong_NON_SIZE_BITS"); +#if SIGN_MASK != _PyLong_SIGN_MASK +# error "SIGN_MASK does not match _PyLong_SIGN_MASK" +#endif +#if NON_SIZE_BITS != _PyLong_NON_SIZE_BITS +# error "NON_SIZE_BITS does not match _PyLong_NON_SIZE_BITS" +#endif /* All *compact" values are guaranteed to fit into * a Py_ssize_t with at least one bit to spare. diff --git a/Include/internal/pycore_mimalloc.h b/Include/internal/pycore_mimalloc.h index d10b01d5b49..d870d01beb7 100644 --- a/Include/internal/pycore_mimalloc.h +++ b/Include/internal/pycore_mimalloc.h @@ -36,9 +36,18 @@ typedef enum { # define MI_TSAN 1 #endif +#ifdef __cplusplus +extern "C++" { +#endif + #include "mimalloc/mimalloc.h" #include "mimalloc/mimalloc/types.h" #include "mimalloc/mimalloc/internal.h" + +#ifdef __cplusplus +} +#endif + #endif #ifdef Py_GIL_DISABLED diff --git a/Include/internal/pycore_modsupport.h b/Include/internal/pycore_modsupport.h index 3d3cd672252..11fde814875 100644 --- a/Include/internal/pycore_modsupport.h +++ b/Include/internal/pycore_modsupport.h @@ -67,24 +67,6 @@ PyAPI_FUNC(void) _PyArg_BadArgument( // --- _PyArg_Parser API --------------------------------------------------- -typedef struct _PyArg_Parser { - const char *format; - const char * const *keywords; - const char *fname; - const char *custom_msg; - _PyOnceFlag once; /* atomic one-time initialization flag */ - int is_kwtuple_owned; /* does this parser own the kwtuple object? */ - int pos; /* number of positional-only arguments */ - int min; /* minimal number of arguments */ - int max; /* maximal number of positional arguments */ - PyObject *kwtuple; /* tuple of keyword parameter names */ - struct _PyArg_Parser *next; -} _PyArg_Parser; - -// Export for '_testclinic' shared extension -PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *, - struct _PyArg_Parser *, ...); - // Export for '_dbm' shared extension PyAPI_FUNC(int) _PyArg_ParseStackAndKeywords( PyObject *const *args, diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index d1e2773a247..39f5600f7ae 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -15,6 +15,30 @@ extern "C" { #include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_STORE_PTR_RELAXED #include "pycore_pystate.h" // _PyInterpreterState_GET() + +#define _Py_IMMORTAL_REFCNT_LOOSE ((_Py_IMMORTAL_REFCNT >> 1) + 1) + +// gh-121528, gh-118997: Similar to _Py_IsImmortal() but be more loose when +// comparing the reference count to stay compatible with C extensions built +// with the stable ABI 3.11 or older. Such extensions implement INCREF/DECREF +// as refcnt++ and refcnt-- without taking in account immortal objects. For +// example, the reference count of an immortal object can change from +// _Py_IMMORTAL_REFCNT to _Py_IMMORTAL_REFCNT+1 (INCREF) or +// _Py_IMMORTAL_REFCNT-1 (DECREF). +// +// This function should only be used in assertions. Otherwise, _Py_IsImmortal() +// must be used instead. +static inline int _Py_IsImmortalLoose(PyObject *op) +{ +#if defined(Py_GIL_DISABLED) + return _Py_IsImmortal(op); +#else + return (op->ob_refcnt >= _Py_IMMORTAL_REFCNT_LOOSE); +#endif +} +#define _Py_IsImmortalLoose(op) _Py_IsImmortalLoose(_PyObject_CAST(op)) + + /* Check if an object is consistent. 
For example, ensure that the reference counter is greater than or equal to 1, and ensure that ob_type is not NULL. @@ -134,7 +158,7 @@ extern void _Py_SetImmortalUntracked(PyObject *op); static inline void _Py_SetMortal(PyObject *op, Py_ssize_t refcnt) { if (op) { - assert(_Py_IsImmortal(op)); + assert(_Py_IsImmortalLoose(op)); #ifdef Py_GIL_DISABLED op->ob_tid = _Py_UNOWNED_TID; op->ob_ref_local = 0; @@ -281,7 +305,7 @@ _PyObject_Init(PyObject *op, PyTypeObject *typeobj) { assert(op != NULL); Py_SET_TYPE(op, typeobj); - assert(_PyType_HasFeature(typeobj, Py_TPFLAGS_HEAPTYPE) || _Py_IsImmortal(typeobj)); + assert(_PyType_HasFeature(typeobj, Py_TPFLAGS_HEAPTYPE) || _Py_IsImmortalLoose(typeobj)); Py_INCREF(typeobj); _Py_NewReference(op); } @@ -329,12 +353,11 @@ static inline void _PyObject_GC_TRACK( filename, lineno, __func__); PyInterpreterState *interp = _PyInterpreterState_GET(); - PyGC_Head *generation0 = &interp->gc.young.head; + PyGC_Head *generation0 = interp->gc.generation0; PyGC_Head *last = (PyGC_Head*)(generation0->_gc_prev); _PyGCHead_SET_NEXT(last, gc); _PyGCHead_SET_PREV(gc, last); - /* Young objects will be moved into the visited space during GC, so set the bit here */ - gc->_gc_next = ((uintptr_t)generation0) | interp->gc.visited_space; + _PyGCHead_SET_NEXT(gc, generation0); generation0->_gc_prev = (uintptr_t)gc; #endif } @@ -628,6 +651,20 @@ _PyObject_IS_GC(PyObject *obj) && (type->tp_is_gc == NULL || type->tp_is_gc(obj))); } +// Fast inlined version of PyObject_Hash() +static inline Py_hash_t +_PyObject_HashFast(PyObject *op) +{ + if (PyUnicode_CheckExact(op)) { + Py_hash_t hash = FT_ATOMIC_LOAD_SSIZE_RELAXED( + _PyASCIIObject_CAST(op)->hash); + if (hash != -1) { + return hash; + } + } + return PyObject_Hash(op); +} + // Fast inlined version of PyType_IS_GC() #define _PyType_IS_GC(t) _PyType_HasFeature((t), Py_TPFLAGS_HAVE_GC) @@ -733,13 +770,7 @@ PyAPI_FUNC(PyObject*) _PyObject_GetState(PyObject *); * Third party code unintentionally rely on problematic fpcasts. The call * trampoline mitigates common occurrences of bad fpcasts on Emscripten. 
*/ -#if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) -#define _PyCFunction_TrampolineCall(meth, self, args) \ - _PyCFunctionWithKeywords_TrampolineCall( \ - (*(PyCFunctionWithKeywords)(void(*)(void))(meth)), (self), (args), NULL) -extern PyObject* _PyCFunctionWithKeywords_TrampolineCall( - PyCFunctionWithKeywords meth, PyObject *, PyObject *, PyObject *); -#else +#if !(defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE)) #define _PyCFunction_TrampolineCall(meth, self, args) \ (meth)((self), (args)) #define _PyCFunctionWithKeywords_TrampolineCall(meth, self, args, kw) \ diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 5b0d8dfd1b0..bbba0bbbf4b 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -1161,10 +1161,10 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [TO_BOOL] = { true, INSTR_FMT_IXC00, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, [TO_BOOL_BOOL] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, - [TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, + [TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [TO_BOOL_LIST] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, [TO_BOOL_NONE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, - [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, + [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [UNARY_INVERT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_NEGATIVE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_NOT] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, diff --git a/Include/internal/pycore_parser.h b/Include/internal/pycore_parser.h index 067b34c12c4..b16084aaa15 100644 --- a/Include/internal/pycore_parser.h +++ b/Include/internal/pycore_parser.h @@ -21,6 +21,9 @@ extern "C" { struct _parser_runtime_state { #ifdef Py_DEBUG long memo_statistics[_PYPEGEN_NSTATISTICS]; +#ifdef Py_GIL_DISABLED + PyMutex mutex; +#endif #else int _not_used; #endif @@ -28,8 +31,10 @@ struct _parser_runtime_state { }; _Py_DECLARE_STR(empty, "") +#if defined(Py_DEBUG) && defined(Py_GIL_DISABLED) #define _parser_runtime_state_INIT \ { \ + .mutex = {0}, \ .dummy_name = { \ .kind = Name_kind, \ .v.Name.id = &_Py_STR(empty), \ @@ -40,6 +45,20 @@ _Py_DECLARE_STR(empty, "") .end_col_offset = 0, \ }, \ } +#else +#define _parser_runtime_state_INIT \ + { \ + .dummy_name = { \ + .kind = Name_kind, \ + .v.Name.id = &_Py_STR(empty), \ + .v.Name.ctx = Load, \ + .lineno = 1, \ + .col_offset = 0, \ + .end_lineno = 1, \ + .end_col_offset = 0, \ + }, \ + } +#endif extern struct _mod* _PyParser_ASTFromString( const char *str, diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index f58eccf729c..d4291b87261 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -44,6 +44,15 @@ struct _gilstate_runtime_state { /* Runtime audit hook state */ +#define _Py_Debug_Cookie "xdebugpy" + +#ifdef Py_GIL_DISABLED +# define _Py_Debug_gilruntimestate_enabled offsetof(struct _gil_runtime_state, enabled) +# define _Py_Debug_Free_Threaded 1 +#else +# define _Py_Debug_gilruntimestate_enabled 0 +# define _Py_Debug_Free_Threaded 0 +#endif typedef struct _Py_AuditHookEntry { struct _Py_AuditHookEntry *next; Py_AuditHookFunction hookCFunction; @@ -53,14 +62,18 @@ typedef struct _Py_AuditHookEntry { typedef struct _Py_DebugOffsets { char cookie[8]; uint64_t 
version; + uint64_t free_threaded; // Runtime state offset; struct _runtime_state { + uint64_t size; uint64_t finalizing; uint64_t interpreters_head; } runtime_state; // Interpreter state offset; struct _interpreter_state { + uint64_t size; + uint64_t id; uint64_t next; uint64_t threads_head; uint64_t gc; @@ -68,22 +81,28 @@ typedef struct _Py_DebugOffsets { uint64_t sysdict; uint64_t builtins; uint64_t ceval_gil; + uint64_t gil_runtime_state; + uint64_t gil_runtime_state_enabled; uint64_t gil_runtime_state_locked; uint64_t gil_runtime_state_holder; } interpreter_state; // Thread state offset; struct _thread_state{ + uint64_t size; uint64_t prev; uint64_t next; uint64_t interp; uint64_t current_frame; uint64_t thread_id; uint64_t native_thread_id; + uint64_t datastack_chunk; + uint64_t status; } thread_state; // InterpreterFrame offset; struct _interpreter_frame { + uint64_t size; uint64_t previous; uint64_t executable; uint64_t instr_ptr; @@ -91,16 +110,12 @@ typedef struct _Py_DebugOffsets { uint64_t owner; } interpreter_frame; - // CFrame offset; - struct _cframe { - uint64_t current_frame; - uint64_t previous; - } cframe; - // Code object offset; struct _code_object { + uint64_t size; uint64_t filename; uint64_t name; + uint64_t qualname; uint64_t linetable; uint64_t firstlineno; uint64_t argcount; @@ -111,25 +126,72 @@ typedef struct _Py_DebugOffsets { // PyObject offset; struct _pyobject { + uint64_t size; uint64_t ob_type; } pyobject; // PyTypeObject object offset; struct _type_object { + uint64_t size; uint64_t tp_name; + uint64_t tp_repr; + uint64_t tp_flags; } type_object; // PyTuple object offset; struct _tuple_object { + uint64_t size; uint64_t ob_item; + uint64_t ob_size; } tuple_object; + // PyList object offset; + struct _list_object { + uint64_t size; + uint64_t ob_item; + uint64_t ob_size; + } list_object; + + // PyDict object offset; + struct _dict_object { + uint64_t size; + uint64_t ma_keys; + uint64_t ma_values; + } dict_object; + + // PyFloat object offset; + struct _float_object { + uint64_t size; + uint64_t ob_fval; + } float_object; + + // PyLong object offset; + struct _long_object { + uint64_t size; + uint64_t lv_tag; + uint64_t ob_digit; + } long_object; + + // PyBytes object offset; + struct _bytes_object { + uint64_t size; + uint64_t ob_size; + uint64_t ob_sval; + } bytes_object; + // Unicode object offset; struct _unicode_object { + uint64_t size; uint64_t state; uint64_t length; - size_t asciiobject_size; + uint64_t asciiobject_size; } unicode_object; + + // GC runtime state offset; + struct _gc { + uint64_t size; + uint64_t collecting; + } gc; } _Py_DebugOffsets; /* Reference tracer state */ diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index 98920dbb7c7..7eef9edc0aa 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -29,16 +29,20 @@ extern PyTypeObject _PyExc_MemoryError; /* The static initializers defined here should only be used in the runtime init code (in pystate.c and pylifecycle.c). 
*/ -#define _PyRuntimeState_INIT(runtime) \ +#define _PyRuntimeState_INIT(runtime, debug_cookie) \ { \ .debug_offsets = { \ - .cookie = "xdebugpy", \ + .cookie = debug_cookie, \ .version = PY_VERSION_HEX, \ + .free_threaded = _Py_Debug_Free_Threaded, \ .runtime_state = { \ + .size = sizeof(_PyRuntimeState), \ .finalizing = offsetof(_PyRuntimeState, _finalizing), \ .interpreters_head = offsetof(_PyRuntimeState, interpreters.head), \ }, \ .interpreter_state = { \ + .size = sizeof(PyInterpreterState), \ + .id = offsetof(PyInterpreterState, id), \ .next = offsetof(PyInterpreterState, next), \ .threads_head = offsetof(PyInterpreterState, threads.head), \ .gc = offsetof(PyInterpreterState, gc), \ @@ -46,18 +50,24 @@ extern PyTypeObject _PyExc_MemoryError; .sysdict = offsetof(PyInterpreterState, sysdict), \ .builtins = offsetof(PyInterpreterState, builtins), \ .ceval_gil = offsetof(PyInterpreterState, ceval.gil), \ + .gil_runtime_state = offsetof(PyInterpreterState, _gil), \ + .gil_runtime_state_enabled = _Py_Debug_gilruntimestate_enabled, \ .gil_runtime_state_locked = offsetof(PyInterpreterState, _gil.locked), \ .gil_runtime_state_holder = offsetof(PyInterpreterState, _gil.last_holder), \ }, \ .thread_state = { \ + .size = sizeof(PyThreadState), \ .prev = offsetof(PyThreadState, prev), \ .next = offsetof(PyThreadState, next), \ .interp = offsetof(PyThreadState, interp), \ .current_frame = offsetof(PyThreadState, current_frame), \ .thread_id = offsetof(PyThreadState, thread_id), \ .native_thread_id = offsetof(PyThreadState, native_thread_id), \ + .datastack_chunk = offsetof(PyThreadState, datastack_chunk), \ + .status = offsetof(PyThreadState, _status), \ }, \ .interpreter_frame = { \ + .size = sizeof(_PyInterpreterFrame), \ .previous = offsetof(_PyInterpreterFrame, previous), \ .executable = offsetof(_PyInterpreterFrame, f_executable), \ .instr_ptr = offsetof(_PyInterpreterFrame, instr_ptr), \ @@ -65,8 +75,10 @@ extern PyTypeObject _PyExc_MemoryError; .owner = offsetof(_PyInterpreterFrame, owner), \ }, \ .code_object = { \ + .size = sizeof(PyCodeObject), \ .filename = offsetof(PyCodeObject, co_filename), \ .name = offsetof(PyCodeObject, co_name), \ + .qualname = offsetof(PyCodeObject, co_qualname), \ .linetable = offsetof(PyCodeObject, co_linetable), \ .firstlineno = offsetof(PyCodeObject, co_firstlineno), \ .argcount = offsetof(PyCodeObject, co_argcount), \ @@ -75,19 +87,54 @@ extern PyTypeObject _PyExc_MemoryError; .co_code_adaptive = offsetof(PyCodeObject, co_code_adaptive), \ }, \ .pyobject = { \ + .size = sizeof(PyObject), \ .ob_type = offsetof(PyObject, ob_type), \ }, \ .type_object = { \ + .size = sizeof(PyTypeObject), \ .tp_name = offsetof(PyTypeObject, tp_name), \ + .tp_repr = offsetof(PyTypeObject, tp_repr), \ + .tp_flags = offsetof(PyTypeObject, tp_flags), \ }, \ .tuple_object = { \ + .size = sizeof(PyTupleObject), \ .ob_item = offsetof(PyTupleObject, ob_item), \ + .ob_size = offsetof(PyTupleObject, ob_base.ob_size), \ + }, \ + .list_object = { \ + .size = sizeof(PyListObject), \ + .ob_item = offsetof(PyListObject, ob_item), \ + .ob_size = offsetof(PyListObject, ob_base.ob_size), \ + }, \ + .dict_object = { \ + .size = sizeof(PyDictObject), \ + .ma_keys = offsetof(PyDictObject, ma_keys), \ + .ma_values = offsetof(PyDictObject, ma_values), \ + }, \ + .float_object = { \ + .size = sizeof(PyFloatObject), \ + .ob_fval = offsetof(PyFloatObject, ob_fval), \ + }, \ + .long_object = { \ + .size = sizeof(PyLongObject), \ + .lv_tag = offsetof(PyLongObject, long_value.lv_tag), \ + .ob_digit = 
offsetof(PyLongObject, long_value.ob_digit), \ + }, \ + .bytes_object = { \ + .size = sizeof(PyBytesObject), \ + .ob_size = offsetof(PyBytesObject, ob_base.ob_size), \ + .ob_sval = offsetof(PyBytesObject, ob_sval), \ }, \ .unicode_object = { \ + .size = sizeof(PyUnicodeObject), \ .state = offsetof(PyUnicodeObject, _base._base.state), \ .length = offsetof(PyUnicodeObject, _base._base.length), \ .asciiobject_size = sizeof(PyASCIIObject), \ }, \ + .gc = { \ + .size = sizeof(struct _gc_runtime_state), \ + .collecting = offsetof(struct _gc_runtime_state, collecting), \ + }, \ }, \ .allocators = { \ .standard = _pymem_allocators_standard_INIT(runtime), \ @@ -181,12 +228,12 @@ extern PyTypeObject _PyExc_MemoryError; }, \ .gc = { \ .enabled = 1, \ - .young = { .threshold = 2000, }, \ - .old = { \ + .generations = { \ + /* .head is set in _PyGC_InitState(). */ \ + { .threshold = 2000, }, \ + { .threshold = 10, }, \ { .threshold = 10, }, \ - { .threshold = 0, }, \ }, \ - .work_to_do = -5000, \ }, \ .qsbr = { \ .wr_seq = QSBR_INITIAL, \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index f4d0ee4122e..6e62ecd4b3a 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -584,7 +584,6 @@ extern "C" { INIT_ID(__anext__), \ INIT_ID(__annotations__), \ INIT_ID(__args__), \ - INIT_ID(__asyncio_running_event_loop__), \ INIT_ID(__await__), \ INIT_ID(__bases__), \ INIT_ID(__bool__), \ diff --git a/Include/internal/pycore_time.h b/Include/internal/pycore_time.h index 15806552e0a..205ac5d3781 100644 --- a/Include/internal/pycore_time.h +++ b/Include/internal/pycore_time.h @@ -6,7 +6,7 @@ // Time formats: // // * Seconds. -// * Seconds as a floating point number (C double). +// * Seconds as a floating-point number (C double). // * Milliseconds (10^-3 seconds). // * Microseconds (10^-6 seconds). // * 100 nanoseconds (10^-7 seconds), used on Windows. diff --git a/Include/internal/pycore_tstate.h b/Include/internal/pycore_tstate.h index befca950920..1ed5b1d826a 100644 --- a/Include/internal/pycore_tstate.h +++ b/Include/internal/pycore_tstate.h @@ -21,6 +21,8 @@ typedef struct _PyThreadStateImpl { // semi-public fields are in PyThreadState. 
PyThreadState base; + PyObject *asyncio_running_loop; // Strong reference + struct _qsbr_thread_state *qsbr; // only used by free-threaded build struct llist_node mem_free_queue; // delayed free queue diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index e1bc9cf1c7b..53373903095 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -104,10 +104,6 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); assert(PyUnicode_GET_LENGTH(string) != 1); - string = &_Py_ID(__asyncio_running_event_loop__); - _PyUnicode_InternStatic(interp, &string); - assert(_PyUnicode_CheckConsistency(string, 1)); - assert(PyUnicode_GET_LENGTH(string) != 1); string = &_Py_ID(__await__); _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index bc0ba16e24f..02ffc769c18 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -52,10 +52,10 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_UNARY_NOT] = HAS_PURE_FLAG, [_TO_BOOL] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_TO_BOOL_BOOL] = HAS_EXIT_FLAG, - [_TO_BOOL_INT] = HAS_EXIT_FLAG, + [_TO_BOOL_INT] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG, [_TO_BOOL_LIST] = HAS_EXIT_FLAG, [_TO_BOOL_NONE] = HAS_EXIT_FLAG, - [_TO_BOOL_STR] = HAS_EXIT_FLAG, + [_TO_BOOL_STR] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG, [_REPLACE_WITH_TRUE] = 0, [_UNARY_INVERT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_GUARD_BOTH_INT] = HAS_EXIT_FLAG, diff --git a/Include/modsupport.h b/Include/modsupport.h index ea4c0fce9f4..af995f567b0 100644 --- a/Include/modsupport.h +++ b/Include/modsupport.h @@ -134,6 +134,12 @@ PyAPI_FUNC(PyObject *) PyModule_FromDefAndSpec2(PyModuleDef *def, #endif /* New in 3.5 */ +#ifndef Py_LIMITED_API +# define Py_CPYTHON_MODSUPPORT_H +# include "cpython/modsupport.h" +# undef Py_CPYTHON_MODSUPPORT_H +#endif + #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h index fa9c2a51a95..78aa7ad0f45 100644 --- a/Include/object.h +++ b/Include/object.h @@ -247,6 +247,12 @@ _Py_ThreadId(void) tid = __readfsdword(24); #elif defined(_MSC_VER) && defined(_M_ARM64) tid = __getReg(18); +#elif defined(__MINGW32__) && defined(_M_X64) + tid = __readgsqword(48); +#elif defined(__MINGW32__) && defined(_M_IX86) + tid = __readfsdword(24); +#elif defined(__MINGW32__) && defined(_M_ARM64) + tid = __getReg(18); #elif defined(__i386__) __asm__("movl %%gs:0, %0" : "=r" (tid)); // 32-bit always uses GS #elif defined(__MACH__) && defined(__x86_64__) @@ -327,11 +333,7 @@ static inline Py_ssize_t Py_REFCNT(PyObject *ob) { // bpo-39573: The Py_SET_TYPE() function must be used to set an object type. 
static inline PyTypeObject* Py_TYPE(PyObject *ob) { -#ifdef Py_GIL_DISABLED - return (PyTypeObject *)_Py_atomic_load_ptr_relaxed(&ob->ob_type); -#else return ob->ob_type; -#endif } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 # define Py_TYPE(ob) Py_TYPE(_PyObject_CAST(ob)) @@ -421,11 +423,7 @@ static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) { static inline void Py_SET_TYPE(PyObject *ob, PyTypeObject *type) { -#ifdef Py_GIL_DISABLED - _Py_atomic_store_ptr(&ob->ob_type, type); -#else ob->ob_type = type; -#endif } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 # define Py_SET_TYPE(ob, type) Py_SET_TYPE(_PyObject_CAST(ob), type) diff --git a/Include/patchlevel.h b/Include/patchlevel.h index b2023d0d1ca..e8e7c72085e 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -19,11 +19,11 @@ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 13 #define PY_MICRO_VERSION 0 -#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_BETA -#define PY_RELEASE_SERIAL 3 +#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL +#define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.13.0b3+meta" +#define PY_VERSION "3.13.0+meta" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Include/py_curses.h b/Include/py_curses.h index a51d9980eee..79b1b01fcfa 100644 --- a/Include/py_curses.h +++ b/Include/py_curses.h @@ -36,13 +36,21 @@ #define NCURSES_OPAQUE 0 #endif -#ifdef HAVE_NCURSES_H -#include -#else -#include +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include #endif -#ifdef HAVE_NCURSES_H +#ifdef NCURSES_VERSION /* configure was checking , but we will use , which has some or all these features. */ #if !defined(WINDOW_HAS_FLAGS) && \ diff --git a/Include/pymacro.h b/Include/pymacro.h index b388c2a4a66..e3e9cd13594 100644 --- a/Include/pymacro.h +++ b/Include/pymacro.h @@ -15,11 +15,11 @@ // MSVC makes static_assert a keyword in C11-17, contrary to the standards. // // In C++11 and C2x, static_assert is a keyword, redefining is undefined -// behaviour. So only define if building as C (if __STDC_VERSION__ is defined), -// not C++, and only for C11-17. +// behaviour. So only define if building as C, not C++ (if __cplusplus is +// not defined), and only for C11-17. #if !defined(static_assert) && (defined(__GNUC__) || defined(__clang__)) \ - && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \ - && __STDC_VERSION__ <= 201710L + && !defined(__cplusplus) && defined(__STDC_VERSION__) \ + && __STDC_VERSION__ >= 201112L && __STDC_VERSION__ <= 201710L # define static_assert _Static_assert #endif @@ -46,7 +46,8 @@ /* Argument must be a char or an int in [-128, 127] or [0, 255]. */ #define Py_CHARMASK(c) ((unsigned char)((c) & 0xff)) -#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L +#if (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \ + && !defined(__cplusplus)) # define Py_BUILD_ASSERT_EXPR(cond) \ ((void)sizeof(struct { int dummy; _Static_assert(cond, #cond); }), \ 0) diff --git a/InternalDocs/string_interning.md b/InternalDocs/string_interning.md index 930ea110d85..358e2c070cd 100644 --- a/InternalDocs/string_interning.md +++ b/InternalDocs/string_interning.md @@ -8,51 +8,50 @@ This is used to optimize dict and attribute lookups, among other things. 
-Python uses three different mechanisms to intern strings: +Python uses two different mechanisms to intern strings: singletons and +dynamic interning. -- Singleton strings marked in C source with `_Py_STR` and `_Py_ID` macros. - These are statically allocated, and collected using `make regen-global-objects` - (`Tools/build/generate_global_objects.py`), which generates code - for declaration, initialization and finalization. +## Singletons - The difference between the two kinds is not important. (A `_Py_ID` string is - a valid C name, with which we can refer to it; a `_Py_STR` may e.g. contain - non-identifier characters, so it needs a separate C-compatible name.) +The 256 possible one-character latin-1 strings, which can be retrieved with +`_Py_LATIN1_CHR(c)`, are stored in statically allocated arrays, +`_PyRuntime.static_objects.strings.ascii` and +`_PyRuntime.static_objects.strings.latin1`. - The empty string is in this category (as `_Py_STR(empty)`). +Longer singleton strings are marked in C source with `_Py_ID` (if the string +is a valid C identifier fragment) or `_Py_STR` (if it needs a separate +C-compatible name.) +These are also stored in statically allocated arrays. +They are collected from CPython sources using `make regen-global-objects` +(`Tools/build/generate_global_objects.py`), which generates code +for declaration, initialization and finalization. - These singletons are interned in a runtime-global lookup table, - `_PyRuntime.cached_objects.interned_strings` (`INTERNED_STRINGS`), - at runtime initialization. +The empty string is one of the singletons: `_Py_STR(empty)`. -- The 256 possible one-character latin-1 strings are singletons, - which can be retrieved with `_Py_LATIN1_CHR(c)`, are stored in runtime-global - arrays, `_PyRuntime.static_objects.strings.ascii` and - `_PyRuntime.static_objects.strings.latin1`. +The three sets of singletons (`_Py_LATIN1_CHR`, `_Py_ID`, `_Py_STR`) +are disjoint. +If you have such a singleton, it (and no other copy) will be interned. - These are NOT interned at startup in the normal build. - In the free-threaded build, they are; this avoids modifying the - global lookup table after threads are started. +These singletons are interned in a runtime-global lookup table, +`_PyRuntime.cached_objects.interned_strings` (`INTERNED_STRINGS`), +at runtime initialization, and immutable until it's torn down +at runtime finalization. +It is shared across threads and interpreters without any synchronization. - Interning a one-char latin-1 string will always intern the corresponding - singleton. -- All other strings are allocated dynamically, and have their - `_PyUnicode_STATE(s).statically_allocated` flag set to zero. - When interned, such strings are added to an interpreter-wide dict, - `PyInterpreterState.cached_objects.interned_strings`. +## Dynamically allocated strings - The key and value of each entry in this dict reference the same object. +All other strings are allocated dynamically, and have their +`_PyUnicode_STATE(s).statically_allocated` flag set to zero. +When interned, such strings are added to an interpreter-wide dict, +`PyInterpreterState.cached_objects.interned_strings`. -The three sets of singletons (`_Py_STR`, `_Py_ID`, `_Py_LATIN1_CHR`) -are disjoint. -If you have such a singleton, it (and no other copy) will be interned. +The key and value of each entry in this dict reference the same object. 
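A tiny runnable illustration of the dynamic-interning path described above, using the public `sys.intern()` entry point (the interpreter-wide dict itself is not reachable from Python code; this only demonstrates the observable effect):

```python
import sys

# Build two equal strings at runtime so the compiler cannot fold them into a
# single constant; in current CPython they start out as distinct objects.
a = "-".join(["interned", "example"])
b = "-".join(["interned", "example"])
assert a == b and a is not b

# Interning maps both to the single copy stored in the interpreter-wide dict,
# so identity checks (and therefore dict/attribute lookups) become cheap.
a, b = sys.intern(a), sys.intern(b)
assert a is b
```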
## Immortality and reference counting -Invariant: Every immortal string is interned, *except* the one-char latin-1 -singletons (which might but might not be interned). +Invariant: Every immortal string is interned. In practice, this means that you must not use `_Py_SetImmortal` on a string. (If you know it's already immortal, don't immortalize it; @@ -115,8 +114,5 @@ The valid transitions between these states are: Using `_PyUnicode_InternStatic` on these is an error; the other cases don't change the state. -- One-char latin-1 singletons can be interned (0 -> 3) using any interning - function; after that the functions don't change the state. - -- Other statically allocated strings are interned (0 -> 3) at runtime init; +- Singletons are interned (0 -> 3) at runtime init; after that all interning functions don't change the state. diff --git a/Lib/_android_support.py b/Lib/_android_support.py index 590e85ea8c2..353b34fa36a 100644 --- a/Lib/_android_support.py +++ b/Lib/_android_support.py @@ -1,19 +1,20 @@ import io import sys - +from threading import RLock +from time import sleep, time # The maximum length of a log message in bytes, including the level marker and -# tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD in -# platform/system/logging/liblog/include/log/log.h. As of API level 30, messages -# longer than this will be be truncated by logcat. This limit has already been -# reduced at least once in the history of Android (from 4076 to 4068 between API -# level 23 and 26), so leave some headroom. +# tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD at +# https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log.h;l=71. +# Messages longer than this will be be truncated by logcat. This limit has already +# been reduced at least once in the history of Android (from 4076 to 4068 between +# API level 23 and 26), so leave some headroom. MAX_BYTES_PER_WRITE = 4000 # UTF-8 uses a maximum of 4 bytes per character, so limiting text writes to this -# size ensures that TextIOWrapper can always avoid exceeding MAX_BYTES_PER_WRITE. +# size ensures that we can always avoid exceeding MAX_BYTES_PER_WRITE. # However, if the actual number of bytes per character is smaller than that, -# then TextIOWrapper may still join multiple consecutive text writes into binary +# then we may still join multiple consecutive text writes into binary # writes containing a larger number of characters. MAX_CHARS_PER_WRITE = MAX_BYTES_PER_WRITE // 4 @@ -26,18 +27,24 @@ def init_streams(android_log_write, stdout_prio, stderr_prio): if sys.executable: return # Not embedded in an app. 
+ global logcat + logcat = Logcat(android_log_write) + sys.stdout = TextLogStream( - android_log_write, stdout_prio, "python.stdout", errors=sys.stdout.errors) + stdout_prio, "python.stdout", sys.stdout.fileno(), + errors=sys.stdout.errors) sys.stderr = TextLogStream( - android_log_write, stderr_prio, "python.stderr", errors=sys.stderr.errors) + stderr_prio, "python.stderr", sys.stderr.fileno(), + errors=sys.stderr.errors) class TextLogStream(io.TextIOWrapper): - def __init__(self, android_log_write, prio, tag, **kwargs): + def __init__(self, prio, tag, fileno=None, **kwargs): kwargs.setdefault("encoding", "UTF-8") - kwargs.setdefault("line_buffering", True) - super().__init__(BinaryLogStream(android_log_write, prio, tag), **kwargs) - self._CHUNK_SIZE = MAX_BYTES_PER_WRITE + super().__init__(BinaryLogStream(prio, tag, fileno), **kwargs) + self._lock = RLock() + self._pending_bytes = [] + self._pending_bytes_count = 0 def __repr__(self): return f"" @@ -52,21 +59,51 @@ def write(self, s): s = str.__str__(s) # We want to emit one log message per line wherever possible, so split - # the string before sending it to the superclass. Note that - # "".splitlines() == [], so nothing will be logged for an empty string. - for line in s.splitlines(keepends=True): - while line: - super().write(line[:MAX_CHARS_PER_WRITE]) - line = line[MAX_CHARS_PER_WRITE:] + # the string into lines first. Note that "".splitlines() == [], so + # nothing will be logged for an empty string. + with self._lock: + for line in s.splitlines(keepends=True): + while line: + chunk = line[:MAX_CHARS_PER_WRITE] + line = line[MAX_CHARS_PER_WRITE:] + self._write_chunk(chunk) return len(s) + # The size and behavior of TextIOWrapper's buffer is not part of its public + # API, so we handle buffering ourselves to avoid truncation. + def _write_chunk(self, s): + b = s.encode(self.encoding, self.errors) + if self._pending_bytes_count + len(b) > MAX_BYTES_PER_WRITE: + self.flush() + + self._pending_bytes.append(b) + self._pending_bytes_count += len(b) + if ( + self.write_through + or b.endswith(b"\n") + or self._pending_bytes_count > MAX_BYTES_PER_WRITE + ): + self.flush() + + def flush(self): + with self._lock: + self.buffer.write(b"".join(self._pending_bytes)) + self._pending_bytes.clear() + self._pending_bytes_count = 0 + + # Since this is a line-based logging system, line buffering cannot be turned + # off, i.e. a newline always causes a flush. + @property + def line_buffering(self): + return True + class BinaryLogStream(io.RawIOBase): - def __init__(self, android_log_write, prio, tag): - self.android_log_write = android_log_write + def __init__(self, prio, tag, fileno=None): self.prio = prio self.tag = tag + self._fileno = fileno def __repr__(self): return f"" @@ -85,10 +122,57 @@ def write(self, b): # Writing an empty string to the stream should have no effect. if b: - # Encode null bytes using "modified UTF-8" to avoid truncating the - # message. This should not affect the return value, as the caller - # may be expecting it to match the length of the input. - self.android_log_write(self.prio, self.tag, - b.replace(b"\x00", b"\xc0\x80")) - + logcat.write(self.prio, self.tag, b) return len(b) + + # This is needed by the test suite --timeout option, which uses faulthandler. + def fileno(self): + if self._fileno is None: + raise io.UnsupportedOperation("fileno") + return self._fileno + + +# When a large volume of data is written to logcat at once, e.g. 
when a test +# module fails in --verbose3 mode, there's a risk of overflowing logcat's own +# buffer and losing messages. We avoid this by imposing a rate limit using the +# token bucket algorithm, based on a conservative estimate of how fast `adb +# logcat` can consume data. +MAX_BYTES_PER_SECOND = 1024 * 1024 + +# The logcat buffer size of a device can be determined by running `logcat -g`. +# We set the token bucket size to half of the buffer size of our current minimum +# API level, because other things on the system will be producing messages as +# well. +BUCKET_SIZE = 128 * 1024 + +# https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log_read.h;l=39 +PER_MESSAGE_OVERHEAD = 28 + + +class Logcat: + def __init__(self, android_log_write): + self.android_log_write = android_log_write + self._lock = RLock() + self._bucket_level = 0 + self._prev_write_time = time() + + def write(self, prio, tag, message): + # Encode null bytes using "modified UTF-8" to avoid them truncating the + # message. + message = message.replace(b"\x00", b"\xc0\x80") + + with self._lock: + now = time() + self._bucket_level += ( + (now - self._prev_write_time) * MAX_BYTES_PER_SECOND) + + # If the bucket level is still below zero, the clock must have gone + # backwards, so reset it to zero and continue. + self._bucket_level = max(0, min(self._bucket_level, BUCKET_SIZE)) + self._prev_write_time = now + + self._bucket_level -= PER_MESSAGE_OVERHEAD + len(tag) + len(message) + if self._bucket_level < 0: + sleep(-self._bucket_level / MAX_BYTES_PER_SECOND) + + self.android_log_write(prio, tag, message) diff --git a/Lib/_collections_abc.py b/Lib/_collections_abc.py index 601107d2d86..036254869d5 100644 --- a/Lib/_collections_abc.py +++ b/Lib/_collections_abc.py @@ -85,6 +85,10 @@ def _f(): pass dict_items = type({}.items()) ## misc ## mappingproxy = type(type.__dict__) +def _get_framelocalsproxy(): + return type(sys._getframe().f_locals) +framelocalsproxy = _get_framelocalsproxy() +del _get_framelocalsproxy generator = type((lambda: (yield))()) ## coroutine ## async def _coro(): pass @@ -836,6 +840,7 @@ def __eq__(self, other): __reversed__ = None Mapping.register(mappingproxy) +Mapping.register(framelocalsproxy) class MappingView(Sized): diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py index 613123ec7b4..75df3db2624 100644 --- a/Lib/_pydecimal.py +++ b/Lib/_pydecimal.py @@ -424,7 +424,7 @@ def sin(x): # numbers.py for more detail. class Decimal(object): - """Floating point class for decimal arithmetic.""" + """Floating-point class for decimal arithmetic.""" __slots__ = ('_exp','_int','_sign', '_is_special') # Generally, the value of the Decimal instance is given by diff --git a/Lib/_pyrepl/__main__.py b/Lib/_pyrepl/__main__.py index efb6d343cc9..3fa992eee8e 100644 --- a/Lib/_pyrepl/__main__.py +++ b/Lib/_pyrepl/__main__.py @@ -1,3 +1,6 @@ +# Important: don't add things to this module, as they will end up in the REPL's +# default globals. Use _pyrepl.main instead. 
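A stripped-down sketch of the token-bucket pacing applied by the new `Logcat.write()` in `Lib/_android_support.py` above; the class and names here are illustrative, only the shape of the algorithm is taken from the patch.

```python
from time import sleep, time

RATE = 1024 * 1024       # bytes per second allowed through (MAX_BYTES_PER_SECOND)
CAPACITY = 128 * 1024    # burst allowance (BUCKET_SIZE)

class TokenBucket:
    """Pace writes so a sustained burst cannot overflow the consumer."""

    def __init__(self) -> None:
        self.level = 0.0
        self.prev = time()

    def consume(self, nbytes: int) -> None:
        now = time()
        # Refill with the bytes "earned" since the last write, clamped to
        # [0, CAPACITY] in case the clock went backwards or we were idle.
        self.level = min(CAPACITY, max(0.0, self.level + (now - self.prev) * RATE))
        self.prev = now
        # Spend tokens; a negative balance means sleeping off the debt.
        self.level -= nbytes
        if self.level < 0:
            sleep(-self.level / RATE)
```

Writes proceed immediately while the bucket balance stays non-negative and are delayed only when the recent byte rate exceeds RATE, which is what keeps a burst of test output from overrunning logcat's own buffer.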
+ if __name__ == "__main__": from .main import interactive_console as __pyrepl_interactive_console __pyrepl_interactive_console() diff --git a/Lib/_pyrepl/_threading_handler.py b/Lib/_pyrepl/_threading_handler.py new file mode 100644 index 00000000000..82f5e8650a2 --- /dev/null +++ b/Lib/_pyrepl/_threading_handler.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +import traceback + + +TYPE_CHECKING = False +if TYPE_CHECKING: + from threading import Thread + from types import TracebackType + from typing import Protocol + + class ExceptHookArgs(Protocol): + @property + def exc_type(self) -> type[BaseException]: ... + @property + def exc_value(self) -> BaseException | None: ... + @property + def exc_traceback(self) -> TracebackType | None: ... + @property + def thread(self) -> Thread | None: ... + + class ShowExceptions(Protocol): + def __call__(self) -> int: ... + def add(self, s: str) -> None: ... + + from .reader import Reader + + +def install_threading_hook(reader: Reader) -> None: + import threading + + @dataclass + class ExceptHookHandler: + lock: threading.Lock = field(default_factory=threading.Lock) + messages: list[str] = field(default_factory=list) + + def show(self) -> int: + count = 0 + with self.lock: + if not self.messages: + return 0 + reader.restore() + for tb in self.messages: + count += 1 + if tb: + print(tb) + self.messages.clear() + reader.scheduled_commands.append("ctrl-c") + reader.prepare() + return count + + def add(self, s: str) -> None: + with self.lock: + self.messages.append(s) + + def exception(self, args: ExceptHookArgs) -> None: + lines = traceback.format_exception( + args.exc_type, + args.exc_value, + args.exc_traceback, + colorize=reader.can_colorize, + ) # type: ignore[call-overload] + pre = f"\nException in {args.thread.name}:\n" if args.thread else "\n" + tb = pre + "".join(lines) + self.add(tb) + + def __call__(self) -> int: + return self.show() + + + handler = ExceptHookHandler() + reader.threading_hook = handler + threading.excepthook = handler.exception diff --git a/Lib/_pyrepl/console.py b/Lib/_pyrepl/console.py index a8d3f520340..3e72a56807f 100644 --- a/Lib/_pyrepl/console.py +++ b/Lib/_pyrepl/console.py @@ -161,17 +161,22 @@ def __init__( super().__init__(locals=locals, filename=filename, local_exit=local_exit) # type: ignore[call-arg] self.can_colorize = _colorize.can_colorize() - def showsyntaxerror(self, filename=None): - super().showsyntaxerror(colorize=self.can_colorize) + def showsyntaxerror(self, filename=None, **kwargs): + super().showsyntaxerror(filename=filename, **kwargs) - def showtraceback(self): - super().showtraceback(colorize=self.can_colorize) + def _excepthook(self, typ, value, tb): + import traceback + lines = traceback.format_exception( + typ, value, tb, + colorize=self.can_colorize, + limit=traceback.BUILTIN_EXCEPTION_LIMIT) + self.write(''.join(lines)) def runsource(self, source, filename="", symbol="single"): try: tree = ast.parse(source) except (SyntaxError, OverflowError, ValueError): - self.showsyntaxerror(filename) + self.showsyntaxerror(filename, source=source) return False if tree.body: *_, last_stmt = tree.body @@ -188,10 +193,10 @@ def runsource(self, source, filename="", symbol="single"): f"Try the asyncio REPL ({python} -m asyncio) to use" f" top-level 'await' and run background asyncio tasks." 
) - self.showsyntaxerror(filename) + self.showsyntaxerror(filename, source=source) return False except (OverflowError, ValueError): - self.showsyntaxerror(filename) + self.showsyntaxerror(filename, source=source) return False if code is None: diff --git a/Lib/_pyrepl/historical_reader.py b/Lib/_pyrepl/historical_reader.py index dd90912d1d6..5d416f336ad 100644 --- a/Lib/_pyrepl/historical_reader.py +++ b/Lib/_pyrepl/historical_reader.py @@ -71,6 +71,18 @@ def do(self) -> None: r.select_item(r.historyi - 1) +class history_search_backward(commands.Command): + def do(self) -> None: + r = self.reader + r.search_next(forwards=False) + + +class history_search_forward(commands.Command): + def do(self) -> None: + r = self.reader + r.search_next(forwards=True) + + class restore_history(commands.Command): def do(self) -> None: r = self.reader @@ -234,6 +246,8 @@ def __post_init__(self) -> None: isearch_forwards, isearch_backwards, operate_and_get_next, + history_search_backward, + history_search_forward, ]: self.commands[c.__name__] = c self.commands[c.__name__.replace("_", "-")] = c @@ -251,8 +265,10 @@ def collect_keymap(self) -> tuple[tuple[KeySpec, CommandName], ...]: (r"\C-s", "forward-history-isearch"), (r"\M-r", "restore-history"), (r"\M-.", "yank-arg"), - (r"\", "last-history"), - (r"\", "first-history"), + (r"\", "history-search-forward"), + (r"\x1b[6~", "history-search-forward"), + (r"\", "history-search-backward"), + (r"\x1b[5~", "history-search-backward"), ) def select_item(self, i: int) -> None: @@ -264,6 +280,7 @@ def select_item(self, i: int) -> None: self.historyi = i self.pos = len(self.buffer) self.dirty = True + self.last_refresh_cache.invalidated = True def get_item(self, i: int) -> str: if i != len(self.history): @@ -304,6 +321,59 @@ def get_prompt(self, lineno: int, cursor_on_line: bool) -> str: else: return super().get_prompt(lineno, cursor_on_line) + def search_next(self, *, forwards: bool) -> None: + """Search history for the current line contents up to the cursor. + + Selects the first item found. If nothing is under the cursor, any next + item in history is selected. + """ + pos = self.pos + s = self.get_unicode() + history_index = self.historyi + + # In multiline contexts, we're only interested in the current line. 
+ nl_index = s.rfind('\n', 0, pos) + prefix = s[nl_index + 1:pos] + pos = len(prefix) + + match_prefix = len(prefix) + len_item = 0 + if history_index < len(self.history): + len_item = len(self.get_item(history_index)) + if len_item and pos == len_item: + match_prefix = False + elif not pos: + match_prefix = False + + while 1: + if forwards: + out_of_bounds = history_index >= len(self.history) - 1 + else: + out_of_bounds = history_index == 0 + if out_of_bounds: + if forwards and not match_prefix: + self.pos = 0 + self.buffer = [] + self.dirty = True + else: + self.error("not found") + return + + history_index += 1 if forwards else -1 + s = self.get_item(history_index) + + if not match_prefix: + self.select_item(history_index) + return + + len_acc = 0 + for i, line in enumerate(s.splitlines(keepends=True)): + if line.startswith(prefix): + self.select_item(history_index) + self.pos = pos + len_acc + return + len_acc += len(line) + def isearch_next(self) -> None: st = self.isearch_term p = self.pos diff --git a/Lib/_pyrepl/main.py b/Lib/_pyrepl/main.py index 041a4009f42..a6f824dcc4a 100644 --- a/Lib/_pyrepl/main.py +++ b/Lib/_pyrepl/main.py @@ -1,16 +1,32 @@ +import errno import os import sys + CAN_USE_PYREPL: bool -if sys.platform != "win32": - CAN_USE_PYREPL = True +FAIL_REASON: str +try: + if sys.platform == "win32" and sys.getwindowsversion().build < 10586: + raise RuntimeError("Windows 10 TH2 or later required") + if not os.isatty(sys.stdin.fileno()): + raise OSError(errno.ENOTTY, "tty required", "stdin") + from .simple_interact import check + if err := check(): + raise RuntimeError(err) +except Exception as e: + CAN_USE_PYREPL = False + FAIL_REASON = f"warning: can't use pyrepl: {e}" else: - CAN_USE_PYREPL = sys.getwindowsversion().build >= 10586 # Windows 10 TH2 + CAN_USE_PYREPL = True + FAIL_REASON = "" def interactive_console(mainmodule=None, quiet=False, pythonstartup=False): - global CAN_USE_PYREPL if not CAN_USE_PYREPL: + if not os.getenv('PYTHON_BASIC_REPL') and FAIL_REASON: + from .trace import trace + trace(FAIL_REASON) + print(FAIL_REASON, file=sys.stderr) return sys._baserepl() if mainmodule: @@ -20,8 +36,11 @@ def interactive_console(mainmodule=None, quiet=False, pythonstartup=False): namespace = __main__.__dict__ namespace.pop("__pyrepl_interactive_console", None) + # sys._baserepl() above does this internally, we do it here startup_path = os.getenv("PYTHONSTARTUP") if pythonstartup and startup_path: + sys.audit("cpython.run_startup", startup_path) + import tokenize with tokenize.open(startup_path) as f: startup_code = compile(f.read(), startup_path, "exec") @@ -34,22 +53,7 @@ def interactive_console(mainmodule=None, quiet=False, pythonstartup=False): if not hasattr(sys, "ps2"): sys.ps2 = "... 
" - run_interactive = None - try: - import errno - if not os.isatty(sys.stdin.fileno()): - raise OSError(errno.ENOTTY, "tty required", "stdin") - from .simple_interact import check - if err := check(): - raise RuntimeError(err) - from .simple_interact import run_multiline_interactive_console - run_interactive = run_multiline_interactive_console - except Exception as e: - from .trace import trace - msg = f"warning: can't use pyrepl: {e}" - trace(msg) - print(msg, file=sys.stderr) - CAN_USE_PYREPL = False - if run_interactive is None: - return sys._baserepl() - run_interactive(namespace) + from .console import InteractiveColoredConsole + from .simple_interact import run_multiline_interactive_console + console = InteractiveColoredConsole(namespace, filename="") + run_multiline_interactive_console(console) diff --git a/Lib/_pyrepl/reader.py b/Lib/_pyrepl/reader.py index 20eff916ec4..935c520c5ad 100644 --- a/Lib/_pyrepl/reader.py +++ b/Lib/_pyrepl/reader.py @@ -21,6 +21,8 @@ from __future__ import annotations +import sys + from contextlib import contextmanager from dataclasses import dataclass, field, fields import unicodedata @@ -34,8 +36,7 @@ # types Command = commands.Command -if False: - from .types import Callback, SimpleContextManager, KeySpec, CommandName +from .types import Callback, SimpleContextManager, KeySpec, CommandName def disp_str(buffer: str) -> tuple[str, list[int]]: @@ -52,13 +53,15 @@ def disp_str(buffer: str) -> tuple[str, list[int]]: b: list[int] = [] s: list[str] = [] for c in buffer: - if ord(c) < 128: + if c == '\x1a': + s.append(c) + b.append(2) + elif ord(c) < 128: s.append(c) b.append(1) elif unicodedata.category(c).startswith("C"): c = r"\u%04x" % ord(c) s.append(c) - b.append(str_width(c)) b.extend([0] * (len(c) - 1)) else: s.append(c) @@ -111,7 +114,7 @@ def make_default_commands() -> dict[CommandName, type[Command]]: (r"\C-w", "unix-word-rubout"), (r"\C-x\C-u", "upcase-region"), (r"\C-y", "yank"), - (r"\C-z", "suspend"), + *(() if sys.platform == "win32" else ((r"\C-z", "suspend"), )), (r"\M-b", "backward-word"), (r"\M-c", "capitalize-word"), (r"\M-d", "kill-word"), @@ -131,7 +134,7 @@ def make_default_commands() -> dict[CommandName, type[Command]]: (r"\M-7", "digit-arg"), (r"\M-8", "digit-arg"), (r"\M-9", "digit-arg"), - # (r'\M-\n', 'insert-nl'), + (r"\M-\n", "accept"), ("\\\\", "self-insert"), (r"\x1b[200~", "enable_bracketed_paste"), (r"\x1b[201~", "disable_bracketed_paste"), @@ -147,6 +150,7 @@ def make_default_commands() -> dict[CommandName, type[Command]]: (r"\", "right"), (r"\C-\", "forward-word"), (r"\", "delete"), + (r"\x1b[3~", "delete"), (r"\", "backspace"), (r"\M-\", "backward-kill-word"), (r"\", "end-of-line"), # was 'end' @@ -243,6 +247,7 @@ class Reader: lxy: tuple[int, int] = field(init=False) scheduled_commands: list[str] = field(default_factory=list) can_colorize: bool = False + threading_hook: Callback | None = None ## cached metadata to speed up screen refreshes @dataclass @@ -254,6 +259,7 @@ class RefreshCache: pos: int = field(init=False) cxy: tuple[int, int] = field(init=False) dimensions: tuple[int, int] = field(init=False) + invalidated: bool = False def update_cache(self, reader: Reader, @@ -266,14 +272,19 @@ def update_cache(self, self.pos = reader.pos self.cxy = reader.cxy self.dimensions = reader.console.width, reader.console.height + self.invalidated = False def valid(self, reader: Reader) -> bool: + if self.invalidated: + return False dimensions = reader.console.width, reader.console.height dimensions_changed = dimensions != 
self.dimensions paste_changed = reader.in_bracketed_paste != self.in_bracketed_paste return not (dimensions_changed or paste_changed) def get_cached_location(self, reader: Reader) -> tuple[int, int]: + if self.invalidated: + raise ValueError("Cache is invalidated") offset = 0 earliest_common_pos = min(reader.pos, self.pos) num_common_lines = len(self.line_end_offsets) @@ -335,7 +346,10 @@ def calc_screen(self) -> list[str]: pos = self.pos pos -= offset + prompt_from_cache = (offset and self.buffer[offset - 1] != "\n") + lines = "".join(self.buffer[offset:]).split("\n") + cursor_found = False lines_beyond_cursor = 0 for ln, line in enumerate(lines, num_common_lines): @@ -349,7 +363,12 @@ def calc_screen(self) -> list[str]: # No need to keep formatting lines. # The console can't show them. break - prompt = self.get_prompt(ln, ll >= pos >= 0) + if prompt_from_cache: + # Only the first line's prompt can come from the cache + prompt_from_cache = False + prompt = "" + else: + prompt = self.get_prompt(ln, ll >= pos >= 0) while "\n" in prompt: pre_prompt, _, prompt = prompt.partition("\n") last_refresh_line_end_offsets.append(offset) @@ -704,6 +723,24 @@ def do_cmd(self, cmd: tuple[str, list[str]]) -> None: self.console.finish() self.finish() + def run_hooks(self) -> None: + threading_hook = self.threading_hook + if threading_hook is None and 'threading' in sys.modules: + from ._threading_handler import install_threading_hook + install_threading_hook(self) + if threading_hook is not None: + try: + threading_hook() + except Exception: + pass + + input_hook = self.console.input_hook + if input_hook: + try: + input_hook() + except Exception: + pass + def handle1(self, block: bool = True) -> bool: """Handle a single event. Wait as long as it takes if block is true (the default), otherwise return False if no event is @@ -714,16 +751,13 @@ def handle1(self, block: bool = True) -> bool: self.dirty = True while True: - input_hook = self.console.input_hook - if input_hook: - input_hook() - # We use the same timeout as in readline.c: 100ms - while not self.console.wait(100): - input_hook() - event = self.console.get_event(block=False) - else: - event = self.console.get_event(block) - if not event: # can only happen if we're not blocking + # We use the same timeout as in readline.c: 100ms + self.run_hooks() + self.console.wait(100) + event = self.console.get_event(block=False) + if not event: + if block: + continue return False translate = True @@ -745,8 +779,7 @@ def handle1(self, block: bool = True) -> bool: if cmd is None: if block: continue - else: - return False + return False self.do_cmd(cmd) return True diff --git a/Lib/_pyrepl/readline.py b/Lib/_pyrepl/readline.py index 28f592d80b1..5e1d3085874 100644 --- a/Lib/_pyrepl/readline.py +++ b/Lib/_pyrepl/readline.py @@ -58,7 +58,7 @@ TYPE_CHECKING = False if TYPE_CHECKING: - from typing import Any + from typing import Any, Mapping MoreLinesCallable = Callable[[str], bool] @@ -249,7 +249,7 @@ def _should_auto_indent(buffer: list[str], pos: int) -> bool: while pos > 0: pos -= 1 if last_char is None: - if buffer[pos] not in " \t\n": # ignore whitespaces + if buffer[pos] not in " \t\n#": # ignore whitespaces and comments last_char = buffer[pos] else: # even if we found a non-whitespace character before @@ -342,10 +342,10 @@ def do(self) -> None: class _ReadlineWrapper: f_in: int = -1 f_out: int = -1 - reader: ReadlineAlikeReader | None = None + reader: ReadlineAlikeReader | None = field(default=None, repr=False) saved_history_length: int = -1 startup_hook: 
Callback | None = None - config: ReadlineConfig = field(default_factory=ReadlineConfig) + config: ReadlineConfig = field(default_factory=ReadlineConfig, repr=False) def __post_init__(self) -> None: if self.f_in == -1: @@ -365,8 +365,12 @@ def input(self, prompt: object = "") -> str: except _error: assert raw_input is not None return raw_input(prompt) - reader.ps1 = str(prompt) - return reader.readline(startup_hook=self.startup_hook) + prompt_str = str(prompt) + reader.ps1 = prompt_str + sys.audit("builtins.input", prompt_str) + result = reader.readline(startup_hook=self.startup_hook) + sys.audit("builtins.input/result", result) + return result def multiline_input(self, more_lines: MoreLinesCallable, ps1: str, ps2: str) -> str: """Read an input on possibly multiple lines, asking for more @@ -423,18 +427,22 @@ def read_history_file(self, filename: str = gethistoryfile()) -> None: history = self.get_reader().history with open(os.path.expanduser(filename), 'rb') as f: - lines = [line.decode('utf-8', errors='replace') for line in f.read().split(b'\n')] + is_editline = f.readline().startswith(b"_HiStOrY_V2_") + if is_editline: + encoding = "unicode-escape" + else: + f.seek(0) + encoding = "utf-8" + + lines = [line.decode(encoding, errors='replace') for line in f.read().split(b'\n')] buffer = [] for line in lines: - # Ignore readline history file header - if line.startswith("_HiStOrY_V2_"): - continue if line.endswith("\r"): buffer.append(line+'\n') else: line = self._histline(line) if buffer: - line = "".join(buffer).replace("\r", "") + line + line = self._histline("".join(buffer).replace("\r", "") + line) del buffer[:] if line: history.append(line) @@ -479,15 +487,14 @@ def add_history(self, line: str) -> None: def set_startup_hook(self, function: Callback | None = None) -> None: self.startup_hook = function - def get_line_buffer(self) -> bytes: - buf_str = self.get_reader().get_unicode() - return buf_str.encode(ENCODING) + def get_line_buffer(self) -> str: + return self.get_reader().get_unicode() def _get_idxs(self) -> tuple[int, int]: start = cursor = self.get_reader().pos buf = self.get_line_buffer() for i in range(cursor - 1, -1, -1): - if str(buf[i]) in self.get_completer_delims(): + if buf[i] in self.get_completer_delims(): break start = i return start, cursor @@ -559,7 +566,7 @@ def stub(*args: object, **kwds: object) -> None: # ____________________________________________________________ -def _setup(namespace: dict[str, Any]) -> None: +def _setup(namespace: Mapping[str, Any]) -> None: global raw_input if raw_input is not None: return # don't run _setup twice @@ -575,7 +582,9 @@ def _setup(namespace: dict[str, Any]) -> None: _wrapper.f_in = f_in _wrapper.f_out = f_out - # set up namespace in rlcompleter + # set up namespace in rlcompleter, which requires it to be a bona fide dict + if not isinstance(namespace, dict): + namespace = dict(namespace) _wrapper.config.readline_completer = RLCompleter(namespace).complete # this is not really what readline.c does. 
Better than nothing I guess diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py index bc16c1f6a23..342a4b58bfd 100644 --- a/Lib/_pyrepl/simple_interact.py +++ b/Lib/_pyrepl/simple_interact.py @@ -27,12 +27,11 @@ import _sitebuiltins import linecache -import builtins +import functools +import os import sys import code -from types import ModuleType -from .console import InteractiveColoredConsole from .readline import _get_reader, multiline_input TYPE_CHECKING = False @@ -52,7 +51,9 @@ def check() -> str: try: _get_reader() except _error as e: - return str(e) or repr(e) or "unknown error" + if term := os.environ.get("TERM", ""): + term = f"; TERM={term}" + return str(str(e) or repr(e) or "unknown error") + term return "" @@ -78,24 +79,40 @@ def _clear_screen(): "copyright": _sitebuiltins._Printer('copyright', sys.copyright), "help": "help", "clear": _clear_screen, + "\x1a": _sitebuiltins.Quitter('\x1a', ''), } +def _more_lines(console: code.InteractiveConsole, unicodetext: str) -> bool: + # ooh, look at the hack: + src = _strip_final_indent(unicodetext) + try: + code = console.compile(src, "", "single") + except (OverflowError, SyntaxError, ValueError): + lines = src.splitlines(keepends=True) + if len(lines) == 1: + return False + + last_line = lines[-1] + was_indented = last_line.startswith((" ", "\t")) + not_empty = last_line.strip() != "" + incomplete = not last_line.endswith("\n") + return (was_indented or not_empty) and incomplete + else: + return code is None + + def run_multiline_interactive_console( - namespace: dict[str, Any], + console: code.InteractiveConsole, + *, future_flags: int = 0, - console: code.InteractiveConsole | None = None, ) -> None: from .readline import _setup - _setup(namespace) - - if console is None: - console = InteractiveColoredConsole( - namespace, filename="" - ) + _setup(console.locals) if future_flags: console.compile.compiler.flags |= future_flags + more_lines = functools.partial(_more_lines, console) input_n = 0 def maybe_run_command(statement: str) -> bool: @@ -121,16 +138,6 @@ def maybe_run_command(statement: str) -> bool: return False - def more_lines(unicodetext: str) -> bool: - # ooh, look at the hack: - src = _strip_final_indent(unicodetext) - try: - code = console.compile(src, "", "single") - except (OverflowError, SyntaxError, ValueError): - return False - else: - return code is None - while 1: try: try: @@ -154,7 +161,14 @@ def more_lines(unicodetext: str) -> bool: assert not more input_n += 1 except KeyboardInterrupt: - console.write("KeyboardInterrupt\n") + r = _get_reader() + if r.input_trans is r.isearch_trans: + r.do_cmd(("isearch-end", [""])) + r.pos = len(r.get_unicode()) + r.dirty = True + r.refresh() + r.in_bracketed_paste = False + console.write("\nKeyboardInterrupt\n") console.resetbuffer() except MemoryError: console.write("\nMemoryError\n") diff --git a/Lib/_pyrepl/unix_console.py b/Lib/_pyrepl/unix_console.py index c4dedd97d1e..09b5094f986 100644 --- a/Lib/_pyrepl/unix_console.py +++ b/Lib/_pyrepl/unix_console.py @@ -29,6 +29,7 @@ import struct import termios import time +import platform from fcntl import ioctl from . 
import curses @@ -198,8 +199,14 @@ def _my_getstr(cap: str, optional: bool = False) -> bytes | None: self.event_queue = EventQueue(self.input_fd, self.encoding) self.cursor_visible = 1 + def more_in_buffer(self) -> bool: + return bool( + self.input_buffer + and self.input_buffer_pos < len(self.input_buffer) + ) + def __read(self, n: int) -> bytes: - if not self.input_buffer or self.input_buffer_pos >= len(self.input_buffer): + if not self.more_in_buffer(): self.input_buffer = os.read(self.input_fd, 10000) ret = self.input_buffer[self.input_buffer_pos : self.input_buffer_pos + n] @@ -334,6 +341,10 @@ def prepare(self): raw.cc[termios.VTIME] = 0 tcsetattr(self.input_fd, termios.TCSADRAIN, raw) + # In macOS terminal we need to deactivate line wrap via ANSI escape code + if platform.system() == "Darwin" and os.getenv("TERM_PROGRAM") == "Apple_Terminal": + os.write(self.output_fd, b"\033[?7l") + self.screen = [] self.height, self.width = self.getheightwidth() @@ -362,6 +373,9 @@ def restore(self): self.flushoutput() tcsetattr(self.input_fd, termios.TCSADRAIN, self.__svtermstate) + if platform.system() == "Darwin" and os.getenv("TERM_PROGRAM") == "Apple_Terminal": + os.write(self.output_fd, b"\033[?7h") + if hasattr(self, "old_sigwinch"): signal.signal(signal.SIGWINCH, self.old_sigwinch) del self.old_sigwinch @@ -383,6 +397,9 @@ def get_event(self, block: bool = True) -> Event | None: Returns: - Event: Event object from the event queue. """ + if not block and not self.wait(timeout=0): + return None + while self.event_queue.empty(): while True: try: @@ -397,15 +414,17 @@ def get_event(self, block: bool = True) -> Event | None: raise else: break - if not block: - break return self.event_queue.get() def wait(self, timeout: float | None = None) -> bool: """ Wait for events on the console. 
""" - return bool(self.pollob.poll(timeout)) + return ( + not self.event_queue.empty() + or self.more_in_buffer() + or bool(self.pollob.poll(timeout)) + ) def set_cursor_vis(self, visible): """ diff --git a/Lib/_pyrepl/utils.py b/Lib/_pyrepl/utils.py index 20dbb1f7e17..0f36083b6ff 100644 --- a/Lib/_pyrepl/utils.py +++ b/Lib/_pyrepl/utils.py @@ -21,4 +21,5 @@ def wlen(s: str) -> int: length = sum(str_width(i) for i in s) # remove lengths of any escape sequences sequence = ANSI_ESCAPE_SEQUENCE.findall(s) - return length - sum(len(i) for i in sequence) + ctrl_z_cnt = s.count('\x1a') + return length - sum(len(i) for i in sequence) + ctrl_z_cnt diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py index 9e97b1524e2..ea6a9b4e8ad 100644 --- a/Lib/_pyrepl/windows_console.py +++ b/Lib/_pyrepl/windows_console.py @@ -63,6 +63,7 @@ def __init__(self, err: int | None, descr: str | None = None) -> None: if TYPE_CHECKING: from typing import IO +# Virtual-Key Codes: https://learn.microsoft.com/en-us/windows/win32/inputdev/virtual-key-codes VK_MAP: dict[int, str] = { 0x23: "end", # VK_END 0x24: "home", # VK_HOME @@ -87,10 +88,10 @@ def __init__(self, err: int | None, descr: str | None = None) -> None: 0x7D: "f14", # VK_F14 0x7E: "f15", # VK_F15 0x7F: "f16", # VK_F16 - 0x79: "f17", # VK_F17 - 0x80: "f18", # VK_F18 - 0x81: "f19", # VK_F19 - 0x82: "f20", # VK_F20 + 0x80: "f17", # VK_F17 + 0x81: "f18", # VK_F18 + 0x82: "f19", # VK_F19 + 0x83: "f20", # VK_F20 } # Console escape codes: https://learn.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences @@ -253,7 +254,7 @@ def __write_changed_line( else: self.__posxy = wlen(newline), y - if "\x1b" in newline or y != self.__posxy[1]: + if "\x1b" in newline or y != self.__posxy[1] or '\x1a' in newline: # ANSI escape characters are present, so we can't assume # anything about the position of the cursor. Moving the cursor # to the left margin should work to get to a known position. 
@@ -291,6 +292,9 @@ def _disable_blinking(self): self.__write("\x1b[?12l") def __write(self, text: str) -> None: + if "\x1a" in text: + text = ''.join(["^Z" if x == '\x1a' else x for x in text]) + if self.out is not None: self.out.write(text.encode(self.encoding, "replace")) self.out.flush() @@ -367,15 +371,19 @@ def _getscrollbacksize(self) -> int: return info.srWindow.Bottom # type: ignore[no-any-return] - def _read_input(self) -> INPUT_RECORD | None: + def _read_input(self, block: bool = True) -> INPUT_RECORD | None: + if not block: + events = DWORD() + if not GetNumberOfConsoleInputEvents(InHandle, events): + raise WinError(GetLastError()) + if not events.value: + return None + rec = INPUT_RECORD() read = DWORD() if not ReadConsoleInput(InHandle, rec, 1, read): raise WinError(GetLastError()) - if read.value == 0: - return None - return rec def get_event(self, block: bool = True) -> Event | None: @@ -386,10 +394,8 @@ def get_event(self, block: bool = True) -> Event | None: return self.event_queue.pop() while True: - rec = self._read_input() + rec = self._read_input(block) if rec is None: - if block: - continue return None if rec.EventType == WINDOW_BUFFER_SIZE_EVENT: @@ -460,8 +466,8 @@ def flushoutput(self) -> None: def forgetinput(self) -> None: """Forget all pending, but not yet processed input.""" - while self._read_input() is not None: - pass + if not FlushConsoleInputBuffer(InHandle): + raise WinError(GetLastError()) def getpending(self) -> Event: """Return the characters that have been typed but not yet @@ -475,7 +481,7 @@ def wait(self, timeout: float | None) -> bool: while True: if msvcrt.kbhit(): # type: ignore[attr-defined] return True - if timeout and time.time() - start_time > timeout: + if timeout and time.time() - start_time > timeout / 1000: return False time.sleep(0.01) @@ -586,6 +592,14 @@ class INPUT_RECORD(Structure): ReadConsoleInput.argtypes = [HANDLE, POINTER(INPUT_RECORD), DWORD, POINTER(DWORD)] ReadConsoleInput.restype = BOOL + GetNumberOfConsoleInputEvents = _KERNEL32.GetNumberOfConsoleInputEvents + GetNumberOfConsoleInputEvents.argtypes = [HANDLE, POINTER(DWORD)] + GetNumberOfConsoleInputEvents.restype = BOOL + + FlushConsoleInputBuffer = _KERNEL32.FlushConsoleInputBuffer + FlushConsoleInputBuffer.argtypes = [HANDLE] + FlushConsoleInputBuffer.restype = BOOL + OutHandle = GetStdHandle(STD_OUTPUT_HANDLE) InHandle = GetStdHandle(STD_INPUT_HANDLE) else: @@ -598,5 +612,7 @@ def _win_only(*args, **kwargs): ScrollConsoleScreenBuffer = _win_only SetConsoleMode = _win_only ReadConsoleInput = _win_only + GetNumberOfConsoleInputEvents = _win_only + FlushConsoleInputBuffer = _win_only OutHandle = 0 InHandle = 0 diff --git a/Lib/argparse.py b/Lib/argparse.py index 7e5e3129dbd..b0f96565eba 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -261,13 +261,12 @@ def add_argument(self, action): # find all invocations get_invocation = self._format_action_invocation - invocations = [get_invocation(action)] + invocation_lengths = [len(get_invocation(action)) + self._current_indent] for subaction in self._iter_indented_subactions(action): - invocations.append(get_invocation(subaction)) + invocation_lengths.append(len(get_invocation(subaction)) + self._current_indent) # update the maximum item length - invocation_length = max(map(len, invocations)) - action_length = invocation_length + self._current_indent + action_length = max(invocation_lengths) self._action_max_length = max(self._action_max_length, action_length) @@ -447,15 +446,24 @@ def _get_actions_usage_parts(self, actions, 
groups): parts.append(part) # group mutually exclusive actions + inserted_separators_indices = set() for start, end in sorted(inserts, reverse=True): group = inserts[start, end] group_parts = [item for item in parts[start:end] if item is not None] + group_size = len(group_parts) if group.required: - open, close = "()" if len(group_parts) > 1 else ("", "") + open, close = "()" if group_size > 1 else ("", "") else: open, close = "[]" - parts[start] = open + " | ".join(group_parts) + close - for i in range(start + 1, end): + group_parts[0] = open + group_parts[0] + group_parts[-1] = group_parts[-1] + close + for i, part in enumerate(group_parts[:-1], start=start): + # insert a separator if not already done in a nested group + if i not in inserted_separators_indices: + parts[i] = part + ' |' + inserted_separators_indices.add(i) + parts[start + group_size - 1] = group_parts[-1] + for i in range(start + group_size, end): parts[i] = None # return the usage parts @@ -1825,8 +1833,8 @@ def add_subparsers(self, **kwargs): kwargs.setdefault('parser_class', type(self)) if 'title' in kwargs or 'description' in kwargs: - title = _(kwargs.pop('title', 'subcommands')) - description = _(kwargs.pop('description', None)) + title = kwargs.pop('title', _('subcommands')) + description = kwargs.pop('description', None) self._subparsers = self.add_argument_group(title, description) else: self._subparsers = self._positionals diff --git a/Lib/asyncio/__main__.py b/Lib/asyncio/__main__.py index 91fff9aaee3..5120140e061 100644 --- a/Lib/asyncio/__main__.py +++ b/Lib/asyncio/__main__.py @@ -91,35 +91,24 @@ def run(self): console.write(banner) if startup_path := os.getenv("PYTHONSTARTUP"): + sys.audit("cpython.run_startup", startup_path) + import tokenize with tokenize.open(startup_path) as f: startup_code = compile(f.read(), startup_path, "exec") exec(startup_code, console.locals) ps1 = getattr(sys, "ps1", ">>> ") - if can_colorize(): + if can_colorize() and CAN_USE_PYREPL: ps1 = f"{ANSIColors.BOLD_MAGENTA}{ps1}{ANSIColors.RESET}" console.write(f"{ps1}import asyncio\n") - try: - import errno - if os.getenv("PYTHON_BASIC_REPL"): - raise RuntimeError("user environment requested basic REPL") - if not os.isatty(sys.stdin.fileno()): - raise OSError(errno.ENOTTY, "tty required", "stdin") - - # This import will fail on operating systems with no termios. 
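The argparse usage-formatting rework above changes how the parts of a mutually exclusive group are bracketed and joined. A small, hypothetical parser showing the rendering this code is responsible for, for a simple non-nested group:

import argparse

parser = argparse.ArgumentParser(prog="demo", add_help=False)
group = parser.add_mutually_exclusive_group()
group.add_argument("--json", action="store_true")
group.add_argument("--xml", action="store_true")

# An optional mutually exclusive group is wrapped in brackets and its
# alternatives are joined with " | ":
print(parser.format_usage())   # usage: demo [--json | --xml]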
+ if CAN_USE_PYREPL: from _pyrepl.simple_interact import ( - check, run_multiline_interactive_console, ) - if err := check(): - raise RuntimeError(err) - except Exception as e: - console.interact(banner="", exitmsg=exit_message) - else: try: - run_multiline_interactive_console(console=console) + run_multiline_interactive_console(console) except SystemExit: # expected via the `exit` and `quit` commands pass @@ -128,6 +117,8 @@ def run(self): console.showtraceback() console.write("Internal error, ") return_code = 1 + else: + console.interact(banner="", exitmsg="") finally: warnings.filterwarnings( 'ignore', @@ -136,9 +127,23 @@ def run(self): loop.call_soon_threadsafe(loop.stop) + def interrupt(self) -> None: + if not CAN_USE_PYREPL: + return + + from _pyrepl.simple_interact import _get_reader + r = _get_reader() + if r.threading_hook is not None: + r.threading_hook.add("") # type: ignore + if __name__ == '__main__': - CAN_USE_PYREPL = True + sys.audit("cpython.run_stdin") + + if os.getenv('PYTHON_BASIC_REPL'): + CAN_USE_PYREPL = False + else: + from _pyrepl.main import CAN_USE_PYREPL return_code = 0 loop = asyncio.new_event_loop() @@ -163,6 +168,7 @@ def run(self): interactive_hook = getattr(sys, "__interactivehook__", None) if interactive_hook is not None: + sys.audit("cpython.run_interactivehook", interactive_hook) interactive_hook() if interactive_hook is site.register_readline: @@ -187,6 +193,7 @@ def run(self): keyboard_interrupted = True if repl_future and not repl_future.done(): repl_future.cancel() + repl_thread.interrupt() continue else: break diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index f0e690b61a7..e4a39f4d345 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -1028,8 +1028,7 @@ async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None): except OSError as exc: msg = ( f'error while attempting to bind on ' - f'address {laddr!r}: ' - f'{exc.strerror.lower()}' + f'address {laddr!r}: {str(exc).lower()}' ) exc = OSError(exc.errno, msg) my_exceptions.append(exc) @@ -1599,7 +1598,7 @@ async def create_server( except OSError as err: msg = ('error while attempting ' 'to bind on address %r: %s' - % (sa, err.strerror.lower())) + % (sa, str(err).lower())) if err.errno == errno.EADDRNOTAVAIL: # Assume the family is not enabled (bpo-30945) sockets.pop() diff --git a/Lib/asyncio/base_subprocess.py b/Lib/asyncio/base_subprocess.py index 6dbde2b696a..9c2ba679ce2 100644 --- a/Lib/asyncio/base_subprocess.py +++ b/Lib/asyncio/base_subprocess.py @@ -1,6 +1,9 @@ import collections import subprocess import warnings +import os +import signal +import sys from . import protocols from . 
import transports @@ -142,17 +145,31 @@ def _check_proc(self): if self._proc is None: raise ProcessLookupError() - def send_signal(self, signal): - self._check_proc() - self._proc.send_signal(signal) + if sys.platform == 'win32': + def send_signal(self, signal): + self._check_proc() + self._proc.send_signal(signal) + + def terminate(self): + self._check_proc() + self._proc.terminate() + + def kill(self): + self._check_proc() + self._proc.kill() + else: + def send_signal(self, signal): + self._check_proc() + try: + os.kill(self._proc.pid, signal) + except ProcessLookupError: + pass - def terminate(self): - self._check_proc() - self._proc.terminate() + def terminate(self): + self.send_signal(signal.SIGTERM) - def kill(self): - self._check_proc() - self._proc.kill() + def kill(self): + self.send_signal(signal.SIGKILL) async def _connect_pipes(self, waiter): try: diff --git a/Lib/code.py b/Lib/code.py index b93902ccf54..a70d8ccb29e 100644 --- a/Lib/code.py +++ b/Lib/code.py @@ -13,6 +13,7 @@ __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"] + class InteractiveInterpreter: """Base class for InteractiveConsole. @@ -64,7 +65,7 @@ def runsource(self, source, filename="", symbol="single"): code = self.compile(source, filename, symbol) except (OverflowError, SyntaxError, ValueError): # Case 1 - self.showsyntaxerror(filename) + self.showsyntaxerror(filename, source=source) return False if code is None: @@ -106,32 +107,16 @@ def showsyntaxerror(self, filename=None, **kwargs): The output is written by self.write(), below. """ - colorize = kwargs.pop('colorize', False) - type, value, tb = sys.exc_info() - sys.last_exc = value - sys.last_type = type - sys.last_value = value - sys.last_traceback = tb - if filename and type is SyntaxError: - # Work hard to stuff the correct filename in the exception - try: - msg, (dummy_filename, lineno, offset, line) = value.args - except ValueError: - # Not the format we expect; leave it alone - pass - else: - # Stuff in the right filename - value = SyntaxError(msg, (filename, lineno, offset, line)) - sys.last_exc = sys.last_value = value - if sys.excepthook is sys.__excepthook__: - lines = traceback.format_exception_only(type, value, colorize=colorize) - self.write(''.join(lines)) - else: - # If someone has set sys.excepthook, we let that take precedence - # over self.write - sys.excepthook(type, value, tb) + try: + typ, value, tb = sys.exc_info() + if filename and issubclass(typ, SyntaxError): + value.filename = filename + source = kwargs.pop('source', "") + self._showtraceback(typ, value, None, source) + finally: + typ = value = tb = None - def showtraceback(self, **kwargs): + def showtraceback(self): """Display the exception that just occurred. We remove the first stack item because it is our own code. @@ -139,20 +124,45 @@ def showtraceback(self, **kwargs): The output is written by self.write(), below. 
""" - colorize = kwargs.pop('colorize', False) - sys.last_type, sys.last_value, last_tb = ei = sys.exc_info() - sys.last_traceback = last_tb - sys.last_exc = ei[1] try: - lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next, colorize=colorize) - if sys.excepthook is sys.__excepthook__: - self.write(''.join(lines)) - else: - # If someone has set sys.excepthook, we let that take precedence - # over self.write - sys.excepthook(ei[0], ei[1], last_tb) + typ, value, tb = sys.exc_info() + self._showtraceback(typ, value, tb.tb_next, '') finally: - last_tb = ei = None + typ = value = tb = None + + def _showtraceback(self, typ, value, tb, source): + sys.last_type = typ + sys.last_traceback = tb + value = value.with_traceback(tb) + # Set the line of text that the exception refers to + lines = source.splitlines() + if (source and typ is SyntaxError + and not value.text and len(lines) >= value.lineno): + value.text = lines[value.lineno - 1] + sys.last_exc = sys.last_value = value = value.with_traceback(tb) + if sys.excepthook is sys.__excepthook__: + self._excepthook(typ, value, tb) + else: + # If someone has set sys.excepthook, we let that take precedence + # over self.write + try: + sys.excepthook(typ, value, tb) + except SystemExit: + raise + except BaseException as e: + e.__context__ = None + e = e.with_traceback(e.__traceback__.tb_next) + print('Error in sys.excepthook:', file=sys.stderr) + sys.__excepthook__(type(e), e, e.__traceback__) + print(file=sys.stderr) + print('Original exception was:', file=sys.stderr) + sys.__excepthook__(typ, value, tb) + + def _excepthook(self, typ, value, tb): + # This method is being overwritten in + # _pyrepl.console.InteractiveColoredConsole + lines = traceback.format_exception(typ, value, tb) + self.write(''.join(lines)) def write(self, data): """Write a string. @@ -366,7 +376,7 @@ def interact(banner=None, readfunc=None, local=None, exitmsg=None, local_exit=Fa parser = argparse.ArgumentParser() parser.add_argument('-q', action='store_true', - help="don't print version and copyright messages") + help="don't print version and copyright messages") args = parser.parse_args() if args.q or sys.flags.quiet: banner = '' diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py index 86ca8b8a841..034ba377a0d 100644 --- a/Lib/collections/abc.py +++ b/Lib/collections/abc.py @@ -1,3 +1,3 @@ -from _collections_abc import * -from _collections_abc import __all__ -from _collections_abc import _CallableGenericAlias +import _collections_abc +import sys +sys.modules[__name__] = _collections_abc diff --git a/Lib/colorsys.py b/Lib/colorsys.py index bc897bd0f99..e97f91718a3 100644 --- a/Lib/colorsys.py +++ b/Lib/colorsys.py @@ -24,7 +24,7 @@ __all__ = ["rgb_to_yiq","yiq_to_rgb","rgb_to_hls","hls_to_rgb", "rgb_to_hsv","hsv_to_rgb"] -# Some floating point constants +# Some floating-point constants ONE_THIRD = 1.0/3.0 ONE_SIXTH = 1.0/6.0 diff --git a/Lib/copy.py b/Lib/copy.py index a69bc4e78c2..2a4606246aa 100644 --- a/Lib/copy.py +++ b/Lib/copy.py @@ -4,8 +4,9 @@ import copy - x = copy.copy(y) # make a shallow copy of y - x = copy.deepcopy(y) # make a deep copy of y + x = copy.copy(y) # make a shallow copy of y + x = copy.deepcopy(y) # make a deep copy of y + x = copy.replace(y, a=1, b=2) # new object with fields replaced, as defined by `__replace__` For module specific errors, copy.Error is raised. 
@@ -56,7 +57,7 @@ class Error(Exception): pass error = Error # backward compatibility -__all__ = ["Error", "copy", "deepcopy"] +__all__ = ["Error", "copy", "deepcopy", "replace"] def copy(x): """Shallow copy operation on arbitrary Python objects. diff --git a/Lib/csv.py b/Lib/csv.py index 75e35b23236..cd202659873 100644 --- a/Lib/csv.py +++ b/Lib/csv.py @@ -47,7 +47,7 @@ class excel: field contains either the quotechar or the delimiter csv.QUOTE_ALL means that quotes are always placed around fields. csv.QUOTE_NONNUMERIC means that quotes are always placed around - fields which do not parse as integers or floating point + fields which do not parse as integers or floating-point numbers. csv.QUOTE_STRINGS means that quotes are always placed around fields which are strings. Note that the Python value None diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index b7ee46d664a..8261773cef9 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -314,8 +314,6 @@ def SetPointerType(pointer, cls): del _pointer_type_cache[id(pointer)] def ARRAY(typ, len): - import warnings - warnings._deprecated("ctypes.ARRAY", remove=(3, 15)) return typ * len ################################################################ diff --git a/Lib/decimal.py b/Lib/decimal.py index d61e374b9f9..ee3147f5dde 100644 --- a/Lib/decimal.py +++ b/Lib/decimal.py @@ -1,6 +1,6 @@ -"""Decimal fixed point and floating point arithmetic. +"""Decimal fixed-point and floating-point arithmetic. -This is an implementation of decimal floating point arithmetic based on +This is an implementation of decimal floating-point arithmetic based on the General Decimal Arithmetic Specification: http://speleotrove.com/decimal/decarith.html @@ -103,6 +103,7 @@ from _decimal import __version__ from _decimal import __libmpdec_version__ except ImportError: - from _pydecimal import * - from _pydecimal import __version__ - from _pydecimal import __libmpdec_version__ + import _pydecimal + import sys + _pydecimal.__doc__ = __doc__ + sys.modules[__name__] = _pydecimal diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index ab3c3031ef5..ec2215a5e5f 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -92,6 +92,8 @@ ASPECIALS = TSPECIALS | set("*'%") ATTRIBUTE_ENDS = ASPECIALS | WSP EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%') +NLSET = {'\n', '\r'} +SPECIALSNL = SPECIALS | NLSET def quote_string(value): return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"' @@ -2802,9 +2804,13 @@ def _refold_parse_tree(parse_tree, *, policy): wrap_as_ew_blocked -= 1 continue tstr = str(part) - if part.token_type == 'ptext' and set(tstr) & SPECIALS: - # Encode if tstr contains special characters. - want_encoding = True + if not want_encoding: + if part.token_type == 'ptext': + # Encode if tstr contains special characters. + want_encoding = not SPECIALSNL.isdisjoint(tstr) + else: + # Encode if tstr contains newlines. 
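The refolding logic above decides whether a token needs RFC 2047 encoding by testing it against SPECIALSNL (for ptext tokens) or just NLSET (for everything else) with set.isdisjoint(). A toy illustration of those tests; the literal set contents below are simplified assumptions, not the parser's exact SPECIALS:

NLSET = {"\n", "\r"}
SPECIALS = set('()<>@,:;."[]\\')        # simplified stand-in for the real set
SPECIALSNL = SPECIALS | NLSET

def needs_encoding(token: str, is_ptext: bool) -> bool:
    # ptext is encoded when it contains any special or newline character;
    # other tokens are encoded only when they contain a newline.
    charset = SPECIALSNL if is_ptext else NLSET
    return not charset.isdisjoint(token)

assert needs_encoding("evil\r\nname", is_ptext=False)   # header injection attempt
assert needs_encoding("a:b", is_ptext=True)             # ':' is a special
assert not needs_encoding("plain text", is_ptext=True)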
+ want_encoding = not NLSET.isdisjoint(tstr) try: tstr.encode(encoding) charset = encoding @@ -2988,6 +2994,7 @@ def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset, excess = len(encoded_word) - remaining_space lines[-1] += encoded_word to_encode = to_encode[len(to_encode_word):] + leading_whitespace = '' if to_encode: lines.append(' ') diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py index 2ec54fbabae..5f9aa9fb091 100644 --- a/Lib/email/_policybase.py +++ b/Lib/email/_policybase.py @@ -157,6 +157,13 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta): message_factory -- the class to use to create new message objects. If the value is None, the default is Message. + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. """ raise_on_defect = False @@ -165,6 +172,7 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta): max_line_length = 78 mangle_from_ = False message_factory = None + verify_generated_headers = True def handle_defect(self, obj, defect): """Based on policy, either raise defect or call register_defect. diff --git a/Lib/email/errors.py b/Lib/email/errors.py index 3ad00565549..02aa5eced6a 100644 --- a/Lib/email/errors.py +++ b/Lib/email/errors.py @@ -29,6 +29,10 @@ class CharsetError(MessageError): """An illegal charset was given.""" +class HeaderWriteError(MessageError): + """Error while writing headers.""" + + # These are parsing defects which the parser was able to work around. class MessageDefect(ValueError): """Base class for a message defect.""" diff --git a/Lib/email/generator.py b/Lib/email/generator.py index c8056ad47ba..47b9df8f4e6 100644 --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -14,12 +14,14 @@ from copy import deepcopy from io import StringIO, BytesIO from email.utils import _has_surrogates +from email.errors import HeaderWriteError UNDERSCORE = '_' NL = '\n' # XXX: no longer used by the code below. NLCRE = re.compile(r'\r\n|\r|\n') fcre = re.compile(r'^From ', re.MULTILINE) +NEWLINE_WITHOUT_FWSP = re.compile(r'\r\n[^ \t]|\r[^ \n\t]|\n[^ \t]') class Generator: @@ -222,7 +224,16 @@ def _dispatch(self, msg): def _write_headers(self, msg): for h, v in msg.raw_items(): - self.write(self.policy.fold(h, v)) + folded = self.policy.fold(h, v) + if self.policy.verify_generated_headers: + linesep = self.policy.linesep + if not folded.endswith(self.policy.linesep): + raise HeaderWriteError( + f'folded header does not end with {linesep!r}: {folded!r}') + if NEWLINE_WITHOUT_FWSP.search(folded.removesuffix(linesep)): + raise HeaderWriteError( + f'folded header contains newline: {folded!r}') + self.write(folded) # A blank line always separates headers from body self.write(self._NL) diff --git a/Lib/email/utils.py b/Lib/email/utils.py index 103cef61a83..e42674fa4f3 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -241,7 +241,7 @@ def formatdate(timeval=None, localtime=False, usegmt=False): Fri, 09 Nov 2001 01:08:47 -0000 - Optional timeval if given is a floating point time value as accepted by + Optional timeval if given is a floating-point time value as accepted by gmtime() and localtime(), otherwise the current time is used. 
Optional localtime is a flag that when True, interprets timeval, and diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 23fe7a82eb0..c5350df2704 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,7 +10,7 @@ __all__ = ["version", "bootstrap"] -_PIP_VERSION = "24.1.1" +_PIP_VERSION = "24.2" # Directory of system wheel packages. Some Linux distribution packaging # policies recommend against bundling dependencies. For example, Fedora diff --git a/Lib/ensurepip/_bundled/pip-24.1.1-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl similarity index 76% rename from Lib/ensurepip/_bundled/pip-24.1.1-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl index e27568eb8b3..542cdd1e728 100644 Binary files a/Lib/ensurepip/_bundled/pip-24.1.1-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl differ diff --git a/Lib/filecmp.py b/Lib/filecmp.py index 6ffc71fc059..c5b8d854d77 100644 --- a/Lib/filecmp.py +++ b/Lib/filecmp.py @@ -88,7 +88,7 @@ def _do_cmp(f1, f2): class dircmp: """A class that manages the comparison of 2 directories. - dircmp(a, b, ignore=None, hide=None, shallow=True) + dircmp(a, b, ignore=None, hide=None, *, shallow=True) A and B are directories. IGNORE is a list of names to ignore, defaults to DEFAULT_IGNORES. @@ -124,7 +124,7 @@ class dircmp: in common_dirs. """ - def __init__(self, a, b, ignore=None, hide=None, shallow=True): # Initialize + def __init__(self, a, b, ignore=None, hide=None, *, shallow=True): # Initialize self.left = a self.right = b if hide is None: @@ -164,12 +164,14 @@ def phase2(self): # Distinguish files, directories, funnies ok = True try: a_stat = os.stat(a_path) - except OSError: + except (OSError, ValueError): + # See https://github.com/python/cpython/issues/122400 + # for the rationale for protecting against ValueError. # print('Can\'t stat', a_path, ':', why.args[1]) ok = False try: b_stat = os.stat(b_path) - except OSError: + except (OSError, ValueError): # print('Can\'t stat', b_path, ':', why.args[1]) ok = False @@ -201,7 +203,7 @@ def phase4(self): # Find out differences between common subdirectories a_x = os.path.join(self.left, x) b_x = os.path.join(self.right, x) self.subdirs[x] = self.__class__(a_x, b_x, self.ignore, self.hide, - self.shallow) + shallow=self.shallow) def phase4_closure(self): # Recursively call phase4() on subdirectories self.phase4() @@ -285,12 +287,12 @@ def cmpfiles(a, b, common, shallow=True): # Return: # 0 for equal # 1 for different -# 2 for funny cases (can't stat, etc.) +# 2 for funny cases (can't stat, NUL bytes, etc.) # def _cmp(a, b, sh, abs=abs, cmp=cmp): try: return not abs(cmp(a, b, sh)) - except OSError: + except (OSError, ValueError): return 2 diff --git a/Lib/fractions.py b/Lib/fractions.py index f8c6c9c438c..e71ef58f18b 100644 --- a/Lib/fractions.py +++ b/Lib/fractions.py @@ -875,8 +875,10 @@ def __pow__(a, b): # A fractional power will generally produce an # irrational number. 
return float(a) ** float(b) - else: + elif isinstance(b, (float, complex)): return float(a) ** b + else: + return NotImplemented def __rpow__(b, a): """a ** b""" diff --git a/Lib/http/cookies.py b/Lib/http/cookies.py index 35ac2dc6ae2..6b9ed24ad8e 100644 --- a/Lib/http/cookies.py +++ b/Lib/http/cookies.py @@ -184,8 +184,13 @@ def _quote(str): return '"' + str.translate(_Translator) + '"' -_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") -_QuotePatt = re.compile(r"[\\].") +_unquote_sub = re.compile(r'\\(?:([0-3][0-7][0-7])|(.))').sub + +def _unquote_replace(m): + if m[1]: + return chr(int(m[1], 8)) + else: + return m[2] def _unquote(str): # If there aren't any doublequotes, @@ -205,36 +210,13 @@ def _unquote(str): # \012 --> \n # \" --> " # - i = 0 - n = len(str) - res = [] - while 0 <= i < n: - o_match = _OctalPatt.search(str, i) - q_match = _QuotePatt.search(str, i) - if not o_match and not q_match: # Neither matched - res.append(str[i:]) - break - # else: - j = k = -1 - if o_match: - j = o_match.start(0) - if q_match: - k = q_match.start(0) - if q_match and (not o_match or k < j): # QuotePatt matched - res.append(str[i:k]) - res.append(str[k+1]) - i = k + 2 - else: # OctalPatt matched - res.append(str[i:j]) - res.append(chr(int(str[j+1:j+4], 8))) - i = j + 4 - return _nulljoin(res) + return _unquote_sub(_unquote_replace, str) # The _getdate() routine is used to set the expiration time in the cookie's HTTP # header. By default, _getdate() returns the current time in the appropriate # "expires" format for a Set-Cookie header. The one optional argument is an # offset from now, in seconds. For example, an offset of -3600 means "one hour -# ago". The offset may be a floating point number. +# ago". The offset may be a floating-point number. # _weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] diff --git a/Lib/idlelib/Icons/README.txt b/Lib/idlelib/Icons/README.txt index d91c4d5d8d8..e245bc0b26e 100644 --- a/Lib/idlelib/Icons/README.txt +++ b/Lib/idlelib/Icons/README.txt @@ -1,13 +1,51 @@ -The IDLE icons are from https://bugs.python.org/issue1490384 +IDLE-PYTHON LOGOS -Created by Andrew Clover. +These are sent to tk on Windows, *NIX, and non-Aqua macOS +in pyshell following "# set application icon". -The original sources are available from Andrew's website: + +2006?: Andrew Clover made variously sized python icons for win23. https://www.doxdesk.com/software/py/pyicons.html -Various different formats and sizes are available at this GitHub Pull Request: -https://github.com/python/cpython/pull/17473 +2006: 16, 32, and 48 bit .png versions were copied to CPython +as Python application icons, maybe in PC/icons/py.ico. +https://github.com/python/cpython/issues/43372 + +2014: They were copied (perhaps a bit revised) to idlelib/Icons. +https://github.com/python/cpython/issues/64605 +.gif versions were also added. + +2020: Add Clover's 256-bit image. +https://github.com/python/cpython/issues/82620 +Other fixups were done. + +The idle.ico file used for Windows was created with ImageMagick: + $ convert idle_16.png idle_32.png idle_48.png idle_256.png idle.ico +** This needs redoing whenever files are changed. +?? Do Start, Desktop, and Taskbar use idlelib/Icons files? + +Issue added Windows Store PC/icons/idlex44.png and .../idlex150.png. +https://github.com/python/cpython/pull/22817 +?? Should these be updated with major changes? + +2022: Optimize .png images in CPython repository with external program. +https://github.com/python/cpython/pull/21348 +idle.ico (and idlex##) were not updated. 
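The http.cookies rewrite above replaces the hand-rolled scanning loop with a single regular-expression substitution. Reproducing just that replacement core outside the module (the full _unquote() additionally strips surrounding double quotes first):

import re

_unquote_sub = re.compile(r'\\(?:([0-3][0-7][0-7])|(.))').sub

def _unquote_replace(m):
    # Group 1: a three-digit octal escape such as \012; group 2: any other
    # backslash-escaped character, which is kept as-is.
    if m[1]:
        return chr(int(m[1], 8))
    return m[2]

assert _unquote_sub(_unquote_replace, r'a\012b\"c') == 'a\nb"c'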
+ +The idlexx.gif files are only needed for *nix running tcl/tk 8.5. +As of 2022, this was known true for 1 'major' Linux distribution. +(Same would be true for any non-Aqua macOS with 8.5, but now none?) +Can be deleted when we require 8.6 or it is known always used. + +Future: Derivatives of Python logo should be submitted for approval. +PSF Trademark Working Group / Committee psf-trademarks@python.org +https://www.python.org/community/logos/ # Original files +https://www.python.org/psf/trademarks-faq/ +https://www.python.org/psf/trademarks/ # Usage. + + +OTHER GIFS: These are used by browsers using idlelib.tree. +At least some will not be used when tree is replaced by ttk.Treeview. -The idle.ico file was created with ImageMagick: - $ convert idle_16.png idle_32.png idle_48.png idle_256.png idle.ico +Edited 2024 August 26 by TJR. diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt index b1b652dc562..37ff93f9866 100644 --- a/Lib/idlelib/News3.txt +++ b/Lib/idlelib/News3.txt @@ -4,6 +4,16 @@ Released on 2024-10-xx ========================= +gh-120083: Add explicit black IDLE Hovertip foreground color needed for +recent macOS. Fixes Sonoma showing unreadable white on pale yellow. +Patch by John Riggles. + +gh-122482: Change About IDLE to direct users to discuss.python.org +instead of the now unused idle-dev email and mailing list. + +gh-78889: Stop Shell freezes by blocking user access to non-method +sys.stdout.shell attributes, which are all private. + gh-78955: Use user-selected color theme for Help => IDLE Doc. gh-96905: In idlelib code, stop redefining built-ins 'dict' and 'object'. diff --git a/Lib/idlelib/config.py b/Lib/idlelib/config.py index 6a5acac9be8..d10c88a43f9 100644 --- a/Lib/idlelib/config.py +++ b/Lib/idlelib/config.py @@ -600,7 +600,7 @@ def GetCoreKeys(self, keySetName=None): """ # TODO: = dict(sorted([(v-event, keys), ...]))? keyBindings={ - # vitual-event: list of key events. + # virtual-event: list of key events. '<>': ['', ''], '<>': ['', ''], '<>': ['', ''], diff --git a/Lib/idlelib/configdialog.py b/Lib/idlelib/configdialog.py index eedf97bf74f..4d2adb48570 100644 --- a/Lib/idlelib/configdialog.py +++ b/Lib/idlelib/configdialog.py @@ -111,7 +111,7 @@ def create_widgets(self): load_configs: Load pages except for extensions. activate_config_changes: Tell editors to reload. """ - self.frame = frame = Frame(self, padding="5px") + self.frame = frame = Frame(self, padding=5) self.frame.grid(sticky="nwes") self.note = note = Notebook(frame) self.extpage = ExtPage(note) diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py index 7bfa0932500..c76db20c587 100644 --- a/Lib/idlelib/editor.py +++ b/Lib/idlelib/editor.py @@ -914,7 +914,7 @@ def RemoveKeybindings(self): def ApplyKeybindings(self): """Apply the virtual, configurable keybindings. - Alse update hotkeys to current keyset. + Also update hotkeys to current keyset. """ # Called from configdialog.activate_config_changes. self.mainmenu.default_keydefs = keydefs = idleConf.GetCurrentKeySet() diff --git a/Lib/idlelib/extend.txt b/Lib/idlelib/extend.txt index b482f76c4fb..2522758ceb4 100644 --- a/Lib/idlelib/extend.txt +++ b/Lib/idlelib/extend.txt @@ -52,7 +52,7 @@ should probably be refined in the future.) Extensions are not required to define menu entries for all the events they implement. 
(They are also not required to create keybindings, but in that -case there must be empty bindings in cofig-extensions.def) +case there must be empty bindings in config-extensions.def) Here is a partial example from zzdummy.py: diff --git a/Lib/idlelib/help_about.py b/Lib/idlelib/help_about.py index aa1c352897f..81c65f6264e 100644 --- a/Lib/idlelib/help_about.py +++ b/Lib/idlelib/help_about.py @@ -85,15 +85,18 @@ def create_widgets(self): byline = Label(frame_background, text=byline_text, justify=LEFT, fg=self.fg, bg=self.bg) byline.grid(row=2, column=0, sticky=W, columnspan=3, padx=10, pady=5) - email = Label(frame_background, text='email: idle-dev@python.org', - justify=LEFT, fg=self.fg, bg=self.bg) - email.grid(row=6, column=0, columnspan=2, sticky=W, padx=10, pady=0) + + forums_url = "https://discuss.python.org" + forums = Label(frame_background, text="Python forums: "+forums_url, + justify=LEFT, fg=self.fg, bg=self.bg) + forums.grid(row=6, column=0, sticky=W, padx=10, pady=0) + forums.bind("", lambda event: webbrowser.open(forums_url)) docs_url = ("https://docs.python.org/%d.%d/library/idle.html" % sys.version_info[:2]) docs = Label(frame_background, text=docs_url, justify=LEFT, fg=self.fg, bg=self.bg) docs.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0) - docs.bind("", lambda event: webbrowser.open(docs['text'])) + docs.bind("", lambda event: webbrowser.open(docs_url)) Frame(frame_background, borderwidth=1, relief=SUNKEN, height=2, bg=self.bg).grid(row=8, column=0, sticky=EW, @@ -123,9 +126,7 @@ def create_widgets(self): height=2, bg=self.bg).grid(row=11, column=0, sticky=EW, columnspan=3, padx=5, pady=5) - idle = Label(frame_background, - text='IDLE', - fg=self.fg, bg=self.bg) + idle = Label(frame_background, text='IDLE', fg=self.fg, bg=self.bg) idle.grid(row=12, column=0, sticky=W, padx=10, pady=0) idle_buttons = Frame(frame_background, bg=self.bg) idle_buttons.grid(row=13, column=0, columnspan=3, sticky=NSEW) diff --git a/Lib/idlelib/idle_test/example_stub.pyi b/Lib/idlelib/idle_test/example_stub.pyi index 17b58010a9d..abcdbc17529 100644 --- a/Lib/idlelib/idle_test/example_stub.pyi +++ b/Lib/idlelib/idle_test/example_stub.pyi @@ -1,4 +1,4 @@ -" Example to test recognition of .pyi file as Python source code. +# An example file to test recognition of a .pyi file as Python source code. class Example: def method(self, argument1: str, argument2: list[int]) -> None: ... diff --git a/Lib/idlelib/idle_test/test_outwin.py b/Lib/idlelib/idle_test/test_outwin.py index d6e85ad6744..81f4aad7e95 100644 --- a/Lib/idlelib/idle_test/test_outwin.py +++ b/Lib/idlelib/idle_test/test_outwin.py @@ -1,6 +1,7 @@ "Test outwin, coverage 76%." from idlelib import outwin +import sys import unittest from test.support import requires from tkinter import Tk, Text @@ -18,6 +19,10 @@ def setUpClass(cls): root.withdraw() w = cls.window = outwin.OutputWindow(None, None, None, root) cls.text = w.text = Text(root) + if sys.platform == 'darwin': # Issue 112938 + cls.text.update = cls.text.update_idletasks + # Without this, test write, writelines, and goto... fail. + # The reasons and why macOS-specific are unclear. 
@classmethod def tearDownClass(cls): diff --git a/Lib/idlelib/outwin.py b/Lib/idlelib/outwin.py index 5ed3f35a7af..8baa657550d 100644 --- a/Lib/idlelib/outwin.py +++ b/Lib/idlelib/outwin.py @@ -112,7 +112,7 @@ def write(self, s, tags=(), mark="insert"): assert isinstance(s, str) self.text.insert(mark, s, tags) self.text.see(mark) - self.text.update_idletasks() + self.text.update() return len(s) def writelines(self, lines): diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py index d8b2652d5d7..e882c6cb3b8 100755 --- a/Lib/idlelib/pyshell.py +++ b/Lib/idlelib/pyshell.py @@ -706,7 +706,7 @@ def prepend_syspath(self, filename): del _filename, _sys, _dirname, _dir \n""".format(filename)) - def showsyntaxerror(self, filename=None): + def showsyntaxerror(self, filename=None, **kwargs): """Override Interactive Interpreter method: Use Colorizing Color the offending position instead of printing it and pointing at it diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py index 8974b52674f..8f98e73258e 100644 --- a/Lib/idlelib/run.py +++ b/Lib/idlelib/run.py @@ -443,6 +443,9 @@ class StdioFile(io.TextIOBase): def __init__(self, shell, tags, encoding='utf-8', errors='strict'): self.shell = shell + # GH-78889: accessing unpickleable attributes freezes Shell. + # IDLE only needs methods; allow 'width' for possible use. + self.shell._RPCProxy__attributes = {'width': 1} self.tags = tags self._encoding = encoding self._errors = errors diff --git a/Lib/idlelib/searchbase.py b/Lib/idlelib/searchbase.py index 64ed50c7364..c68a6ca339a 100644 --- a/Lib/idlelib/searchbase.py +++ b/Lib/idlelib/searchbase.py @@ -86,7 +86,7 @@ def create_widgets(self): top.wm_iconname(self.icon) _setup_dialog(top) self.top = top - self.frame = Frame(top, padding="5px") + self.frame = Frame(top, padding=5) self.frame.grid(sticky="nwes") top.grid_columnconfigure(0, weight=100) top.grid_rowconfigure(0, weight=100) diff --git a/Lib/idlelib/tooltip.py b/Lib/idlelib/tooltip.py index 3983690dd41..df5b1fe1dcf 100644 --- a/Lib/idlelib/tooltip.py +++ b/Lib/idlelib/tooltip.py @@ -144,7 +144,8 @@ def hidetip(self): class Hovertip(OnHoverTooltipBase): "A tooltip that pops up when a mouse hovers over an anchor widget." - def __init__(self, anchor_widget, text, hover_delay=1000): + def __init__(self, anchor_widget, text, hover_delay=1000, + foreground="#000000", background="#ffffe0"): """Create a text tooltip with a mouse hover delay. 
anchor_widget: the widget next to which the tooltip will be shown @@ -156,10 +157,13 @@ def __init__(self, anchor_widget, text, hover_delay=1000): """ super().__init__(anchor_widget, hover_delay=hover_delay) self.text = text + self.foreground = foreground + self.background = background def showcontents(self): label = Label(self.tipwindow, text=self.text, justify=LEFT, - background="#ffffe0", relief=SOLID, borderwidth=1) + relief=SOLID, borderwidth=1, + foreground=self.foreground, background=self.background) label.pack() diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index 16b96266b51..37fef357fe2 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -13,6 +13,7 @@ _frozen_importlib_external = _bootstrap_external from ._abc import Loader import abc +import warnings from .resources import abc as _resources_abc diff --git a/Lib/importlib/resources/_functional.py b/Lib/importlib/resources/_functional.py index 9e3ea1547d4..f59416f2dd6 100644 --- a/Lib/importlib/resources/_functional.py +++ b/Lib/importlib/resources/_functional.py @@ -57,11 +57,7 @@ def contents(anchor, *path_names): DeprecationWarning, stacklevel=1, ) - return ( - resource.name - for resource - in _get_resource(anchor, path_names).iterdir() - ) + return (resource.name for resource in _get_resource(anchor, path_names).iterdir()) def _get_encoding_arg(path_names, encoding): diff --git a/Lib/importlib/resources/readers.py b/Lib/importlib/resources/readers.py index b86cdeff57c..ccc5abbeb4e 100644 --- a/Lib/importlib/resources/readers.py +++ b/Lib/importlib/resources/readers.py @@ -34,8 +34,10 @@ def files(self): class ZipReader(abc.TraversableResources): def __init__(self, loader, module): - _, _, name = module.rpartition('.') - self.prefix = loader.prefix.replace('\\', '/') + name + '/' + self.prefix = loader.prefix.replace('\\', '/') + if loader.is_package(module): + _, _, name = module.rpartition('.') + self.prefix += name + '/' self.archive = loader.archive def open_resource(self, resource): diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index c94a148e4c5..284206b62f9 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -13,7 +13,6 @@ import _imp import sys -import threading import types @@ -257,6 +256,9 @@ def create_module(self, spec): def exec_module(self, module): """Make the module load lazily.""" + # Threading is only needed for lazy loading, and importlib.util can + # be pulled in at interpreter startup, so defer until needed. + import threading module.__spec__.loader = self.loader module.__loader__ = self.loader # Don't need to worry about deep-copying as trying to set an attribute diff --git a/Lib/inspect.py b/Lib/inspect.py index bf979e8e63f..8df2383f60b 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -325,6 +325,11 @@ def ismethoddescriptor(object): if isclass(object) or ismethod(object) or isfunction(object): # mutual exclusion return False + if isinstance(object, functools.partial): + # Lie for children. The addition of partial.__get__ + # doesn't currently change the partial objects behaviour, + # not counting a warning about future changes. 
+ return False tp = type(object) return (hasattr(tp, "__get__") and not hasattr(tp, "__set__") @@ -1077,10 +1082,12 @@ def findsource(object): if isclass(object): try: - firstlineno = object.__firstlineno__ - except AttributeError: + lnum = vars(object)['__firstlineno__'] - 1 + except (TypeError, KeyError): raise OSError('source code not available') - return lines, object.__firstlineno__ - 1 + if lnum >= len(lines): + raise OSError('lineno is out of bounds') + return lines, lnum if ismethod(object): object = object.__func__ diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 9cef275f7ae..c165505a533 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1970,12 +1970,21 @@ def __init__(self, address): def _explode_shorthand_ip_string(self): ipv4_mapped = self.ipv4_mapped if ipv4_mapped is None: - long_form = super()._explode_shorthand_ip_string() - else: - prefix_len = 30 - raw_exploded_str = super()._explode_shorthand_ip_string() - long_form = "%s%s" % (raw_exploded_str[:prefix_len], str(ipv4_mapped)) - return long_form + return super()._explode_shorthand_ip_string() + prefix_len = 30 + raw_exploded_str = super()._explode_shorthand_ip_string() + return f"{raw_exploded_str[:prefix_len]}{ipv4_mapped!s}" + + def _reverse_pointer(self): + ipv4_mapped = self.ipv4_mapped + if ipv4_mapped is None: + return super()._reverse_pointer() + prefix_len = 30 + raw_exploded_str = super()._explode_shorthand_ip_string()[:prefix_len] + # ipv4 encoded using hexadecimal nibbles instead of decimals + ipv4_int = ipv4_mapped._ip + reverse_chars = f"{raw_exploded_str}{ipv4_int:008x}"[::-1].replace(':', '') + return '.'.join(reverse_chars) + '.ip6.arpa' def _ipv4_mapped_ipv6_to_str(self): """Return convenient text representation of IPv4-mapped IPv6 address diff --git a/Lib/linecache.py b/Lib/linecache.py index 3462f1c451b..4b38a0464d8 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -70,7 +70,7 @@ def checkcache(filename=None): return try: stat = os.stat(fullname) - except OSError: + except (OSError, ValueError): cache.pop(filename, None) continue if size != stat.st_size or mtime != stat.st_mtime: @@ -135,10 +135,12 @@ def updatecache(filename, module_globals=None): try: stat = os.stat(fullname) break - except OSError: + except (OSError, ValueError): pass else: return [] + except ValueError: # may be raised by os.stat() + return [] try: with tokenize.open(fullname) as fp: lines = fp.readlines() diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 3f4144226b4..aa9b79d8cab 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -37,7 +37,7 @@ 'captureWarnings', 'critical', 'debug', 'disable', 'error', 'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass', 'info', 'log', 'makeLogRecord', 'setLoggerClass', 'shutdown', - 'warning', 'getLogRecordFactory', 'setLogRecordFactory', + 'warn', 'warning', 'getLogRecordFactory', 'setLogRecordFactory', 'lastResort', 'raiseExceptions', 'getLevelNamesMapping', 'getHandlerByName', 'getHandlerNames'] @@ -1530,6 +1530,11 @@ def warning(self, msg, *args, **kwargs): if self.isEnabledFor(WARNING): self._log(WARNING, msg, args, **kwargs) + def warn(self, msg, *args, **kwargs): + warnings.warn("The 'warn' method is deprecated, " + "use 'warning' instead", DeprecationWarning, 2) + self.warning(msg, *args, **kwargs) + def error(self, msg, *args, **kwargs): """ Log 'msg % args' with severity 'ERROR'. 
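For the ipaddress change above: reverse_pointer on an IPv4-mapped IPv6 address now encodes the IPv4 tail as hexadecimal nibbles rather than leaving dotted-decimal text in the name. A small sketch (the address is arbitrary; the output shown is what the nibble encoding is expected to produce):

    from ipaddress import ip_address

    addr = ip_address("::ffff:192.168.1.1")
    # 192.168.1.1 == c0a80101; reversed nibble by nibble, suffixed with .ip6.arpa
    print(addr.reverse_pointer)
    # expected:
    # 1.0.1.0.8.a.0.c.f.f.f.f.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.ip6.arpa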
@@ -1906,6 +1911,11 @@ def warning(self, msg, *args, **kwargs): """ self.log(WARNING, msg, *args, **kwargs) + def warn(self, msg, *args, **kwargs): + warnings.warn("The 'warn' method is deprecated, " + "use 'warning' instead", DeprecationWarning, 2) + self.warning(msg, *args, **kwargs) + def error(self, msg, *args, **kwargs): """ Delegate an error call to the underlying logger. @@ -2169,6 +2179,11 @@ def warning(msg, *args, **kwargs): basicConfig() root.warning(msg, *args, **kwargs) +def warn(msg, *args, **kwargs): + warnings.warn("The 'warn' function is deprecated, " + "use 'warning' instead", DeprecationWarning, 2) + warning(msg, *args, **kwargs) + def info(msg, *args, **kwargs): """ Log a message with severity 'INFO' on the root logger. If the logger has diff --git a/Lib/logging/config.py b/Lib/logging/config.py index 3cc4c57dd8e..735bffeaa09 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -497,6 +497,33 @@ def as_tuple(self, value): value = tuple(value) return value +def _is_queue_like_object(obj): + """Check that *obj* implements the Queue API.""" + if isinstance(obj, queue.Queue): + return True + # defer importing multiprocessing as much as possible + from multiprocessing.queues import Queue as MPQueue + if isinstance(obj, MPQueue): + return True + # Depending on the multiprocessing start context, we cannot create + # a multiprocessing.managers.BaseManager instance 'mm' to get the + # runtime type of mm.Queue() or mm.JoinableQueue() (see gh-119819). + # + # Since we only need an object implementing the Queue API, we only + # do a protocol check, but we do not use typing.runtime_checkable() + # and typing.Protocol to reduce import time (see gh-121723). + # + # Ideally, we would have wanted to simply use strict type checking + # instead of a protocol-based type checking since the latter does + # not check the method signatures. + queue_interface = [ + 'empty', 'full', 'get', 'get_nowait', + 'put', 'put_nowait', 'join', 'qsize', + 'task_done', + ] + return all(callable(getattr(obj, method, None)) + for method in queue_interface) + class DictConfigurator(BaseConfigurator): """ Configure logging using a dictionary-like object to describe the @@ -791,32 +818,8 @@ def configure_handler(self, config): if '()' not in qspec: raise TypeError('Invalid queue specifier %r' % qspec) config['queue'] = self.configure_custom(dict(qspec)) - else: - from multiprocessing.queues import Queue as MPQueue - - if not isinstance(qspec, (queue.Queue, MPQueue)): - # Safely check if 'qspec' is an instance of Manager.Queue - # / Manager.JoinableQueue - - from multiprocessing import Manager as MM - from multiprocessing.managers import BaseProxy - - # if it's not an instance of BaseProxy, it also can't be - # an instance of Manager.Queue / Manager.JoinableQueue - if isinstance(qspec, BaseProxy): - # Sometimes manager or queue creation might fail - # (e.g. see issue gh-120868). In that case, any - # exception during the creation of these queues will - # propagate up to the caller and be wrapped in a - # `ValueError`, whose cause will indicate the details of - # the failure. 
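The warn() wrappers restored in the logging changes above still log, but emit a DeprecationWarning pointing callers at warning(). A quick sketch of what callers see:

    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        logging.warn("disk space low")   # delegated to logging.warning()

    # The message is logged as usual, plus one DeprecationWarning is recorded.
    print(caught[0].category is DeprecationWarning)   # True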
- mm = MM() - proxy_queue = mm.Queue() - proxy_joinable_queue = mm.JoinableQueue() - if not isinstance(qspec, (type(proxy_queue), type(proxy_joinable_queue))): - raise TypeError('Invalid queue specifier %r' % qspec) - else: - raise TypeError('Invalid queue specifier %r' % qspec) + elif not _is_queue_like_object(qspec): + raise TypeError('Invalid queue specifier %r' % qspec) if 'listener' in config: lspec = config['listener'] diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index 0fa40f56e99..1cba64fd554 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -196,9 +196,12 @@ def shouldRollover(self, record): if self.stream is None: # delay was set... self.stream = self._open() if self.maxBytes > 0: # are we rolling over? + pos = self.stream.tell() + if not pos: + # gh-116263: Never rollover an empty file + return False msg = "%s\n" % self.format(record) - self.stream.seek(0, 2) #due to non-posix-compliant Windows feature - if self.stream.tell() + len(msg) >= self.maxBytes: + if pos + len(msg) >= self.maxBytes: # See bpo-45401: Never rollover anything other than regular files if os.path.exists(self.baseFilename) and not os.path.isfile(self.baseFilename): return False diff --git a/Lib/pdb.py b/Lib/pdb.py index 87837737dcd..a42b8881f03 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -2460,9 +2460,12 @@ def main(): traceback.print_exception(e, colorize=_colorize.can_colorize()) print("Uncaught exception. Entering post mortem debugging") print("Running 'cont' or 'step' will restart the program") - pdb.interaction(None, e) - print(f"Post mortem debugger finished. The {target} will " - "be restarted") + try: + pdb.interaction(None, e) + except Restart: + print("Restarting", target, "with arguments:") + print("\t" + " ".join(sys.argv[1:])) + continue if pdb._user_requested_quit: break print("The program finished and will be restarted") diff --git a/Lib/pickle.py b/Lib/pickle.py index d719ceb7a0b..3d4c1a7c984 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -314,16 +314,17 @@ def load_frame(self, frame_size): # Tools used for pickling. def _getattribute(obj, name): + top = obj for subpath in name.split('.'): if subpath == '': raise AttributeError("Can't get local attribute {!r} on {!r}" - .format(name, obj)) + .format(name, top)) try: parent = obj obj = getattr(obj, subpath) except AttributeError: raise AttributeError("Can't get attribute {!r} on {!r}" - .format(name, obj)) from None + .format(name, top)) from None return obj, parent def whichmodule(obj, name): @@ -832,7 +833,7 @@ def save_bytearray(self, obj): if _HAVE_PICKLE_BUFFER: def save_picklebuffer(self, obj): if self.proto < 5: - raise PicklingError("PickleBuffer can only pickled with " + raise PicklingError("PickleBuffer can only be pickled with " "protocol >= 5") with obj.raw() as m: if not m.contiguous: @@ -1092,11 +1093,16 @@ def save_global(self, obj, name=None): (obj, module_name, name)) if self.proto >= 2: - code = _extension_registry.get((module_name, name)) - if code: - assert code > 0 + code = _extension_registry.get((module_name, name), _NoValue) + if code is not _NoValue: if code <= 0xff: - write(EXT1 + pack("= 3: - write(GLOBAL + bytes(module_name, "utf-8") + b'\n' + - bytes(name, "utf-8") + b'\n') + elif '.' in name: + # In protocol < 4, objects with multi-part __qualname__ + # are represented as + # getattr(getattr(..., attrname1), attrname2). 
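With the protocol-based _is_queue_like_object() check above, dictConfig only requires the 'queue' value of a QueueHandler to expose the Queue API; it no longer instantiates a Manager to probe proxy types. A sketch using a plain queue.Queue (handler and logger names are arbitrary):

    import logging
    import logging.config
    import queue

    log_queue = queue.Queue()

    logging.config.dictConfig({
        "version": 1,
        "handlers": {
            "queued": {
                "class": "logging.handlers.QueueHandler",
                "queue": log_queue,   # a ready-made instance, not a '()' factory spec
            },
        },
        "root": {"level": "INFO", "handlers": ["queued"]},
    })

    logging.getLogger().info("hello")
    print(log_queue.get_nowait().getMessage())   # -> hello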
+ dotted_path = name.split('.') + name = dotted_path.pop(0) + save = self.save + for attrname in dotted_path: + save(getattr) + if self.proto < 2: + write(MARK) + self._save_toplevel_by_name(module_name, name) + for attrname in dotted_path: + save(attrname) + if self.proto < 2: + write(TUPLE) + else: + write(TUPLE2) + write(REDUCE) + else: + self._save_toplevel_by_name(module_name, name) + + self.memoize(obj) + + def _save_toplevel_by_name(self, module_name, name): + if self.proto >= 3: + # Non-ASCII identifiers are supported only with protocols >= 3. + self.write(GLOBAL + bytes(module_name, "utf-8") + b'\n' + + bytes(name, "utf-8") + b'\n') else: if self.fix_imports: r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING @@ -1124,14 +1154,12 @@ def save_global(self, obj, name=None): elif module_name in r_import_mapping: module_name = r_import_mapping[module_name] try: - write(GLOBAL + bytes(module_name, "ascii") + b'\n' + - bytes(name, "ascii") + b'\n') + self.write(GLOBAL + bytes(module_name, "ascii") + b'\n' + + bytes(name, "ascii") + b'\n') except UnicodeEncodeError: raise PicklingError( "can't pickle global identifier '%s.%s' using " - "pickle protocol %i" % (module, name, self.proto)) from None - - self.memoize(obj) + "pickle protocol %i" % (module_name, name, self.proto)) from None def save_type(self, obj): if obj is type(None): @@ -1568,9 +1596,8 @@ def load_ext4(self): dispatch[EXT4[0]] = load_ext4 def get_extension(self, code): - nil = [] - obj = _extension_cache.get(code, nil) - if obj is not nil: + obj = _extension_cache.get(code, _NoValue) + if obj is not _NoValue: self.append(obj) return key = _inverted_registry.get(code) diff --git a/Lib/pstats.py b/Lib/pstats.py index 2f054bb4011..d21abe21523 100644 --- a/Lib/pstats.py +++ b/Lib/pstats.py @@ -83,7 +83,7 @@ class Stats: method now take arbitrarily many file names as arguments. All the print methods now take an argument that indicates how many lines - to print. If the arg is a floating point number between 0 and 1.0, then + to print. If the arg is a floating-point number between 0 and 1.0, then it is taken as a decimal percentage of the available lines to be printed (e.g., .1 means print 10% of all available lines). If it is an integer, it is taken to mean the number of lines of data that you wish to have diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 40ec650b39d..30cd0b3fa3c 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -1861,6 +1861,7 @@ class Helper: ':': 'SLICINGS DICTIONARYLITERALS', '@': 'def class', '\\': 'STRINGS', + ':=': 'ASSIGNMENTEXPRESSIONS', '_': 'PRIVATENAMES', '__': 'PRIVATENAMES SPECIALMETHODS', '`': 'BACKQUOTES', @@ -1954,6 +1955,7 @@ class Helper: 'ASSERTION': 'assert', 'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'), 'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'), + 'ASSIGNMENTEXPRESSIONS': ('assignment-expressions', ''), 'DELETION': 'del', 'RETURNING': 'return', 'IMPORTING': 'import', diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index ca2010c3357..dbfeb55998c 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Thu Jun 27 15:02:53 2024 +# Autogenerated by Sphinx on Mon Oct 7 06:59:36 2024 # as part of the release process. 
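The save_global()/_save_toplevel_by_name() rework above lets objects whose __qualname__ contains dots be pickled by reference with protocols below 4, by emitting a getattr chain instead of failing. A minimal sketch (class names are arbitrary; assumes the patched pickle):

    import pickle

    class Outer:
        class Inner:          # __qualname__ == 'Outer.Inner'
            pass

    # Previously this needed protocol 4+; with the change above protocol 2
    # serialises the reference as getattr(Outer, 'Inner') applied at load time.
    payload = pickle.dumps(Outer.Inner, protocol=2)
    assert pickle.loads(payload) is Outer.Inner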
topics = {'assert': 'The "assert" statement\n' '**********************\n' @@ -308,10 +308,10 @@ 'target.\n' 'The target is only evaluated once.\n' '\n' - 'An augmented assignment expression like "x += 1" can be ' - 'rewritten as\n' - '"x = x + 1" to achieve a similar, but not exactly equal ' - 'effect. In the\n' + 'An augmented assignment statement like "x += 1" can be ' + 'rewritten as "x\n' + '= x + 1" to achieve a similar, but not exactly equal effect. ' + 'In the\n' 'augmented version, "x" is only evaluated once. Also, when ' 'possible,\n' 'the actual operation is performed *in-place*, meaning that ' @@ -560,31 +560,67 @@ 'evaluate it\n' 'raises a "NameError" exception.\n' '\n' - '**Private name mangling:** When an identifier that ' - 'textually occurs in\n' - 'a class definition begins with two or more underscore ' - 'characters and\n' - 'does not end in two or more underscores, it is ' - 'considered a *private\n' - 'name* of that class. Private names are transformed to a ' - 'longer form\n' - 'before code is generated for them. The transformation ' - 'inserts the\n' - 'class name, with leading underscores removed and a ' - 'single underscore\n' - 'inserted, in front of the name. For example, the ' - 'identifier "__spam"\n' - 'occurring in a class named "Ham" will be transformed to ' - '"_Ham__spam".\n' - 'This transformation is independent of the syntactical ' + '\n' + 'Private name mangling\n' + '=====================\n' + '\n' + 'When an identifier that textually occurs in a class ' + 'definition begins\n' + 'with two or more underscore characters and does not end ' + 'in two or more\n' + 'underscores, it is considered a *private name* of that ' + 'class.\n' + '\n' + 'See also: The class specifications.\n' + '\n' + 'More precisely, private names are transformed to a ' + 'longer form before\n' + 'code is generated for them. If the transformed name is ' + 'longer than\n' + '255 characters, implementation-defined truncation may ' + 'happen.\n' + '\n' + 'The transformation is independent of the syntactical ' 'context in which\n' - 'the identifier is used. If the transformed name is ' - 'extremely long\n' - '(longer than 255 characters), implementation defined ' - 'truncation may\n' - 'happen. If the class name consists only of underscores, ' - 'no\n' - 'transformation is done.\n', + 'the identifier is used but only the following private ' + 'identifiers are\n' + 'mangled:\n' + '\n' + '* Any name used as the name of a variable that is ' + 'assigned or read or\n' + ' any name of an attribute being accessed.\n' + '\n' + ' The "__name__" attribute of nested functions, classes, ' + 'and type\n' + ' aliases is however not mangled.\n' + '\n' + '* The name of imported modules, e.g., "__spam" in ' + '"import __spam". 
If\n' + ' the module is part of a package (i.e., its name ' + 'contains a dot), the\n' + ' name is *not* mangled, e.g., the "__foo" in "import ' + '__foo.bar" is\n' + ' not mangled.\n' + '\n' + '* The name of an imported member, e.g., "__f" in "from ' + 'spam import\n' + ' __f".\n' + '\n' + 'The transformation rule is defined as follows:\n' + '\n' + '* The class name, with leading underscores removed and a ' + 'single\n' + ' leading underscore inserted, is inserted in front of ' + 'the identifier,\n' + ' e.g., the identifier "__spam" occurring in a class ' + 'named "Foo",\n' + ' "_Foo" or "__Foo" is transformed to "_Foo__spam".\n' + '\n' + '* If the class name consists only of underscores, the ' + 'transformation\n' + ' is the identity, e.g., the identifier "__spam" ' + 'occurring in a class\n' + ' named "_" or "__" is left as is.\n', 'atom-literals': 'Literals\n' '********\n' '\n' @@ -597,10 +633,10 @@ '\n' 'Evaluation of a literal yields an object of the given type ' '(string,\n' - 'bytes, integer, floating point number, complex number) with ' + 'bytes, integer, floating-point number, complex number) with ' 'the given\n' 'value. The value may be approximated in the case of ' - 'floating point\n' + 'floating-point\n' 'and imaginary (complex) literals. See section Literals for ' 'details.\n' '\n' @@ -1058,11 +1094,13 @@ 'to the class\n' ' where it is defined. *__slots__* declared in parents ' 'are available\n' - ' in child classes. However, child subclasses will get a ' - '"__dict__"\n' - ' and *__weakref__* unless they also define *__slots__* ' - '(which should\n' - ' only contain names of any *additional* slots).\n' + ' in child classes. However, instances of a child ' + 'subclass will get a\n' + ' "__dict__" and *__weakref__* unless the subclass also ' + 'defines\n' + ' *__slots__* (which should only contain names of any ' + '*additional*\n' + ' slots).\n' '\n' '* If a class defines a slot also defined in a base ' 'class, the instance\n' @@ -1168,10 +1206,10 @@ 'target.\n' 'The target is only evaluated once.\n' '\n' - 'An augmented assignment expression like "x += 1" can be ' - 'rewritten as\n' - '"x = x + 1" to achieve a similar, but not exactly equal effect. ' - 'In the\n' + 'An augmented assignment statement like "x += 1" can be ' + 'rewritten as "x\n' + '= x + 1" to achieve a similar, but not exactly equal effect. In ' + 'the\n' 'augmented version, "x" is only evaluated once. Also, when ' 'possible,\n' 'the actual operation is performed *in-place*, meaning that ' @@ -1244,6 +1282,10 @@ 'The "@" (at) operator is intended to be used for matrix\n' 'multiplication. No builtin Python types implement this operator.\n' '\n' + 'This operation can be customized using the special "__matmul__()" ' + 'and\n' + '"__rmatmul__()" methods.\n' + '\n' 'Added in version 3.5.\n' '\n' 'The "/" (division) and "//" (floor division) operators yield the\n' @@ -1256,17 +1298,19 @@ 'result. Division by zero raises the "ZeroDivisionError" ' 'exception.\n' '\n' - 'This operation can be customized using the special "__truediv__()" ' + 'The division operation can be customized using the special\n' + '"__truediv__()" and "__rtruediv__()" methods. The floor division\n' + 'operation can be customized using the special "__floordiv__()" ' 'and\n' - '"__floordiv__()" methods.\n' + '"__rfloordiv__()" methods.\n' '\n' 'The "%" (modulo) operator yields the remainder from the division ' 'of\n' 'the first argument by the second. The numeric arguments are ' 'first\n' 'converted to a common type. 
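The rewritten mangling section above reduces to a mechanical rename; a two-line illustration of the rule it states:

    # __spam assigned inside class Foo is stored as _Foo__spam on the instance.
    class Foo:
        def __init__(self):
            self.__spam = 1

    f = Foo()
    print("_Foo__spam" in vars(f))   # True
    print(f._Foo__spam)              # 1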
A zero right argument raises the\n' - '"ZeroDivisionError" exception. The arguments may be floating ' - 'point\n' + '"ZeroDivisionError" exception. The arguments may be ' + 'floating-point\n' 'numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals ' '"4*0.7 +\n' '0.34".) The modulo operator always yields a result with the same ' @@ -1293,13 +1337,13 @@ '\n' 'The *modulo* operation can be customized using the special ' '"__mod__()"\n' - 'method.\n' + 'and "__rmod__()" methods.\n' '\n' 'The floor division operator, the modulo operator, and the ' '"divmod()"\n' 'function are not defined for complex numbers. Instead, convert to ' 'a\n' - 'floating point number using the "abs()" function if appropriate.\n' + 'floating-point number using the "abs()" function if appropriate.\n' '\n' 'The "+" (addition) operator yields the sum of its arguments. The\n' 'arguments must either both be numbers or both be sequences of the ' @@ -1318,7 +1362,8 @@ 'The numeric arguments are first converted to a common type.\n' '\n' 'This operation can be customized using the special "__sub__()" ' - 'method.\n', + 'and\n' + '"__rsub__()" methods.\n', 'bitwise': 'Binary bitwise operations\n' '*************************\n' '\n' @@ -3058,7 +3103,7 @@ ' | "None"\n' ' | "True"\n' ' | "False"\n' - ' | signed_number: NUMBER | "-" NUMBER\n' + ' signed_number ::= ["-"] NUMBER\n' '\n' 'The rule "strings" and the token "NUMBER" are defined in the ' 'standard\n' @@ -3528,10 +3573,12 @@ ' parameter_list_no_posonly ::= defparameter ("," ' 'defparameter)* ["," [parameter_list_starargs]]\n' ' | parameter_list_starargs\n' - ' parameter_list_starargs ::= "*" [parameter] ("," ' + ' parameter_list_starargs ::= "*" [star_parameter] ("," ' 'defparameter)* ["," ["**" parameter [","]]]\n' ' | "**" parameter [","]\n' ' parameter ::= identifier [":" expression]\n' + ' star_parameter ::= identifier [":" ["*"] ' + 'expression]\n' ' defparameter ::= parameter ["=" expression]\n' ' funcname ::= identifier\n' '\n' @@ -3659,27 +3706,31 @@ 'expression"”\n' 'following the parameter name. Any parameter may have an ' 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". ' - 'Functions may\n' - 'have “return” annotation of the form “"-> expression"” after ' - 'the\n' - 'parameter list. These annotations can be any valid Python ' - 'expression.\n' - 'The presence of annotations does not change the semantics of a\n' - 'function. The annotation values are available as values of a\n' - 'dictionary keyed by the parameters’ names in the ' - '"__annotations__"\n' - 'attribute of the function object. If the "annotations" import ' - 'from\n' - '"__future__" is used, annotations are preserved as strings at ' - 'runtime\n' - 'which enables postponed evaluation. Otherwise, they are ' - 'evaluated\n' - 'when the function definition is executed. In this case ' - 'annotations\n' - 'may be evaluated in a different order than they appear in the ' - 'source\n' - 'code.\n' + 'even those of the form "*identifier" or "**identifier". (As a ' + 'special\n' + 'case, parameters of the form "*identifier" may have an ' + 'annotation “":\n' + '*expression"”.) Functions may have “return” annotation of the ' + 'form\n' + '“"-> expression"” after the parameter list. These annotations ' + 'can be\n' + 'any valid Python expression. The presence of annotations does ' + 'not\n' + 'change the semantics of a function. 
The annotation values are\n' + 'available as values of a dictionary keyed by the parameters’ ' + 'names in\n' + 'the "__annotations__" attribute of the function object. If the\n' + '"annotations" import from "__future__" is used, annotations are\n' + 'preserved as strings at runtime which enables postponed ' + 'evaluation.\n' + 'Otherwise, they are evaluated when the function definition is\n' + 'executed. In this case annotations may be evaluated in a ' + 'different\n' + 'order than they appear in the source code.\n' + '\n' + 'Changed in version 3.11: Parameters of the form “"*identifier"” ' + 'may\n' + 'have an annotation “": *expression"”. See **PEP 646**.\n' '\n' 'It is also possible to create anonymous functions (functions not ' 'bound\n' @@ -4431,7 +4482,7 @@ 'converted to\n' ' complex;\n' '\n' - '* otherwise, if either argument is a floating point number, ' + '* otherwise, if either argument is a floating-point number, ' 'the other\n' ' is converted to floating point;\n' '\n' @@ -6305,11 +6356,11 @@ '\n' '* While annotation scopes have an internal name, that name is ' 'not\n' - ' reflected in the *__qualname__* of objects defined within the ' - 'scope.\n' - ' Instead, the "__qualname__" of such objects is as if the ' - 'object were\n' - ' defined in the enclosing scope.\n' + ' reflected in the *qualified name* of objects defined within ' + 'the\n' + ' scope. Instead, the "__qualname__" of such objects is as if ' + 'the\n' + ' object were defined in the enclosing scope.\n' '\n' 'Added in version 3.12: Annotation scopes were introduced in ' 'Python\n' @@ -6511,12 +6562,17 @@ 'exprlists': 'Expression lists\n' '****************\n' '\n' - ' expression_list ::= expression ("," expression)* [","]\n' - ' starred_list ::= starred_item ("," starred_item)* ' + ' starred_expression ::= ["*"] or_expr\n' + ' flexible_expression ::= assignment_expression | ' + 'starred_expression\n' + ' flexible_expression_list ::= flexible_expression ("," ' + 'flexible_expression)* [","]\n' + ' starred_expression_list ::= starred_expression ("," ' + 'starred_expression)* [","]\n' + ' expression_list ::= expression ("," expression)* ' '[","]\n' - ' starred_expression ::= expression | (starred_item ",")* ' - '[starred_item]\n' - ' starred_item ::= assignment_expression | "*" or_expr\n' + ' yield_list ::= expression_list | ' + 'starred_expression "," [starred_expression_list]\n' '\n' 'Except when part of a list or set display, an expression list\n' 'containing at least one comma yields a tuple. The length of ' @@ -6535,6 +6591,10 @@ 'Added in version 3.5: Iterable unpacking in expression lists,\n' 'originally proposed by **PEP 448**.\n' '\n' + 'Added in version 3.11: Any item in an expression list may be ' + 'starred.\n' + 'See **PEP 646**.\n' + '\n' 'A trailing comma is required only to create a one-item tuple, ' 'such as\n' '"1,"; it is optional in all other cases. A single expression ' @@ -6544,10 +6604,10 @@ 'that expression. (To create an empty tuple, use an empty pair ' 'of\n' 'parentheses: "()".)\n', - 'floating': 'Floating point literals\n' + 'floating': 'Floating-point literals\n' '***********************\n' '\n' - 'Floating point literals are described by the following lexical\n' + 'Floating-point literals are described by the following lexical\n' 'definitions:\n' '\n' ' floatnumber ::= pointfloat | exponentfloat\n' @@ -6561,12 +6621,12 @@ 'using\n' 'radix 10. For example, "077e010" is legal, and denotes the same ' 'number\n' - 'as "77e10". The allowed range of floating point literals is\n' + 'as "77e10". 
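The star_parameter grammar documented above allows a ": *expression" annotation on the *args parameter (PEP 646). A small sketch of the 3.11+ syntax it describes (the function name is arbitrary):

    from typing import TypeVarTuple

    Ts = TypeVarTuple("Ts")

    def as_tuple(*args: *Ts) -> tuple[*Ts]:
        return args

    print(as_tuple(1, "a", 2.0))   # (1, 'a', 2.0)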
The allowed range of floating-point literals is\n' 'implementation-dependent. As in integer literals, underscores ' 'are\n' 'supported for digit grouping.\n' '\n' - 'Some examples of floating point literals:\n' + 'Some examples of floating-point literals:\n' '\n' ' 3.14 10. .001 1e100 3.14e-10 0e0 ' '3.14_15_93\n' @@ -6949,7 +7009,7 @@ '\n' 'The "\'_\'" option signals the use of an underscore for a ' 'thousands\n' - 'separator for floating point presentation types and for ' + 'separator for floating-point presentation types and for ' 'integer\n' 'presentation type "\'d\'". For integer presentation types ' '"\'b\'", "\'o\'",\n' @@ -7076,11 +7136,11 @@ '\n' 'In addition to the above presentation types, integers can ' 'be formatted\n' - 'with the floating point presentation types listed below ' + 'with the floating-point presentation types listed below ' '(except "\'n\'"\n' 'and "None"). When doing so, "float()" is used to convert ' 'the integer\n' - 'to a floating point number before formatting.\n' + 'to a floating-point number before formatting.\n' '\n' 'The available presentation types for "float" and "Decimal" ' 'values are:\n' @@ -7220,18 +7280,22 @@ 'percent sign. |\n' ' ' '+-----------+------------------------------------------------------------+\n' - ' | None | For "float" this is the same as "\'g\'", ' - 'except that when |\n' - ' | | fixed-point notation is used to format the ' - 'result, it |\n' + ' | None | For "float" this is like the "\'g\'" type, ' + 'except that when |\n' + ' | | fixed- point notation is used to format ' + 'the result, it |\n' ' | | always includes at least one digit past ' - 'the decimal point. |\n' - ' | | The precision used is as large as needed ' - 'to represent the |\n' - ' | | given value faithfully. For "Decimal", ' - 'this is the same |\n' - ' | | as either "\'g\'" or "\'G\'" depending on ' - 'the value of |\n' + 'the decimal point, |\n' + ' | | and switches to the scientific notation ' + 'when "exp >= p - |\n' + ' | | 1". When the precision is not specified, ' + 'the latter will |\n' + ' | | be as large as needed to represent the ' + 'given value |\n' + ' | | faithfully. For "Decimal", this is the ' + 'same as either |\n' + ' | | "\'g\'" or "\'G\'" depending on the value ' + 'of |\n' ' | | "context.capitals" for the current decimal ' 'context. The |\n' ' | | overall effect is to match the output of ' @@ -7421,10 +7485,12 @@ ' parameter_list_no_posonly ::= defparameter ("," ' 'defparameter)* ["," [parameter_list_starargs]]\n' ' | parameter_list_starargs\n' - ' parameter_list_starargs ::= "*" [parameter] ("," ' + ' parameter_list_starargs ::= "*" [star_parameter] ("," ' 'defparameter)* ["," ["**" parameter [","]]]\n' ' | "**" parameter [","]\n' ' parameter ::= identifier [":" expression]\n' + ' star_parameter ::= identifier [":" ["*"] ' + 'expression]\n' ' defparameter ::= parameter ["=" expression]\n' ' funcname ::= identifier\n' '\n' @@ -7552,27 +7618,31 @@ 'expression"”\n' 'following the parameter name. Any parameter may have an ' 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". ' - 'Functions may\n' - 'have “return” annotation of the form “"-> expression"” after ' - 'the\n' - 'parameter list. These annotations can be any valid Python ' - 'expression.\n' - 'The presence of annotations does not change the semantics of a\n' - 'function. The annotation values are available as values of a\n' - 'dictionary keyed by the parameters’ names in the ' - '"__annotations__"\n' - 'attribute of the function object. 
If the "annotations" import ' - 'from\n' - '"__future__" is used, annotations are preserved as strings at ' - 'runtime\n' - 'which enables postponed evaluation. Otherwise, they are ' - 'evaluated\n' - 'when the function definition is executed. In this case ' - 'annotations\n' - 'may be evaluated in a different order than they appear in the ' - 'source\n' - 'code.\n' + 'even those of the form "*identifier" or "**identifier". (As a ' + 'special\n' + 'case, parameters of the form "*identifier" may have an ' + 'annotation “":\n' + '*expression"”.) Functions may have “return” annotation of the ' + 'form\n' + '“"-> expression"” after the parameter list. These annotations ' + 'can be\n' + 'any valid Python expression. The presence of annotations does ' + 'not\n' + 'change the semantics of a function. The annotation values are\n' + 'available as values of a dictionary keyed by the parameters’ ' + 'names in\n' + 'the "__annotations__" attribute of the function object. If the\n' + '"annotations" import from "__future__" is used, annotations are\n' + 'preserved as strings at runtime which enables postponed ' + 'evaluation.\n' + 'Otherwise, they are evaluated when the function definition is\n' + 'executed. In this case annotations may be evaluated in a ' + 'different\n' + 'order than they appear in the source code.\n' + '\n' + 'Changed in version 3.11: Parameters of the form “"*identifier"” ' + 'may\n' + 'have an annotation “": *expression"”. See **PEP 646**.\n' '\n' 'It is also possible to create anonymous functions (functions not ' 'bound\n' @@ -7938,11 +8008,11 @@ '\n' 'An imaginary literal yields a complex number with a real part ' 'of 0.0.\n' - 'Complex numbers are represented as a pair of floating point ' + 'Complex numbers are represented as a pair of floating-point ' 'numbers\n' 'and have the same restrictions on their range. To create a ' 'complex\n' - 'number with a nonzero real part, add a floating point number to ' + 'number with a nonzero real part, add a floating-point number to ' 'it,\n' 'e.g., "(3+4j)". Some examples of imaginary literals:\n' '\n' @@ -8328,7 +8398,8 @@ 'in\n' 'square brackets:\n' '\n' - ' list_display ::= "[" [starred_list | comprehension] "]"\n' + ' list_display ::= "[" [flexible_expression_list | comprehension] ' + '"]"\n' '\n' 'A list display yields a new list object, the contents being ' 'specified\n' @@ -8579,11 +8650,9 @@ ' can introduce new names.\n' '\n' '* While annotation scopes have an internal name, that name is not\n' - ' reflected in the *__qualname__* of objects defined within the ' - 'scope.\n' - ' Instead, the "__qualname__" of such objects is as if the object ' - 'were\n' - ' defined in the enclosing scope.\n' + ' reflected in the *qualified name* of objects defined within the\n' + ' scope. Instead, the "__qualname__" of such objects is as if the\n' + ' object were defined in the enclosing scope.\n' '\n' 'Added in version 3.12: Annotation scopes were introduced in ' 'Python\n' @@ -8739,8 +8808,8 @@ 'numbers': 'Numeric literals\n' '****************\n' '\n' - 'There are three types of numeric literals: integers, floating ' - 'point\n' + 'There are three types of numeric literals: integers, ' + 'floating-point\n' 'numbers, and imaginary numbers. 
There are no complex literals\n' '(complex numbers can be formed by adding a real number and an\n' 'imaginary number).\n' @@ -9072,16 +9141,22 @@ 'types, operations that compute new values may actually return a\n' 'reference to any existing object with the same type and value, ' 'while\n' - 'for mutable objects this is not allowed. E.g., after "a = 1; b = ' - '1",\n' - '"a" and "b" may or may not refer to the same object with the ' - 'value\n' - 'one, depending on the implementation, but after "c = []; d = []", ' - '"c"\n' - 'and "d" are guaranteed to refer to two different, unique, newly\n' - 'created empty lists. (Note that "c = d = []" assigns the same ' - 'object\n' - 'to both "c" and "d".)\n', + 'for mutable objects this is not allowed. For example, after "a = ' + '1; b\n' + '= 1", *a* and *b* may or may not refer to the same object with ' + 'the\n' + 'value one, depending on the implementation. This is because "int" ' + 'is\n' + 'an immutable type, so the reference to "1" can be reused. This\n' + 'behaviour depends on the implementation used, so should not be ' + 'relied\n' + 'upon, but is something to be aware of when making use of object\n' + 'identity tests. However, after "c = []; d = []", *c* and *d* are\n' + 'guaranteed to refer to two different, unique, newly created ' + 'empty\n' + 'lists. (Note that "e = f = []" assigns the *same* object to both ' + '*e*\n' + 'and *f*.)\n', 'operator-summary': 'Operator precedence\n' '*******************\n' '\n' @@ -9310,8 +9385,8 @@ '"complex"\n' 'number. (In earlier versions it raised a "ValueError".)\n' '\n' - 'This operation can be customized using the special "__pow__()" ' - 'method.\n', + 'This operation can be customized using the special "__pow__()" and\n' + '"__rpow__()" methods.\n', 'raise': 'The "raise" statement\n' '*********************\n' '\n' @@ -9725,9 +9800,12 @@ 'the\n' 'second argument.\n' '\n' - 'This operation can be customized using the special ' - '"__lshift__()" and\n' - '"__rshift__()" methods.\n' + 'The left shift operation can be customized using the special\n' + '"__lshift__()" and "__rlshift__()" methods. The right shift ' + 'operation\n' + 'can be customized using the special "__rshift__()" and ' + '"__rrshift__()"\n' + 'methods.\n' '\n' 'A right shift by *n* bits is defined as floor division by ' '"pow(2,n)".\n' @@ -9793,20 +9871,6 @@ 'not reported\n' 'by the "dir()" built-in function.\n' '\n' - 'object.__dict__\n' - '\n' - ' A dictionary or other mapping object used to store an ' - 'object’s\n' - ' (writable) attributes.\n' - '\n' - 'instance.__class__\n' - '\n' - ' The class to which a class instance belongs.\n' - '\n' - 'class.__bases__\n' - '\n' - ' The tuple of base classes of a class object.\n' - '\n' 'definition.__name__\n' '\n' ' The name of the class, function, method, descriptor, or ' @@ -9821,47 +9885,26 @@ '\n' ' Added in version 3.3.\n' '\n' - 'definition.__type_params__\n' - '\n' - ' The type parameters of generic classes, functions, and ' - 'type\n' - ' aliases.\n' - '\n' - ' Added in version 3.12.\n' - '\n' - 'class.__mro__\n' - '\n' - ' This attribute is a tuple of classes that are considered ' - 'when\n' - ' looking for base classes during method resolution.\n' - '\n' - 'class.mro()\n' - '\n' - ' This method can be overridden by a metaclass to customize ' - 'the\n' - ' method resolution order for its instances. 
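The rewritten object-identity paragraph above contrasts immutable and mutable objects; in code:

    a = 1
    b = 1
    print(a is b)   # may be True (CPython caches small ints) -- do not rely on it
    c = []
    d = []
    print(c is d)   # always False: two distinct, newly created lists
    e = f = []
    print(e is f)   # True: one list bound to two names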
It is called ' - 'at class\n' - ' instantiation, and its result is stored in "__mro__".\n' + 'definition.__module__\n' '\n' - 'class.__subclasses__()\n' + ' The name of the module in which a class or function was ' + 'defined.\n' '\n' - ' Each class keeps a list of weak references to its ' - 'immediate\n' - ' subclasses. This method returns a list of all those ' - 'references\n' - ' still alive. The list is in definition order. Example:\n' + 'definition.__doc__\n' '\n' - ' >>> int.__subclasses__()\n' - " [, , , " - "]\n" + ' The documentation string of a class or function, or ' + '"None" if\n' + ' undefined.\n' '\n' - 'class.__static_attributes__\n' + 'definition.__type_params__\n' '\n' - ' A tuple containing names of attributes of this class ' - 'which are\n' - ' accessed through "self.X" from any function in its body.\n' + ' The type parameters of generic classes, functions, and ' + 'type\n' + ' aliases. For classes and functions that are not generic, ' + 'this will\n' + ' be an empty tuple.\n' '\n' - ' Added in version 3.13.\n', + ' Added in version 3.12.\n', 'specialnames': 'Special method names\n' '********************\n' '\n' @@ -10807,11 +10850,13 @@ 'the class\n' ' where it is defined. *__slots__* declared in parents are ' 'available\n' - ' in child classes. However, child subclasses will get a ' - '"__dict__"\n' - ' and *__weakref__* unless they also define *__slots__* ' - '(which should\n' - ' only contain names of any *additional* slots).\n' + ' in child classes. However, instances of a child subclass ' + 'will get a\n' + ' "__dict__" and *__weakref__* unless the subclass also ' + 'defines\n' + ' *__slots__* (which should only contain names of any ' + '*additional*\n' + ' slots).\n' '\n' '* If a class defines a slot also defined in a base class, ' 'the instance\n' @@ -11228,7 +11273,7 @@ 'built-in\n' 'types), including other ABCs.\n' '\n' - 'class.__instancecheck__(self, instance)\n' + 'type.__instancecheck__(self, instance)\n' '\n' ' Return true if *instance* should be considered a (direct ' 'or\n' @@ -11236,7 +11281,7 @@ 'implement\n' ' "isinstance(instance, class)".\n' '\n' - 'class.__subclasscheck__(self, subclass)\n' + 'type.__subclasscheck__(self, subclass)\n' '\n' ' Return true if *subclass* should be considered a (direct ' 'or\n' @@ -12809,11 +12854,11 @@ ' and are deemed to delimit empty strings (for example,\n' ' "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', ' '\'2\']"). The *sep* argument\n' - ' may consist of multiple characters (for example,\n' - ' "\'1<>2<>3\'.split(\'<>\')" returns "[\'1\', \'2\', ' - '\'3\']"). Splitting an\n' - ' empty string with a specified separator returns ' - '"[\'\']".\n' + ' may consist of multiple characters as a single ' + 'delimiter (to split\n' + ' with multiple delimiters, use "re.split()"). Splitting ' + 'an empty\n' + ' string with a specified separator returns "[\'\']".\n' '\n' ' For example:\n' '\n' @@ -12823,6 +12868,8 @@ " ['1', '2,3']\n" " >>> '1,2,,3,'.split(',')\n" " ['1', '2', '', '3', '']\n" + " >>> '1<>2<>3<4'.split('<>')\n" + " ['1', '2', '3<4']\n" '\n' ' If *sep* is not specified or is "None", a different ' 'splitting\n' @@ -13165,15 +13212,13 @@ 'greater must be expressed with escapes.\n' '\n' 'Both string and bytes literals may optionally be prefixed with a\n' - 'letter "\'r\'" or "\'R\'"; such strings are called *raw strings* ' - 'and treat\n' - 'backslashes as literal characters. 
As a result, in string ' - 'literals,\n' - '"\'\\U\'" and "\'\\u\'" escapes in raw strings are not treated ' - 'specially.\n' - 'Given that Python 2.x’s raw unicode literals behave differently ' - 'than\n' - 'Python 3.x’s the "\'ur\'" syntax is not supported.\n' + 'letter "\'r\'" or "\'R\'"; such constructs are called *raw ' + 'string\n' + 'literals* and *raw bytes literals* respectively and treat ' + 'backslashes\n' + 'as literal characters. As a result, in raw string literals, ' + '"\'\\U\'"\n' + 'and "\'\\u\'" escapes are not treated specially.\n' '\n' 'Added in version 3.3: The "\'rb\'" prefix of raw bytes literals ' 'has been\n' @@ -13364,7 +13409,8 @@ '*generic\n' 'class* will generally return a GenericAlias object.\n' '\n' - ' subscription ::= primary "[" expression_list "]"\n' + ' subscription ::= primary "[" flexible_expression_list ' + '"]"\n' '\n' 'When an object is subscripted, the interpreter will ' 'evaluate the\n' @@ -13383,13 +13429,18 @@ 'see\n' '__class_getitem__ versus __getitem__.\n' '\n' - 'If the expression list contains at least one comma, it will ' - 'evaluate\n' - 'to a "tuple" containing the items of the expression list. ' - 'Otherwise,\n' - 'the expression list will evaluate to the value of the ' - 'list’s sole\n' - 'member.\n' + 'If the expression list contains at least one comma, or if ' + 'any of the\n' + 'expressions are starred, the expression list will evaluate ' + 'to a\n' + '"tuple" containing the items of the expression list. ' + 'Otherwise, the\n' + 'expression list will evaluate to the value of the list’s ' + 'sole member.\n' + '\n' + 'Changed in version 3.11: Expressions in an expression list ' + 'may be\n' + 'starred. See **PEP 646**.\n' '\n' 'For built-in objects, there are two types of objects that ' 'support\n' @@ -13807,7 +13858,7 @@ '\n' '* A sign is shown only when the number is negative.\n' '\n' - 'Python distinguishes between integers, floating point numbers, and\n' + 'Python distinguishes between integers, floating-point numbers, and\n' 'complex numbers:\n' '\n' '\n' @@ -13852,28 +13903,28 @@ '"numbers.Real" ("float")\n' '------------------------\n' '\n' - 'These represent machine-level double precision floating point ' + 'These represent machine-level double precision floating-point ' 'numbers.\n' 'You are at the mercy of the underlying machine architecture (and C ' 'or\n' 'Java implementation) for the accepted range and handling of ' 'overflow.\n' - 'Python does not support single-precision floating point numbers; ' + 'Python does not support single-precision floating-point numbers; ' 'the\n' 'savings in processor and memory usage that are usually the reason ' 'for\n' 'using these are dwarfed by the overhead of using objects in Python, ' 'so\n' 'there is no reason to complicate the language with two kinds of\n' - 'floating point numbers.\n' + 'floating-point numbers.\n' '\n' '\n' '"numbers.Complex" ("complex")\n' '-----------------------------\n' '\n' 'These represent complex numbers as a pair of machine-level double\n' - 'precision floating point numbers. The same caveats apply as for\n' - 'floating point numbers. The real and imaginary parts of a complex\n' + 'precision floating-point numbers. The same caveats apply as for\n' + 'floating-point numbers. 
The real and imaginary parts of a complex\n' 'number "z" can be retrieved through the read-only attributes ' '"z.real"\n' 'and "z.imag".\n' @@ -14067,8 +14118,7 @@ 'however removing a key and re-inserting it will add it to the end\n' 'instead of keeping its old place.\n' '\n' - 'Dictionaries are mutable; they can be created by the "{...}" ' - 'notation\n' + 'Dictionaries are mutable; they can be created by the "{}" notation\n' '(see section Dictionary displays).\n' '\n' 'The extension modules "dbm.ndbm" and "dbm.gnu" provide additional\n' @@ -14139,8 +14189,8 @@ '|====================================================|====================================================|\n' '| function.__doc__ | The ' 'function’s documentation string, or "None" if |\n' - '| | unavailable. ' - 'Not inherited by subclasses. |\n' + '| | ' + 'unavailable. |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| function.__name__ | The ' 'function’s name. See also: "__name__ |\n' @@ -14288,21 +14338,10 @@ 'to\n' 'calling "f(C,1)" where "f" is the underlying function.\n' '\n' - 'Note that the transformation from function object to instance ' - 'method\n' - 'object happens each time the attribute is retrieved from the ' - 'instance.\n' - 'In some cases, a fruitful optimization is to assign the attribute ' - 'to a\n' - 'local variable and call that local variable. Also notice that this\n' - 'transformation only happens for user-defined functions; other ' - 'callable\n' - 'objects (and all non-callable objects) are retrieved without\n' - 'transformation. It is also important to note that user-defined\n' - 'functions which are attributes of a class instance are not ' - 'converted\n' - 'to bound methods; this *only* happens when the function is an\n' - 'attribute of the class.\n' + 'It is important to note that user-defined functions which are\n' + 'attributes of a class instance are not converted to bound methods;\n' + 'this *only* happens when the function is an attribute of the ' + 'class.\n' '\n' '\n' 'Generator functions\n' @@ -14521,41 +14560,122 @@ 'A class object can be called (see above) to yield a class instance\n' '(see below).\n' '\n' - 'Special attributes:\n' '\n' - ' "__name__"\n' - ' The class name.\n' + 'Special attributes\n' + '------------------\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| Attribute | ' + 'Meaning |\n' + '|====================================================|====================================================|\n' + '| type.__name__ | The class’s ' + 'name. See also: "__name__ attributes". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__qualname__ | The class’s ' + '*qualified name*. See also: |\n' + '| | ' + '"__qualname__ attributes". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__module__ | The name of ' + 'the module in which the class was |\n' + '| | ' + 'defined. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__dict__ | A "mapping ' + 'proxy" providing a read-only view of |\n' + '| | the class’s ' + 'namespace. See also: "__dict__ |\n' + '| | ' + 'attributes". 
|\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__bases__ | A "tuple" ' + 'containing the class’s bases. In most |\n' + '| | cases, for a ' + 'class defined as "class X(A, B, C)", |\n' + '| | ' + '"X.__bases__" will be exactly equal to "(A, B, |\n' + '| | ' + 'C)". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__doc__ | The class’s ' + 'documentation string, or "None" if |\n' + '| | undefined. ' + 'Not inherited by subclasses. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__annotations__ | A dictionary ' + 'containing *variable annotations* |\n' + '| | collected ' + 'during class body execution. For best |\n' + '| | practices on ' + 'working with "__annotations__", |\n' + '| | please see ' + 'Annotations Best Practices. Caution: |\n' + '| | Accessing ' + 'the "__annotations__" attribute of a |\n' + '| | class object ' + 'directly may yield incorrect results |\n' + '| | in the ' + 'presence of metaclasses. In addition, the |\n' + '| | attribute ' + 'may not exist for some classes. Use |\n' + '| | ' + '"inspect.get_annotations()" to retrieve class |\n' + '| | annotations ' + 'safely. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__type_params__ | A "tuple" ' + 'containing the type parameters of a |\n' + '| | generic ' + 'class. Added in version 3.12. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__static_attributes__ | A "tuple" ' + 'containing names of attributes of this |\n' + '| | class which ' + 'are assigned through "self.X" from any |\n' + '| | function in ' + 'its body. Added in version 3.13. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__firstlineno__ | The line ' + 'number of the first line of the class |\n' + '| | definition, ' + 'including decorators. Setting the |\n' + '| | "__module__" ' + 'attribute removes the |\n' + '| | ' + '"__firstlineno__" item from the type’s dictionary. |\n' + '| | Added in ' + 'version 3.13. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| type.__mro__ | The "tuple" ' + 'of classes that are considered when |\n' + '| | looking for ' + 'base classes during method resolution. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' '\n' - ' "__module__"\n' - ' The name of the module in which the class was defined.\n' '\n' - ' "__dict__"\n' - ' The dictionary containing the class’s namespace.\n' + 'Special methods\n' + '---------------\n' '\n' - ' "__bases__"\n' - ' A tuple containing the base classes, in the order of their\n' - ' occurrence in the base class list.\n' + 'In addition to the special attributes described above, all Python\n' + 'classes also have the following two methods available:\n' '\n' - ' "__doc__"\n' - ' The class’s documentation string, or "None" if undefined.\n' + 'type.mro()\n' '\n' - ' "__annotations__"\n' - ' A dictionary containing *variable annotations* collected ' - 'during\n' - ' class body execution. 
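Two of the newer entries in the attribute table above, __firstlineno__ and __static_attributes__ (both added in 3.13), in a quick sketch (class and attribute names are arbitrary):

    class Widget:
        def resize(self, w, h):
            self.width = w
            self.height = h

    print(Widget.__firstlineno__)         # line number of the 'class Widget:' statement
    print(Widget.__static_attributes__)   # contains 'width' and 'height'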
For best practices on working with\n' - ' "__annotations__", please see Annotations Best Practices.\n' + ' This method can be overridden by a metaclass to customize the\n' + ' method resolution order for its instances. It is called at ' + 'class\n' + ' instantiation, and its result is stored in "__mro__".\n' '\n' - ' "__type_params__"\n' - ' A tuple containing the type parameters of a generic class.\n' + 'type.__subclasses__()\n' '\n' - ' "__static_attributes__"\n' - ' A tuple containing names of attributes of this class which ' - 'are\n' - ' accessed through "self.X" from any function in its body.\n' + ' Each class keeps a list of weak references to its immediate\n' + ' subclasses. This method returns a list of all those references\n' + ' still alive. The list is in definition order. Example:\n' '\n' - ' "__firstlineno__"\n' - ' The line number of the first line of the class definition,\n' - ' including decorators.\n' + ' >>> class A: pass\n' + ' >>> class B(A): pass\n' + ' >>> A.__subclasses__()\n' + " []\n" '\n' '\n' 'Class instances\n' @@ -14595,8 +14715,19 @@ 'they have methods with certain special names. See section Special\n' 'method names.\n' '\n' - 'Special attributes: "__dict__" is the attribute dictionary;\n' - '"__class__" is the instance’s class.\n' + '\n' + 'Special attributes\n' + '------------------\n' + '\n' + 'object.__class__\n' + '\n' + ' The class to which a class instance belongs.\n' + '\n' + 'object.__dict__\n' + '\n' + ' A dictionary or other mapping object used to store an object’s\n' + ' (writable) attributes. Not all instances have a "__dict__"\n' + ' attribute; see the section on __slots__ for more details.\n' '\n' '\n' 'I/O objects (also known as file objects)\n' @@ -15333,7 +15464,7 @@ '\n' ' Return a shallow copy of the dictionary.\n' '\n' - ' classmethod fromkeys(iterable, value=None)\n' + ' classmethod fromkeys(iterable, value=None, /)\n' '\n' ' Create a new dictionary with keys from *iterable* and ' 'values set\n' @@ -16088,8 +16219,8 @@ '| | also removes it from ' '*s* | |\n' '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | remove the first item from ' - '*s* | (3) |\n' + '| "s.remove(x)" | removes the first item from ' + '*s* | (3) |\n' '| | where "s[i]" is equal to ' '*x* | |\n' '+--------------------------------+----------------------------------+-----------------------+\n' @@ -16460,7 +16591,7 @@ '\n' ' * The linspace recipe shows how to implement a lazy version of ' 'range\n' - ' suitable for floating point applications.\n', + ' suitable for floating-point applications.\n', 'typesseq-mutable': 'Mutable Sequence Types\n' '**********************\n' '\n' @@ -16553,8 +16684,8 @@ '| | also removes it from ' '*s* | |\n' '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | remove the first item ' - 'from *s* | (3) |\n' + '| "s.remove(x)" | removes the first ' + 'item from *s* | (3) |\n' '| | where "s[i]" is equal ' 'to *x* | |\n' '+--------------------------------+----------------------------------+-----------------------+\n' diff --git a/Lib/random.py b/Lib/random.py index bcc11c7cd3c..8b9a270c429 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -1013,7 +1013,7 @@ def _parse_args(arg_list: list[str] | None): help="print a random integer between 1 and N inclusive") group.add_argument( "-f", "--float", type=float, metavar="N", - help="print a random floating point number between 1 and N inclusive") + help="print a random 
floating-point number between 0 and N inclusive") group.add_argument( "--test", type=int, const=10_000, nargs="?", help=argparse.SUPPRESS) @@ -1038,7 +1038,7 @@ def main(arg_list: list[str] | None = None) -> int | str: return randint(1, args.integer) if args.float is not None: - return uniform(1, args.float) + return uniform(0, args.float) if args.test: _test(args.test) @@ -1055,7 +1055,7 @@ def main(arg_list: list[str] | None = None) -> int | str: try: # Is it a float? val = float(val) - return uniform(1, val) + return uniform(0, val) except ValueError: # Split in case of space-separated string: "a b c" return choice(val.split()) diff --git a/Lib/re/_casefix.py b/Lib/re/_casefix.py index 06507d08bee..fed2d84fc01 100644 --- a/Lib/re/_casefix.py +++ b/Lib/re/_casefix.py @@ -1,4 +1,4 @@ -# Auto-generated by Tools/scripts/generate_re_casefix.py. +# Auto-generated by Tools/build/generate_re_casefix.py. # Maps the code of lowercased character to codes of different lowercased # characters which have the same uppercase. diff --git a/Lib/sched.py b/Lib/sched.py index 14613cf2987..fb20639d459 100644 --- a/Lib/sched.py +++ b/Lib/sched.py @@ -11,7 +11,7 @@ implement simulated time by writing your own functions. This can also be used to integrate scheduling with STDWIN events; the delay function is allowed to modify the queue. Time can be expressed as -integers or floating point numbers, as long as it is consistent. +integers or floating-point numbers, as long as it is consistent. Events are specified by tuples (time, priority, action, argument, kwargs). As in UNIX, lower priority numbers mean higher priority; in this diff --git a/Lib/site.py b/Lib/site.py index 7eace190f5a..0a0dc47b174 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -312,6 +312,10 @@ def joinuser(*args): # Same to sysconfig.get_path('purelib', os.name+'_user') def _get_path(userbase): version = sys.version_info + if hasattr(sys, 'abiflags') and 't' in sys.abiflags: + abi_thread = 't' + else: + abi_thread = '' implementation = _get_implementation() implementation_lower = implementation.lower() @@ -322,7 +326,7 @@ def _get_path(userbase): if sys.platform == 'darwin' and sys._framework: return f'{userbase}/lib/{implementation_lower}/site-packages' - return f'{userbase}/lib/python{version[0]}.{version[1]}/site-packages' + return f'{userbase}/lib/python{version[0]}.{version[1]}{abi_thread}/site-packages' def getuserbase(): @@ -390,6 +394,10 @@ def getsitepackages(prefixes=None): implementation = _get_implementation().lower() ver = sys.version_info + if hasattr(sys, 'abiflags') and 't' in sys.abiflags: + abi_thread = 't' + else: + abi_thread = '' if os.sep == '/': libdirs = [sys.platlibdir] if sys.platlibdir != "lib": @@ -397,7 +405,7 @@ def getsitepackages(prefixes=None): for libdir in libdirs: path = os.path.join(prefix, libdir, - f"{implementation}{ver[0]}.{ver[1]}", + f"{implementation}{ver[0]}.{ver[1]}{abi_thread}", "site-packages") sitepackages.append(path) else: @@ -509,6 +517,7 @@ def register_readline(): pass if readline.get_current_history_length() == 0: + from _pyrepl.main import CAN_USE_PYREPL # If no history was loaded, default to .python_history, # or PYTHON_HISTORY. 
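The abi_thread suffix added to site.py above means free-threaded builds ('t' in sys.abiflags) get site-packages directories such as .../python3.13t/site-packages. A quick way to inspect the computed paths (output depends on the build and platform):

    import site
    import sys

    print(getattr(sys, "abiflags", ""))   # e.g. '' or 't'
    print(site.getusersitepackages())
    for path in site.getsitepackages():
        print(path)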
         # The guard is necessary to avoid doubling history size at
@@ -516,26 +525,18 @@ def register_readline():
         # through a PYTHONSTARTUP hook, see:
         # http://bugs.python.org/issue5845#msg198636
         history = gethistoryfile()
+        if os.getenv("PYTHON_BASIC_REPL") or not CAN_USE_PYREPL:
+            readline_module = readline
+        else:
+            readline_module = _pyrepl.readline
         try:
-            if os.getenv("PYTHON_BASIC_REPL"):
-                readline.read_history_file(history)
-            else:
-                _pyrepl.readline.read_history_file(history)
+            readline_module.read_history_file(history)
         except (OSError,* _pyrepl.unix_console._error):
             pass
         def write_history():
             try:
-                # _pyrepl.__main__ is executed as the __main__ module
-                from __main__ import CAN_USE_PYREPL
-            except ImportError:
-                CAN_USE_PYREPL = False
-
-            try:
-                if os.getenv("PYTHON_BASIC_REPL") or not CAN_USE_PYREPL:
-                    readline.write_history_file(history)
-                else:
-                    _pyrepl.readline.write_history_file(history)
+                readline_module.write_history_file(history)
             except (FileNotFoundError, PermissionError):
                 # home directory does not exist or is not writable
                 # https://bugs.python.org/issue19891
diff --git a/Lib/socket.py b/Lib/socket.py
index 524ce1361b9..9207101dcf9 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -592,16 +592,65 @@ def fromshare(info):
         return socket(0, 0, 0, info)
     __all__.append("fromshare")
-if hasattr(_socket, "socketpair"):
+# Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain.
+# This is used if _socket doesn't natively provide socketpair. It's
+# always defined so that it can be patched in for testing purposes.
+def _fallback_socketpair(family=AF_INET, type=SOCK_STREAM, proto=0):
+    if family == AF_INET:
+        host = _LOCALHOST
+    elif family == AF_INET6:
+        host = _LOCALHOST_V6
+    else:
+        raise ValueError("Only AF_INET and AF_INET6 socket address families "
+                         "are supported")
+    if type != SOCK_STREAM:
+        raise ValueError("Only SOCK_STREAM socket type is supported")
+    if proto != 0:
+        raise ValueError("Only protocol zero is supported")
+
+    # We create a connected TCP socket. Note the trick with
+    # setblocking(False) that prevents us from having to create a thread.
+    lsock = socket(family, type, proto)
+    try:
+        lsock.bind((host, 0))
+        lsock.listen()
+        # On IPv6, ignore flow_info and scope_id
+        addr, port = lsock.getsockname()[:2]
+        csock = socket(family, type, proto)
+        try:
+            csock.setblocking(False)
+            try:
+                csock.connect((addr, port))
+            except (BlockingIOError, InterruptedError):
+                pass
+            csock.setblocking(True)
+            ssock, _ = lsock.accept()
+        except:
+            csock.close()
+            raise
+    finally:
+        lsock.close()
-    def socketpair(family=None, type=SOCK_STREAM, proto=0):
-        """socketpair([family[, type[, proto]]]) -> (socket object, socket object)
+    # Authenticating avoids using a connection from something else
+    # able to connect to {host}:{port} instead of us.
+    # We expect only AF_INET and AF_INET6 families.
+    try:
+        if (
+            ssock.getsockname() != csock.getpeername()
+            or csock.getsockname() != ssock.getpeername()
+        ):
+            raise ConnectionError("Unexpected peer connection")
+    except:
+        # getsockname() and getpeername() can fail
+        # if either socket isn't connected.
+        ssock.close()
+        csock.close()
+        raise
-        Create a pair of socket objects from the sockets returned by the platform
-        socketpair() function.
-        The arguments are the same as for socket() except the default family is
-        AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
- """ + return (ssock, csock) + +if hasattr(_socket, "socketpair"): + def socketpair(family=None, type=SOCK_STREAM, proto=0): if family is None: try: family = AF_UNIX @@ -613,44 +662,7 @@ def socketpair(family=None, type=SOCK_STREAM, proto=0): return a, b else: - - # Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain. - def socketpair(family=AF_INET, type=SOCK_STREAM, proto=0): - if family == AF_INET: - host = _LOCALHOST - elif family == AF_INET6: - host = _LOCALHOST_V6 - else: - raise ValueError("Only AF_INET and AF_INET6 socket address families " - "are supported") - if type != SOCK_STREAM: - raise ValueError("Only SOCK_STREAM socket type is supported") - if proto != 0: - raise ValueError("Only protocol zero is supported") - - # We create a connected TCP socket. Note the trick with - # setblocking(False) that prevents us from having to create a thread. - lsock = socket(family, type, proto) - try: - lsock.bind((host, 0)) - lsock.listen() - # On IPv6, ignore flow_info and scope_id - addr, port = lsock.getsockname()[:2] - csock = socket(family, type, proto) - try: - csock.setblocking(False) - try: - csock.connect((addr, port)) - except (BlockingIOError, InterruptedError): - pass - csock.setblocking(True) - ssock, _ = lsock.accept() - except: - csock.close() - raise - finally: - lsock.close() - return (ssock, csock) + socketpair = _fallback_socketpair __all__.append("socketpair") socketpair.__doc__ = """socketpair([family[, type[, proto]]]) -> (socket object, socket object) diff --git a/Lib/ssl.py b/Lib/ssl.py index cc685c2cc40..c8703b046cf 100644 --- a/Lib/ssl.py +++ b/Lib/ssl.py @@ -513,18 +513,17 @@ def set_alpn_protocols(self, alpn_protocols): self._set_alpn_protocols(protos) def _load_windows_store_certs(self, storename, purpose): - certs = bytearray() try: for cert, encoding, trust in enum_certificates(storename): # CA certs are never PKCS#7 encoded if encoding == "x509_asn": if trust is True or purpose.oid in trust: - certs.extend(cert) + try: + self.load_verify_locations(cadata=cert) + except SSLError as exc: + warnings.warn(f"Bad certificate in Windows certificate store: {exc!s}") except PermissionError: warnings.warn("unable to enumerate Windows certificate store") - if certs: - self.load_verify_locations(cadata=certs) - return certs def load_default_certs(self, purpose=Purpose.SERVER_AUTH): if not isinstance(purpose, _ASN1Object): @@ -1165,11 +1164,21 @@ def getpeercert(self, binary_form=False): @_sslcopydoc def get_verified_chain(self): - return self._sslobj.get_verified_chain() + chain = self._sslobj.get_verified_chain() + + if chain is None: + return [] + + return [cert.public_bytes(_ssl.ENCODING_DER) for cert in chain] @_sslcopydoc def get_unverified_chain(self): - return self._sslobj.get_unverified_chain() + chain = self._sslobj.get_unverified_chain() + + if chain is None: + return [] + + return [cert.public_bytes(_ssl.ENCODING_DER) for cert in chain] @_sslcopydoc def selected_npn_protocol(self): diff --git a/Lib/statistics.py b/Lib/statistics.py index c2f4fe8e054..ad4a94219cf 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -11,7 +11,7 @@ Function Description ================== ================================================== mean Arithmetic mean (average) of data. -fmean Fast, floating point arithmetic mean. +fmean Fast, floating-point arithmetic mean. geometric_mean Geometric mean of data. harmonic_mean Harmonic mean of data. median Median (middle value) of data. 
diff --git a/Lib/symtable.py b/Lib/symtable.py index 73e9fb318ad..672ec0ce1ff 100644 --- a/Lib/symtable.py +++ b/Lib/symtable.py @@ -238,6 +238,11 @@ def is_local_symbol(ident): if is_local_symbol(st.name): match st.type: case _symtable.TYPE_FUNCTION: + # generators are of type TYPE_FUNCTION with a ".0" + # parameter as a first parameter (which makes them + # distinguishable from a function named 'genexpr') + if st.name == 'genexpr' and '.0' in st.varnames: + continue d[st.name] = 1 case _symtable.TYPE_TYPE_PARAMETERS: # Get the function-def block in the annotation @@ -245,7 +250,14 @@ def is_local_symbol(ident): scope_name = st.name for c in st.children: if c.name == scope_name and c.type == _symtable.TYPE_FUNCTION: - d[st.name] = 1 + # A generic generator of type TYPE_FUNCTION + # cannot be a direct child of 'st' (but it + # can be a descendant), e.g.: + # + # class A: + # type genexpr[genexpr] = (x for x in []) + assert scope_name != 'genexpr' or '.0' not in c.varnames + d[scope_name] = 1 break self.__methods = tuple(d) return self.__methods diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index 98a14e5d3a3..80aef344711 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -27,10 +27,10 @@ _INSTALL_SCHEMES = { 'posix_prefix': { - 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}', - 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages', + 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'purelib': '{base}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}/site-packages', 'include': '{installed_base}/include/{implementation_lower}{py_version_short}{abiflags}', 'platinclude': @@ -77,10 +77,10 @@ # Downstream distributors who patch posix_prefix/nt scheme are encouraged to # leave the following schemes unchanged 'posix_venv': { - 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}', - 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages', + 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'purelib': '{base}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}/site-packages', 'include': '{installed_base}/include/{implementation_lower}{py_version_short}{abiflags}', 'platinclude': @@ -148,11 +148,11 @@ def joinuser(*args): 'data': '{userbase}', }, 'posix_user': { - 'stdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'platstdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'purelib': '{userbase}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': 
'{userbase}/lib/{implementation_lower}{py_version_short}/site-packages', - 'include': '{userbase}/include/{implementation_lower}{py_version_short}', + 'stdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'platstdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'purelib': '{userbase}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'platlib': '{userbase}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'include': '{userbase}/include/{implementation_lower}{py_version_short}{abi_thread}', 'scripts': '{userbase}/bin', 'data': '{userbase}', }, @@ -487,6 +487,9 @@ def _init_config_vars(): # the init-function. _CONFIG_VARS['userbase'] = _getuserbase() + # e.g., 't' for free-threaded or '' for default build + _CONFIG_VARS['abi_thread'] = 't' if _CONFIG_VARS.get('Py_GIL_DISABLED') else '' + # Always convert srcdir to an absolute path srcdir = _CONFIG_VARS.get('srcdir', _PROJECT_BASE) if os.name == 'posix': @@ -639,7 +642,7 @@ def get_platform(): release = m.group() elif osname[:6] == "darwin": if sys.platform == "ios": - release = get_config_vars().get("IPHONEOS_DEPLOYMENT_TARGET", "12.0") + release = get_config_vars().get("IPHONEOS_DEPLOYMENT_TARGET", "13.0") osname = sys.platform machine = sys.implementation._multiarch else: @@ -655,6 +658,10 @@ def get_python_version(): return _PY_VERSION_SHORT +def _get_python_version_abi(): + return _PY_VERSION_SHORT + get_config_var("abi_thread") + + def expand_makefile_vars(s, vars): """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in 'string' according to 'vars' (a dictionary mapping variable names to diff --git a/Lib/tarfile.py b/Lib/tarfile.py index 5fc6183ffcf..ea036193411 100755 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -846,6 +846,9 @@ def data_filter(member, dest_path): # Sentinel for replace() defaults, meaning "don't change the attribute" _KEEP = object() +# Header length is digits followed by a space. +_header_length_prefix_re = re.compile(br"([0-9]{1,20}) ") + class TarInfo(object): """Informational class which holds the details about an archive member given by a tar header block. @@ -1433,37 +1436,59 @@ def _proc_pax(self, tarfile): else: pax_headers = tarfile.pax_headers.copy() - # Check if the pax header contains a hdrcharset field. This tells us - # the encoding of the path, linkpath, uname and gname fields. Normally, - # these fields are UTF-8 encoded but since POSIX.1-2008 tar - # implementations are allowed to store them as raw binary strings if - # the translation to UTF-8 fails. - match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) - if match is not None: - pax_headers["hdrcharset"] = match.group(1).decode("utf-8") - - # For the time being, we don't care about anything other than "BINARY". - # The only other value that is currently allowed by the standard is - # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. - hdrcharset = pax_headers.get("hdrcharset") - if hdrcharset == "BINARY": - encoding = tarfile.encoding - else: - encoding = "utf-8" - # Parse pax header information. A record looks like that: # "%d %s=%s\n" % (length, keyword, value). length is the size # of the complete record including the length field itself and - # the newline. keyword and value are both UTF-8 encoded strings. - regex = re.compile(br"(\d+) ([^=]+)=") + # the newline. 
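As the comment above spells out, each extended-header record is framed as "%d %s=%s\n", where the decimal prefix counts every byte of the record, including its own digits, the separating space, the "=" and the trailing newline. A tiny illustrative check of that framing (plain Python, independent of the tarfile internals), which is what lets the reworked parser below validate a record from its length prefix alone:

    # A well-formed pax record for keyword "path" with value "x".
    rec = b"9 path=x\n"

    length, _, rest = rec.partition(b" ")
    assert int(length) == len(rec)          # the prefix counts the whole record
    keyword, _, value = rest.rstrip(b"\n").partition(b"=")
    assert (keyword, value) == (b"path", b"x")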
pos = 0 - while match := regex.match(buf, pos): - length, keyword = match.groups() - length = int(length) - if length == 0: + encoding = None + raw_headers = [] + while len(buf) > pos and buf[pos] != 0x00: + if not (match := _header_length_prefix_re.match(buf, pos)): + raise InvalidHeaderError("invalid header") + try: + length = int(match.group(1)) + except ValueError: + raise InvalidHeaderError("invalid header") + # Headers must be at least 5 bytes, shortest being '5 x=\n'. + # Value is allowed to be empty. + if length < 5: + raise InvalidHeaderError("invalid header") + if pos + length > len(buf): + raise InvalidHeaderError("invalid header") + + header_value_end_offset = match.start(1) + length - 1 # Last byte of the header + keyword_and_value = buf[match.end(1) + 1:header_value_end_offset] + raw_keyword, equals, raw_value = keyword_and_value.partition(b"=") + + # Check the framing of the header. The last character must be '\n' (0x0A) + if not raw_keyword or equals != b"=" or buf[header_value_end_offset] != 0x0A: raise InvalidHeaderError("invalid header") - value = buf[match.end(2) + 1:match.start(1) + length - 1] + raw_headers.append((length, raw_keyword, raw_value)) + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. For the time being, we don't care about + # anything other than "BINARY". The only other value that is currently + # allowed by the standard is "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + # Note that we only follow the initial 'hdrcharset' setting to preserve + # the initial behavior of the 'tarfile' module. + if raw_keyword == b"hdrcharset" and encoding is None: + if raw_value == b"BINARY": + encoding = tarfile.encoding + else: # This branch ensures only the first 'hdrcharset' header is used. + encoding = "utf-8" + pos += length + + # If no explicit hdrcharset is set, we use UTF-8 as a default. + if encoding is None: + encoding = "utf-8" + + # After parsing the raw headers we can decode them to text. + for length, raw_keyword, raw_value in raw_headers: # Normally, we could just use "utf-8" as the encoding and "strict" # as the error handler, but we better not take the risk. For # example, GNU tar <= 1.23 is known to store filenames it cannot @@ -1471,17 +1496,16 @@ def _proc_pax(self, tarfile): # hdrcharset=BINARY header). # We first try the strict standard encoding, and if that fails we # fall back on the user's encoding and error handler. - keyword = self._decode_pax_field(keyword, "utf-8", "utf-8", + keyword = self._decode_pax_field(raw_keyword, "utf-8", "utf-8", tarfile.errors) if keyword in PAX_NAME_FIELDS: - value = self._decode_pax_field(value, encoding, tarfile.encoding, + value = self._decode_pax_field(raw_value, encoding, tarfile.encoding, tarfile.errors) else: - value = self._decode_pax_field(value, "utf-8", "utf-8", + value = self._decode_pax_field(raw_value, "utf-8", "utf-8", tarfile.errors) pax_headers[keyword] = value - pos += length # Fetch the next header. try: @@ -1496,7 +1520,7 @@ def _proc_pax(self, tarfile): elif "GNU.sparse.size" in pax_headers: # GNU extended sparse format version 0.0. 
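For the 0.0 sparse layout referenced here, the sparse map arrives as alternating GNU.sparse.offset and GNU.sparse.numbytes records, and the reworked _proc_gnusparse_00() below pairs them up from the already-validated raw headers instead of re-scanning the buffer with a regex. A rough sketch of that pairing over hypothetical (length, keyword, value) tuples:

    # Raw header tuples as collected by the parsing loop above (the lengths
    # are shown only for completeness; they are not used when pairing).
    raw_headers = [
        (23, b"GNU.sparse.offset", b"0"),
        (27, b"GNU.sparse.numbytes", b"512"),
        (26, b"GNU.sparse.offset", b"4096"),
        (27, b"GNU.sparse.numbytes", b"512"),
    ]

    offsets = [int(v) for _, k, v in raw_headers if k == b"GNU.sparse.offset"]
    numbytes = [int(v) for _, k, v in raw_headers if k == b"GNU.sparse.numbytes"]
    assert list(zip(offsets, numbytes)) == [(0, 512), (4096, 512)]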
- self._proc_gnusparse_00(next, pax_headers, buf) + self._proc_gnusparse_00(next, raw_headers) elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": # GNU extended sparse format version 1.0. @@ -1518,15 +1542,24 @@ def _proc_pax(self, tarfile): return next - def _proc_gnusparse_00(self, next, pax_headers, buf): + def _proc_gnusparse_00(self, next, raw_headers): """Process a GNU tar extended sparse header, version 0.0. """ offsets = [] - for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): - offsets.append(int(match.group(1))) numbytes = [] - for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): - numbytes.append(int(match.group(1))) + for _, keyword, value in raw_headers: + if keyword == b"GNU.sparse.offset": + try: + offsets.append(int(value.decode())) + except ValueError: + raise InvalidHeaderError("invalid header") + + elif keyword == b"GNU.sparse.numbytes": + try: + numbytes.append(int(value.decode())) + except ValueError: + raise InvalidHeaderError("invalid header") + next.sparse = list(zip(offsets, numbytes)) def _proc_gnusparse_01(self, next, pax_headers): diff --git a/Lib/test/_test_eintr.py b/Lib/test/_test_eintr.py index 15586f15dfa..493932d6c6d 100644 --- a/Lib/test/_test_eintr.py +++ b/Lib/test/_test_eintr.py @@ -18,6 +18,7 @@ import socket import subprocess import sys +import textwrap import time import unittest @@ -492,29 +493,31 @@ def test_devpoll(self): self.check_elapsed_time(dt) -class FNTLEINTRTest(EINTRBaseTest): +class FCNTLEINTRTest(EINTRBaseTest): def _lock(self, lock_func, lock_name): self.addCleanup(os_helper.unlink, os_helper.TESTFN) - code = '\n'.join(( - "import fcntl, time", - "with open('%s', 'wb') as f:" % os_helper.TESTFN, - " fcntl.%s(f, fcntl.LOCK_EX)" % lock_name, - " time.sleep(%s)" % self.sleep_time)) - start_time = time.monotonic() - proc = self.subprocess(code) + rd1, wr1 = os.pipe() + rd2, wr2 = os.pipe() + for fd in (rd1, wr1, rd2, wr2): + self.addCleanup(os.close, fd) + code = textwrap.dedent(f""" + import fcntl, os, time + with open('{os_helper.TESTFN}', 'wb') as f: + fcntl.{lock_name}(f, fcntl.LOCK_EX) + os.write({wr1}, b"ok") + _ = os.read({rd2}, 2) # wait for parent process + time.sleep({self.sleep_time}) + """) + proc = self.subprocess(code, pass_fds=[wr1, rd2]) with kill_on_error(proc): with open(os_helper.TESTFN, 'wb') as f: # synchronize the subprocess + ok = os.read(rd1, 2) + self.assertEqual(ok, b"ok") + + # notify the child that the parent is ready start_time = time.monotonic() - for _ in support.sleeping_retry(support.LONG_TIMEOUT, error=False): - try: - lock_func(f, fcntl.LOCK_EX | fcntl.LOCK_NB) - lock_func(f, fcntl.LOCK_UN) - except BlockingIOError: - break - else: - dt = time.monotonic() - start_time - raise Exception("failed to sync child in %.1f sec" % dt) + os.write(wr2, b"go") # the child locked the file just a moment ago for 'sleep_time' seconds # that means that the lock below will block for 'sleep_time' minus some diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index f126b6745dc..5dae370936b 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -1332,6 +1332,23 @@ def _on_queue_feeder_error(e, obj): self.assertTrue(not_serializable_obj.reduce_was_called) self.assertTrue(not_serializable_obj.on_queue_feeder_error_was_called) + def test_closed_queue_empty_exceptions(self): + # Assert that checking the emptiness of an unused closed queue + # does not raise an OSError. 
The rationale is that q.close() is + # a no-op upon construction and becomes effective once the queue + # has been used (e.g., by calling q.put()). + for q in multiprocessing.Queue(), multiprocessing.JoinableQueue(): + q.close() # this is a no-op since the feeder thread is None + q.join_thread() # this is also a no-op + self.assertTrue(q.empty()) + + for q in multiprocessing.Queue(), multiprocessing.JoinableQueue(): + q.put('foo') # make sure that the queue is 'used' + q.close() # close the feeder thread + q.join_thread() # make sure to join the feeder thread + with self.assertRaisesRegex(OSError, 'is closed'): + q.empty() + def test_closed_queue_put_get_exceptions(self): for q in multiprocessing.Queue(), multiprocessing.JoinableQueue(): q.close() @@ -5815,6 +5832,15 @@ def _test_empty(cls, queue, child_can_start, parent_can_continue): finally: parent_can_continue.set() + def test_empty_exceptions(self): + # Assert that checking emptiness of a closed queue raises + # an OSError, independently of whether the queue was used + # or not. This differs from Queue and JoinableQueue. + q = multiprocessing.SimpleQueue() + q.close() # close the pipe + with self.assertRaisesRegex(OSError, 'is closed'): + q.empty() + def test_empty(self): queue = multiprocessing.SimpleQueue() child_can_start = multiprocessing.Event() diff --git a/Lib/test/certdata/cert3.pem b/Lib/test/certdata/cert3.pem new file mode 100644 index 00000000000..034bc43ff19 --- /dev/null +++ b/Lib/test/certdata/cert3.pem @@ -0,0 +1,34 @@ +-----BEGIN CERTIFICATE----- +MIIF8TCCBFmgAwIBAgIJAMstgJlaaVJcMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNV +BAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29mdHdhcmUgRm91bmRhdGlvbiBDQTEW +MBQGA1UEAwwNb3VyLWNhLXNlcnZlcjAeFw0xODA4MjkxNDIzMTZaFw0zNzEwMjgx +NDIzMTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEj +MCEGA1UECgwaUHl0aG9uIFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMMCWxv +Y2FsaG9zdDCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKAqKHEL7aDt +3swl8hQF8VaK4zDGDRaF3E/IZTMwCN7FsQ4ejSiOe3E90f0phHCIpEpv2OebNenY +IpOGoFgkh62r/cthmnhu8Mn+FUIv17iOq7WX7B30OSqEpnr1voLX93XYkAq8LlMh +P79vsSCVhTwow3HZY7krEgl5WlfryOfj1i1TODSFPRCJePh66BsOTUvV/33GC+Qd +pVZVDGLowU1Ycmr/FdRvwT+F39Dehp03UFcxaX0/joPhH5gYpBB1kWTAQmxuqKMW +9ZZs6hrPtMXF/yfSrrXrzTdpct9paKR8RcufOcS8qju/ISK+1P/LXg2b5KJHedLo +TTIO3yCZ4d1odyuZBP7JDrI05gMJx95gz6sG685Qc+52MzLSTwr/Qg+MOjQoBy0o +8fRRVvIMEwoN0ZDb4uFEUuwZceUP1vTk/GGpNQt7ct4ropn6K4Zta3BUtovlLjZa +IIBhc1KETUqjRDvC6ACKmlcJ/5pY/dbH1lOux+IMFsh+djmaV90b3QIDAQABo4IB +wDCCAbwwFAYDVR0RBA0wC4IJbG9jYWxob3N0MA4GA1UdDwEB/wQEAwIFoDAdBgNV +HSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4E +FgQUP7HpT6C+MGY+ChjID0caTzRqD0IwfQYDVR0jBHYwdIAU8+yUjvKOMMSOaMK/ +jmoZwMGfdmWhUaRPME0xCzAJBgNVBAYTAlhZMSYwJAYDVQQKDB1QeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbiBDQTEWMBQGA1UEAwwNb3VyLWNhLXNlcnZlcoIJAMst +gJlaaVJbMIGDBggrBgEFBQcBAQR3MHUwPAYIKwYBBQUHMAKGMGh0dHA6Ly90ZXN0 +Y2EucHl0aG9udGVzdC5uZXQvdGVzdGNhL3B5Y2FjZXJ0LmNlcjA1BggrBgEFBQcw +AYYpaHR0cDovL3Rlc3RjYS5weXRob250ZXN0Lm5ldC90ZXN0Y2Evb2NzcC8wQwYD +VR0fBDwwOjA4oDagNIYyaHR0cDovL3Rlc3RjYS5weXRob250ZXN0Lm5ldC90ZXN0 +Y2EvcmV2b2NhdGlvbi5jcmwwDQYJKoZIhvcNAQELBQADggGBAMo0usXQzycxMtYN +JzC42xfftzmnu7E7hsQx/fur22MazJCruU6rNEkMXow+cKOnay+nmiV7AVoYlkh2 ++DZ4dPq8fWh/5cqmnXvccr2jJVEXaOjp1wKGLH0WfLXcRLIK4/fJM6NRNoO81HDN +hJGfBrot0gUKZcPZVQmouAlpu5OGwrfCkHR8v/BdvA5jE4zr+g/x+uUScE0M64wu +okJCAAQP/PkfQZxjePBmk7KPLuiTHFDLLX+2uldvUmLXOQsJgqumU03MBT4Z8NTA +zqmtEM65ceSP8lo8Zbrcy+AEkCulFaZ92tyjtbe8oN4wTmTLFw06oFLSZzuiOgDV +OaphdVKf/pvA6KBpr6izox0KQFIE5z3AAJZfKzMGDDD20xhy7jjQZNMAhjfsT+k4 
+SeYB/6KafNxq08uoulj7w4Z4R/EGpkXnU96ZHYHmvGN0RnxwI1cpYHCazG8AjsK/ +anN9brBi5twTGrn+D8LRBqF5Yn+2MKkD0EdXJdtIENHP+32sPQ== +-----END CERTIFICATE----- \ No newline at end of file diff --git a/Lib/test/clinic.test.c b/Lib/test/clinic.test.c index 9d8f1ac3af7..ff1c345d2ba 100644 --- a/Lib/test/clinic.test.c +++ b/Lib/test/clinic.test.c @@ -3935,8 +3935,8 @@ test_vararg_and_posonly a: object - *args: object / + *args: object [clinic start generated code]*/ @@ -3978,7 +3978,7 @@ test_vararg_and_posonly(PyObject *module, PyObject *const *args, Py_ssize_t narg static PyObject * test_vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args) -/*[clinic end generated code: output=79b75dc07decc8d6 input=08dc2bf7afbf1613]*/ +/*[clinic end generated code: output=79b75dc07decc8d6 input=9cfa748bbff09877]*/ /*[clinic input] test_vararg @@ -4721,7 +4721,7 @@ Test_an_metho_arg_named_arg_impl(TestObj *self, int arg) /*[clinic input] Test.__init__ *args: object - / + Varargs init method. For example, nargs is translated to PyTuple_GET_SIZE. [clinic start generated code]*/ @@ -4759,14 +4759,14 @@ Test___init__(PyObject *self, PyObject *args, PyObject *kwargs) static int Test___init___impl(TestObj *self, PyObject *args) -/*[clinic end generated code: output=0ed1009fe0dcf98d input=96c3ddc0cd38fc0c]*/ +/*[clinic end generated code: output=0ed1009fe0dcf98d input=2a8bd0033c9ac772]*/ /*[clinic input] @classmethod Test.__new__ *args: object - / + Varargs new method. For example, nargs is translated to PyTuple_GET_SIZE. [clinic start generated code]*/ @@ -4803,7 +4803,7 @@ Test(PyTypeObject *type, PyObject *args, PyObject *kwargs) static PyObject * Test_impl(PyTypeObject *type, PyObject *args) -/*[clinic end generated code: output=8b219f6633e2a2e9 input=26a672e2e9750120]*/ +/*[clinic end generated code: output=8b219f6633e2a2e9 input=70ad829df3dd9b84]*/ /*[clinic input] diff --git a/Lib/test/crashers/README b/Lib/test/crashers/README index d844385113e..7111946b93b 100644 --- a/Lib/test/crashers/README +++ b/Lib/test/crashers/README @@ -15,7 +15,3 @@ what the variables are. Once the crash is fixed, the test case should be moved into an appropriate test (even if it was originally from the test suite). This ensures the regression doesn't happen again. And if it does, it should be easier to track down. - -Also see Lib/test_crashers.py which exercises the crashers in this directory. -In particular, make sure to add any new infinite loop crashers to the black -list so it doesn't try to run them. diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index 77472d84e60..d0ee974dbcd 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -52,25 +52,6 @@ pass # -# This is copied from test_import/__init__.py. -def no_rerun(reason): - """Skip rerunning for a particular test. - - WARNING: Use this decorator with care; skipping rerunning makes it - impossible to find reference leaks. Provide a clear reason for skipping the - test using the 'reason' parameter. 
- """ - def deco(func): - _has_run = False - def wrapper(self): - nonlocal _has_run - if _has_run: - self.skipTest(reason) - func(self) - _has_run = True - return wrapper - return deco - pickle_loads = {pickle.loads, pickle._loads} pickle_choices = [(pickle, pickle, proto) @@ -6412,7 +6393,6 @@ class IranTest(ZoneInfoTest): @unittest.skipIf(_testcapi is None, 'need _testcapi module') -@no_rerun("the encapsulated datetime C API does not support reloading") class CapiTest(unittest.TestCase): def setUp(self): # Since the C API is not present in the _Pure tests, skip all tests @@ -6804,6 +6784,13 @@ def test_datetime_from_timestamp(self): self.assertEqual(dt_orig, dt_rt) def test_type_check_in_subinterp(self): + # iOS requires the use of the custom framework loader, + # not the ExtensionFileLoader. + if sys.platform == "ios": + extension_loader = "AppleFrameworkLoader" + else: + extension_loader = "ExtensionFileLoader" + script = textwrap.dedent(f""" if {_interpreters is None}: import _testcapi as module @@ -6813,7 +6800,7 @@ def test_type_check_in_subinterp(self): import importlib.util fullname = '_testcapi_datetime' origin = importlib.util.find_spec('_testcapi').origin - loader = importlib.machinery.ExtensionFileLoader(fullname, origin) + loader = importlib.machinery.{extension_loader}(fullname, origin) spec = importlib.util.spec_from_loader(fullname, loader) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) @@ -6881,6 +6868,38 @@ def pickle_fake_date(datetime_) -> Type[FakeDate]: """) script_helper.assert_python_ok('-c', script) + def test_update_type_cache(self): + # gh-120782 + script = textwrap.dedent(""" + import sys + for i in range(5): + import _datetime + assert _datetime.date.max > _datetime.date.min + assert _datetime.time.max > _datetime.time.min + assert _datetime.datetime.max > _datetime.datetime.min + assert _datetime.timedelta.max > _datetime.timedelta.min + assert _datetime.date.__dict__["min"] is _datetime.date.min + assert _datetime.date.__dict__["max"] is _datetime.date.max + assert _datetime.date.__dict__["resolution"] is _datetime.date.resolution + assert _datetime.time.__dict__["min"] is _datetime.time.min + assert _datetime.time.__dict__["max"] is _datetime.time.max + assert _datetime.time.__dict__["resolution"] is _datetime.time.resolution + assert _datetime.datetime.__dict__["min"] is _datetime.datetime.min + assert _datetime.datetime.__dict__["max"] is _datetime.datetime.max + assert _datetime.datetime.__dict__["resolution"] is _datetime.datetime.resolution + assert _datetime.timedelta.__dict__["min"] is _datetime.timedelta.min + assert _datetime.timedelta.__dict__["max"] is _datetime.timedelta.max + assert _datetime.timedelta.__dict__["resolution"] is _datetime.timedelta.resolution + assert _datetime.timezone.__dict__["min"] is _datetime.timezone.min + assert _datetime.timezone.__dict__["max"] is _datetime.timezone.max + assert _datetime.timezone.__dict__["utc"] is _datetime.timezone.utc + assert isinstance(_datetime.timezone.min, _datetime.tzinfo) + assert isinstance(_datetime.timezone.max, _datetime.tzinfo) + assert isinstance(_datetime.timezone.utc, _datetime.tzinfo) + del sys.modules['_datetime'] + """) + script_helper.assert_python_ok('-c', script) + def load_tests(loader, standard_tests, pattern): standard_tests.addTest(ZoneInfoCompleteTest()) diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index d4dac77b250..8bef04cba81 100644 --- a/Lib/test/libregrtest/cmdline.py +++ 
b/Lib/test/libregrtest/cmdline.py @@ -174,6 +174,7 @@ def __init__(self, **kwargs) -> None: self.tempdir = None self._add_python_opts = True self.xmlpath = None + self.single_process = False super().__init__(**kwargs) @@ -307,6 +308,12 @@ def _create_parser(): group.add_argument('-j', '--multiprocess', metavar='PROCESSES', dest='use_mp', type=int, help='run PROCESSES processes at once') + group.add_argument('--single-process', action='store_true', + dest='single_process', + help='always run all tests sequentially in ' + 'a single process, ignore -jN option, ' + 'and failed tests are also rerun sequentially ' + 'in the same process') group.add_argument('-T', '--coverage', action='store_true', dest='trace', help='turn on code coverage tracing using the trace ' @@ -421,9 +428,7 @@ def _parse_args(args, **kwargs): # Continuous Integration (CI): common options for fast/slow CI modes if ns.slow_ci or ns.fast_ci: # Similar to options: - # - # -j0 --randomize --fail-env-changed --fail-rerun --rerun - # --slowest --verbose3 + # -j0 --randomize --fail-env-changed --rerun --slowest --verbose3 if ns.use_mp is None: ns.use_mp = 0 ns.randomize = True @@ -435,6 +440,10 @@ def _parse_args(args, **kwargs): else: ns._add_python_opts = False + # --singleprocess overrides -jN option + if ns.single_process: + ns.use_mp = None + # When both --slow-ci and --fast-ci options are present, # --slow-ci has the priority if ns.slow_ci: diff --git a/Lib/test/libregrtest/logger.py b/Lib/test/libregrtest/logger.py index a1257069273..fa1d4d575c8 100644 --- a/Lib/test/libregrtest/logger.py +++ b/Lib/test/libregrtest/logger.py @@ -43,7 +43,10 @@ def log(self, line: str = '') -> None: def get_load_avg(self) -> float | None: if hasattr(os, 'getloadavg'): - return os.getloadavg()[0] + try: + return os.getloadavg()[0] + except OSError: + pass if self.win_load_tracker is not None: return self.win_load_tracker.getloadavg() return None diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 9e7a7d60880..5148d307051 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -89,12 +89,13 @@ def __init__(self, ns: Namespace, _add_python_opts: bool = False): self.cmdline_args: TestList = ns.args # Workers - if ns.use_mp is None: - num_workers = 0 # run sequentially + self.single_process: bool = ns.single_process + if self.single_process or ns.use_mp is None: + num_workers = 0 # run sequentially in a single process elif ns.use_mp <= 0: - num_workers = -1 # use the number of CPUs + num_workers = -1 # run in parallel, use the number of CPUs else: - num_workers = ns.use_mp + num_workers = ns.use_mp # run in parallel self.num_workers: int = num_workers self.worker_json: StrJSON | None = ns.worker_json @@ -236,7 +237,7 @@ def list_tests(tests: TestTuple): def _rerun_failed_tests(self, runtests: RunTests): # Configure the runner to re-run tests - if self.num_workers == 0: + if self.num_workers == 0 and not self.single_process: # Always run tests in fresh processes to have more deterministic # initial state. 
Don't re-run tests in parallel but limit to a # single worker process to have side effects (on the system load @@ -246,7 +247,6 @@ def _rerun_failed_tests(self, runtests: RunTests): tests, match_tests_dict = self.results.prepare_rerun() # Re-run failed tests - self.log(f"Re-running {len(tests)} failed tests in verbose mode in subprocesses") runtests = runtests.copy( tests=tests, rerun=True, @@ -256,7 +256,15 @@ def _rerun_failed_tests(self, runtests: RunTests): match_tests_dict=match_tests_dict, output_on_failure=False) self.logger.set_tests(runtests) - self._run_tests_mp(runtests, self.num_workers) + + msg = f"Re-running {len(tests)} failed tests in verbose mode" + if not self.single_process: + msg = f"{msg} in subprocesses" + self.log(msg) + self._run_tests_mp(runtests, self.num_workers) + else: + self.log(msg) + self.run_tests_sequentially(runtests) return runtests def rerun_failed_tests(self, runtests: RunTests): @@ -371,7 +379,7 @@ def run_tests_sequentially(self, runtests) -> None: tests = count(jobs, 'test') else: tests = 'tests' - msg = f"Run {tests} sequentially" + msg = f"Run {tests} sequentially in a single process" if runtests.timeout: msg += " (timeout: %s)" % format_duration(runtests.timeout) self.log(msg) @@ -599,7 +607,7 @@ def _add_cross_compile_opts(self, regrtest_opts): keep_environ = True if cross_compile and hostrunner: - if self.num_workers == 0: + if self.num_workers == 0 and not self.single_process: # For now use only two cores for cross-compiled builds; # hostrunner can be expensive. regrtest_opts.extend(['-j', '2']) diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index 621e97514d4..ff811ee0a4a 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -145,7 +145,7 @@ def get_pooled_int(value): # Use an internal-only keyword argument that mypy doesn't know yet _only_immortal=True) # type: ignore[call-arg] alloc_after = getallocatedblocks() - interned_immortal_after - rc_after = gettotalrefcount() - interned_immortal_after * 2 + rc_after = gettotalrefcount() fd_after = fd_count() rc_deltas[i] = get_pooled_int(rc_after - rc_before) @@ -247,9 +247,13 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs): abs_classes = filter(isabstract, abs_classes) for abc in abs_classes: for obj in abc.__subclasses__() + [abc]: - for ref in abcs.get(obj, set()): - if ref() is not None: - obj.register(ref()) + refs = abcs.get(obj, None) + if refs is not None: + obj._abc_registry_clear() + for ref in refs: + subclass = ref() + if subclass is not None: + obj.register(subclass) obj._abc_caches_clear() # Clear caches diff --git a/Lib/test/libregrtest/testresult.py b/Lib/test/libregrtest/testresult.py index de23fdd59de..1820f354572 100644 --- a/Lib/test/libregrtest/testresult.py +++ b/Lib/test/libregrtest/testresult.py @@ -9,6 +9,7 @@ import traceback import unittest from test import support +from test.libregrtest.utils import sanitize_xml class RegressionTestResult(unittest.TextTestResult): USE_XML = False @@ -65,23 +66,24 @@ def _add_result(self, test, capture=False, **args): if capture: if self._stdout_buffer is not None: stdout = self._stdout_buffer.getvalue().rstrip() - ET.SubElement(e, 'system-out').text = stdout + ET.SubElement(e, 'system-out').text = sanitize_xml(stdout) if self._stderr_buffer is not None: stderr = self._stderr_buffer.getvalue().rstrip() - ET.SubElement(e, 'system-err').text = stderr + ET.SubElement(e, 'system-err').text = sanitize_xml(stderr) for k, v in args.items(): if not k or not v: continue + e2 = 
ET.SubElement(e, k) if hasattr(v, 'items'): for k2, v2 in v.items(): if k2: - e2.set(k2, str(v2)) + e2.set(k2, sanitize_xml(str(v2))) else: - e2.text = str(v2) + e2.text = sanitize_xml(str(v2)) else: - e2.text = str(v) + e2.text = sanitize_xml(str(v)) @classmethod def __makeErrorDict(cls, err_type, err_value, err_tb): diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 8253d330b95..953ffb3f50f 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -5,6 +5,7 @@ import os.path import platform import random +import re import shlex import signal import subprocess @@ -263,6 +264,12 @@ def clear_caches(): for f in typing._cleanups: f() + import inspect + abs_classes = filter(inspect.isabstract, typing.__dict__.values()) + for abc in abs_classes: + for obj in abc.__subclasses__() + [abc]: + obj._abc_caches_clear() + try: fractions = sys.modules['fractions'] except KeyError: @@ -293,29 +300,78 @@ def get_build_info(): config_args = sysconfig.get_config_var('CONFIG_ARGS') or '' cflags = sysconfig.get_config_var('PY_CFLAGS') or '' - cflags_nodist = sysconfig.get_config_var('PY_CFLAGS_NODIST') or '' + cflags += ' ' + (sysconfig.get_config_var('PY_CFLAGS_NODIST') or '') ldflags_nodist = sysconfig.get_config_var('PY_LDFLAGS_NODIST') or '' build = [] # --disable-gil if sysconfig.get_config_var('Py_GIL_DISABLED'): - build.append("free_threading") + if not sys.flags.ignore_environment: + PYTHON_GIL = os.environ.get('PYTHON_GIL', None) + if PYTHON_GIL: + PYTHON_GIL = (PYTHON_GIL == '1') + else: + PYTHON_GIL = None + + free_threading = "free_threading" + if PYTHON_GIL is not None: + free_threading = f"{free_threading} GIL={int(PYTHON_GIL)}" + build.append(free_threading) if hasattr(sys, 'gettotalrefcount'): # --with-pydebug build.append('debug') - if '-DNDEBUG' in (cflags + cflags_nodist): + if '-DNDEBUG' in cflags: build.append('without_assert') else: build.append('release') if '--with-assertions' in config_args: build.append('with_assert') - elif '-DNDEBUG' not in (cflags + cflags_nodist): + elif '-DNDEBUG' not in cflags: build.append('with_assert') + # --enable-experimental-jit + tier2 = re.search('-D_Py_TIER2=([0-9]+)', cflags) + if tier2: + tier2 = int(tier2.group(1)) + + if not sys.flags.ignore_environment: + PYTHON_JIT = os.environ.get('PYTHON_JIT', None) + if PYTHON_JIT: + PYTHON_JIT = (PYTHON_JIT != '0') + else: + PYTHON_JIT = None + + if tier2 == 1: # =yes + if PYTHON_JIT == False: + jit = 'JIT=off' + else: + jit = 'JIT' + elif tier2 == 3: # =yes-off + if PYTHON_JIT: + jit = 'JIT' + else: + jit = 'JIT=off' + elif tier2 == 4: # =interpreter + if PYTHON_JIT == False: + jit = 'JIT-interpreter=off' + else: + jit = 'JIT-interpreter' + elif tier2 == 6: # =interpreter-off (Secret option!) 
+ if PYTHON_JIT: + jit = 'JIT-interpreter' + else: + jit = 'JIT-interpreter=off' + elif '-D_Py_JIT' in cflags: + jit = 'JIT' + else: + jit = None + if jit: + build.append(jit) + # --enable-framework=name framework = sysconfig.get_config_var('PYTHONFRAMEWORK') if framework: @@ -336,6 +392,11 @@ def get_build_info(): if support.check_cflags_pgo(): # PGO (--enable-optimizations) optimizations.append('PGO') + + if support.check_bolt_optimized(): + # BOLT (--enable-bolt) + optimizations.append('BOLT') + if optimizations: build.append('+'.join(optimizations)) @@ -712,3 +773,24 @@ def get_signal_name(exitcode): pass return None + + +ILLEGAL_XML_CHARS_RE = re.compile( + '[' + # Control characters; newline (\x0A and \x0D) and TAB (\x09) are legal + '\x00-\x08\x0B\x0C\x0E-\x1F' + # Surrogate characters + '\uD800-\uDFFF' + # Special Unicode characters + '\uFFFE' + '\uFFFF' + # Match multiple sequential invalid characters for better effiency + ']+') + +def _sanitize_xml_replace(regs): + text = regs[0] + return ''.join(f'\\x{ord(ch):02x}' if ch <= '\xff' else ascii(ch)[1:-1] + for ch in text) + +def sanitize_xml(text): + return ILLEGAL_XML_CHARS_RE.sub(_sanitize_xml_replace, text) diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py index 024c6debcd4..8c8f8901f00 100644 --- a/Lib/test/lock_tests.py +++ b/Lib/test/lock_tests.py @@ -1013,6 +1013,10 @@ def multipass(self, results, n): self.assertEqual(self.barrier.n_waiting, 0) self.assertFalse(self.barrier.broken) + def test_constructor(self): + self.assertRaises(ValueError, self.barriertype, parties=0) + self.assertRaises(ValueError, self.barriertype, parties=-1) + def test_barrier(self, passes=1): """ Test that a barrier is passed in lockstep diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 9922591ce71..7321546a747 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -144,6 +144,14 @@ class E(C): def __getinitargs__(self): return () +import __main__ +__main__.C = C +C.__module__ = "__main__" +__main__.D = D +D.__module__ = "__main__" +__main__.E = E +E.__module__ = "__main__" + # Simple mutable object. 
class Object: pass @@ -157,14 +165,6 @@ def __reduce__(self): # Shouldn't support the recursion itself return K, (self.value,) -import __main__ -__main__.C = C -C.__module__ = "__main__" -__main__.D = D -D.__module__ = "__main__" -__main__.E = E -E.__module__ = "__main__" - class myint(int): def __init__(self, x): self.str = str(x) @@ -1179,6 +1179,153 @@ def test_compat_unpickle(self): self.assertIs(type(unpickled), collections.UserDict) self.assertEqual(unpickled, collections.UserDict({1: 2})) + def test_load_global(self): + self.assertIs(self.loads(b'cbuiltins\nstr\n.'), str) + self.assertIs(self.loads(b'cmath\nlog\n.'), math.log) + self.assertIs(self.loads(b'cos.path\njoin\n.'), os.path.join) + self.assertIs(self.loads(b'\x80\x04cbuiltins\nstr.upper\n.'), str.upper) + with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)): + self.assertEqual(self.loads(b'\x80\x04cm\xc3\xb6dule\ngl\xc3\xb6bal\n.'), 42) + + self.assertRaises(UnicodeDecodeError, self.loads, b'c\xff\nlog\n.') + self.assertRaises(UnicodeDecodeError, self.loads, b'cmath\n\xff\n.') + self.assertRaises(self.truncated_errors, self.loads, b'c\nlog\n.') + self.assertRaises(self.truncated_errors, self.loads, b'cmath\n\n.') + self.assertRaises(self.truncated_errors, self.loads, b'\x80\x04cmath\n\n.') + + def test_load_stack_global(self): + self.assertIs(self.loads(b'\x8c\x08builtins\x8c\x03str\x93.'), str) + self.assertIs(self.loads(b'\x8c\x04math\x8c\x03log\x93.'), math.log) + self.assertIs(self.loads(b'\x8c\x07os.path\x8c\x04join\x93.'), + os.path.join) + self.assertIs(self.loads(b'\x80\x04\x8c\x08builtins\x8c\x09str.upper\x93.'), + str.upper) + with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)): + self.assertEqual(self.loads(b'\x80\x04\x8c\x07m\xc3\xb6dule\x8c\x07gl\xc3\xb6bal\x93.'), 42) + + self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x01\xff\x8c\x03log\x93.') + self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x04math\x8c\x01\xff\x93.') + self.assertRaises(ValueError, self.loads, b'\x8c\x00\x8c\x03log\x93.') + self.assertRaises(AttributeError, self.loads, b'\x8c\x04math\x8c\x00\x93.') + self.assertRaises(AttributeError, self.loads, b'\x80\x04\x8c\x04math\x8c\x00\x93.') + + self.assertRaises(pickle.UnpicklingError, self.loads, b'N\x8c\x03log\x93.') + self.assertRaises(pickle.UnpicklingError, self.loads, b'\x8c\x04mathN\x93.') + self.assertRaises(pickle.UnpicklingError, self.loads, b'\x80\x04\x8c\x04mathN\x93.') + + def test_find_class(self): + unpickler = self.unpickler(io.BytesIO()) + unpickler_nofix = self.unpickler(io.BytesIO(), fix_imports=False) + unpickler4 = self.unpickler(io.BytesIO(b'\x80\x04N.')) + unpickler4.load() + + self.assertIs(unpickler.find_class('__builtin__', 'str'), str) + self.assertRaises(ModuleNotFoundError, + unpickler_nofix.find_class, '__builtin__', 'str') + self.assertIs(unpickler.find_class('builtins', 'str'), str) + self.assertIs(unpickler_nofix.find_class('builtins', 'str'), str) + self.assertIs(unpickler.find_class('math', 'log'), math.log) + self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join) + self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join) + + self.assertIs(unpickler4.find_class('builtins', 'str.upper'), str.upper) + with self.assertRaises(AttributeError): + unpickler.find_class('builtins', 'str.upper') + + with self.assertRaises(AttributeError): + unpickler.find_class('math', 'spam') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', 'spam') + with 
self.assertRaises(AttributeError): + unpickler.find_class('math', 'log.spam') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', 'log.spam') + with self.assertRaises(AttributeError): + unpickler.find_class('math', 'log..spam') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', 'log..spam') + with self.assertRaises(AttributeError): + unpickler.find_class('math', '') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', '') + self.assertRaises(ModuleNotFoundError, unpickler.find_class, 'spam', 'log') + self.assertRaises(ValueError, unpickler.find_class, '', 'log') + + self.assertRaises(TypeError, unpickler.find_class, None, 'log') + self.assertRaises(TypeError, unpickler.find_class, 'math', None) + self.assertRaises((TypeError, AttributeError), unpickler4.find_class, 'math', None) + + def test_custom_find_class(self): + def loads(data): + class Unpickler(self.unpickler): + def find_class(self, module_name, global_name): + return (module_name, global_name) + return Unpickler(io.BytesIO(data)).load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + + def loads(data): + class Unpickler(self.unpickler): + @staticmethod + def find_class(module_name, global_name): + return (module_name, global_name) + return Unpickler(io.BytesIO(data)).load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + + def loads(data): + class Unpickler(self.unpickler): + @classmethod + def find_class(cls, module_name, global_name): + return (module_name, global_name) + return Unpickler(io.BytesIO(data)).load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + + def loads(data): + class Unpickler(self.unpickler): + pass + def find_class(module_name, global_name): + return (module_name, global_name) + unpickler = Unpickler(io.BytesIO(data)) + unpickler.find_class = find_class + return unpickler.load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + + def test_bad_ext_code(self): + # unregistered extension code + self.check_unpickling_error(ValueError, b'\x82\x01.') + self.check_unpickling_error(ValueError, b'\x82\xff.') + self.check_unpickling_error(ValueError, b'\x83\x01\x00.') + self.check_unpickling_error(ValueError, b'\x83\xff\xff.') + self.check_unpickling_error(ValueError, b'\x84\x01\x00\x00\x00.') + self.check_unpickling_error(ValueError, b'\x84\xff\xff\xff\x7f.') + # EXT specifies code <= 0 + self.check_unpickling_error(pickle.UnpicklingError, b'\x82\x00.') + self.check_unpickling_error(pickle.UnpicklingError, b'\x83\x00\x00.') + self.check_unpickling_error(pickle.UnpicklingError, b'\x84\x00\x00\x00\x00.') + self.check_unpickling_error(pickle.UnpicklingError, b'\x84\x00\x00\x00\x80.') + self.check_unpickling_error(pickle.UnpicklingError, b'\x84\xff\xff\xff\xff.') + + @support.cpython_only + def test_bad_ext_inverted_registry(self): + code = 1 + def check(key, exc): + with support.swap_item(copyreg._inverted_registry, code, key): + with self.assertRaises(exc): + self.loads(b'\x82\x01.') + check(None, ValueError) + check((), ValueError) + check((__name__,), (TypeError, ValueError)) + check((__name__, "MyList", "x"), (TypeError, ValueError)) + check((__name__, None), (TypeError, ValueError)) + 
check((None, "MyList"), (TypeError, ValueError)) + def test_bad_reduce(self): self.assertEqual(self.loads(b'cbuiltins\nint\n)R.'), 0) self.check_unpickling_error(TypeError, b'N)R.') @@ -1443,6 +1590,496 @@ def t(): [ToBeUnpickled] * 2) +class AbstractPicklingErrorTests: + # Subclass must define self.dumps, self.pickler. + + def test_bad_reduce_result(self): + obj = REX([print, ()]) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj = REX((print,)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj = REX((print, (), None, None, None, None, None)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_bad_reconstructor(self): + obj = REX((42, ())) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_reconstructor(self): + obj = REX((UnpickleableCallable(), ())) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_reconstructor_args(self): + obj = REX((print, [])) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_reconstructor_args(self): + obj = REX((print, (1, 2, UNPICKLEABLE))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_newobj_args(self): + obj = REX((copyreg.__newobj__, ())) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((IndexError, pickle.PicklingError)) as cm: + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj__, [REX])) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((IndexError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_bad_newobj_class(self): + obj = REX((copyreg.__newobj__, (NoNew(),))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_wrong_newobj_class(self): + obj = REX((copyreg.__newobj__, (str,))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_newobj_class(self): + class LocalREX(REX): pass + obj = LocalREX((copyreg.__newobj__, (LocalREX,))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((pickle.PicklingError, AttributeError)): + self.dumps(obj, proto) + + def test_unpickleable_newobj_args(self): + obj = REX((copyreg.__newobj__, (REX, 1, 2, UNPICKLEABLE))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_newobj_ex_args(self): + obj = REX((copyreg.__newobj_ex__, ())) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((ValueError, pickle.PicklingError)): + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj_ex__, 42)) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj_ex__, (REX, 42, {}))) + is_py = self.pickler is 
pickle._Pickler + for proto in protocols[2:4] if is_py else protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj_ex__, (REX, (), []))) + for proto in protocols[2:4] if is_py else protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_bad_newobj_ex__class(self): + obj = REX((copyreg.__newobj_ex__, (NoNew(), (), {}))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_wrong_newobj_ex_class(self): + if self.pickler is not pickle._Pickler: + self.skipTest('only verified in the Python implementation') + obj = REX((copyreg.__newobj_ex__, (str, (), {}))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_newobj_ex_class(self): + class LocalREX(REX): pass + obj = LocalREX((copyreg.__newobj_ex__, (LocalREX, (), {}))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((pickle.PicklingError, AttributeError)): + self.dumps(obj, proto) + + def test_unpickleable_newobj_ex_args(self): + obj = REX((copyreg.__newobj_ex__, (REX, (1, 2, UNPICKLEABLE), {}))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_newobj_ex_kwargs(self): + obj = REX((copyreg.__newobj_ex__, (REX, (), {'a': UNPICKLEABLE}))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_state(self): + obj = REX_state(UNPICKLEABLE) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_state_setter(self): + if self.pickler is pickle._Pickler: + self.skipTest('only verified in the C implementation') + obj = REX((print, (), 'state', None, None, 42)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_state_setter(self): + obj = REX((print, (), 'state', None, None, UnpickleableCallable())) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_state_with_state_setter(self): + obj = REX((print, (), UNPICKLEABLE, None, None, print)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_object_list_items(self): + # Issue4176: crash when 4th and 5th items of __reduce__() + # are not iterators + obj = REX((list, (), None, 42)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + if self.pickler is not pickle._Pickler: + # Python implementation is less strict and also accepts iterables. 
+ obj = REX((list, (), None, [])) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_unpickleable_object_list_items(self): + obj = REX_six([1, 2, UNPICKLEABLE]) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_object_dict_items(self): + # Issue4176: crash when 4th and 5th items of __reduce__() + # are not iterators + obj = REX((dict, (), None, None, 42)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + for proto in protocols: + obj = REX((dict, (), None, None, iter([('a',)]))) + with self.subTest(proto=proto): + with self.assertRaises((ValueError, TypeError)): + self.dumps(obj, proto) + + if self.pickler is not pickle._Pickler: + # Python implementation is less strict and also accepts iterables. + obj = REX((dict, (), None, None, [])) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_unpickleable_object_dict_items(self): + obj = REX_seven({'a': UNPICKLEABLE}) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_list_items(self): + obj = [1, [2, 3, UNPICKLEABLE]] + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + for n in [0, 1, 1000, 1005]: + obj = [*range(n), UNPICKLEABLE] + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_tuple_items(self): + obj = (1, (2, 3, UNPICKLEABLE)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + obj = (*range(10), UNPICKLEABLE) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_dict_items(self): + obj = {'a': {'b': UNPICKLEABLE}} + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + for n in [0, 1, 1000, 1005]: + obj = dict.fromkeys(range(n)) + obj['a'] = UNPICKLEABLE + for proto in protocols: + with self.subTest(proto=proto, n=n): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_set_items(self): + obj = {UNPICKLEABLE} + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_frozenset_items(self): + obj = frozenset({frozenset({UNPICKLEABLE})}) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_global_lookup_error(self): + # Global name does not exist + obj = REX('spam') + obj.__module__ = __name__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = 'nonexisting' + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = '' + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((ValueError, 
pickle.PicklingError)): + self.dumps(obj, proto) + + obj.__module__ = None + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_nonencodable_global_name_error(self): + for proto in protocols[:4]: + with self.subTest(proto=proto): + name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff' + obj = REX(name) + obj.__module__ = __name__ + with support.swap_item(globals(), name, obj): + with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_nonencodable_module_name_error(self): + for proto in protocols[:4]: + with self.subTest(proto=proto): + name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff' + obj = REX('test') + obj.__module__ = name + mod = types.SimpleNamespace(test=obj) + with support.swap_item(sys.modules, name, mod): + with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_nested_lookup_error(self): + # Nested name does not exist + obj = REX('AbstractPickleTests.spam') + obj.__module__ = __name__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = None + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_wrong_object_lookup_error(self): + # Name is bound to different object + obj = REX('AbstractPickleTests') + obj.__module__ = __name__ + AbstractPickleTests.ham = [] + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = None + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_local_lookup_error(self): + # Test that whichmodule() errors out cleanly when looking up + # an assumed globally-reachable object fails. + def f(): + pass + # Since the function is local, lookup will fail + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((AttributeError, pickle.PicklingError)): + self.dumps(f, proto) + # Same without a __module__ attribute (exercises a different path + # in _pickle.c). + del f.__module__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((AttributeError, pickle.PicklingError)): + self.dumps(f, proto) + # Yet a different path. 
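Background for the lookup-error tests: pickle saves functions and classes "by reference" as a module name plus qualified name, so anything that cannot be re-found that way (a local function, a renamed global, a missing module) fails at dump time. An illustrative sketch, independent of the patch:

```python
import pickle

def top_level():
    pass

def make_local():
    def local():          # __qualname__ is 'make_local.<locals>.local'
        pass
    return local

pickle.dumps(top_level)   # fine: resolvable as <module>.top_level
try:
    pickle.dumps(make_local())
except (pickle.PicklingError, AttributeError) as exc:
    print(type(exc).__name__)   # lookup fails, as the tests here expect
```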
+ f.__name__ = f.__qualname__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((AttributeError, pickle.PicklingError)): + self.dumps(f, proto) + + def test_reduce_ex_None(self): + c = REX_None() + with self.assertRaises(TypeError): + self.dumps(c) + + def test_reduce_None(self): + c = R_None() + with self.assertRaises(TypeError): + self.dumps(c) + + @no_tracing + def test_bad_getattr(self): + # Issue #3514: crash when there is an infinite loop in __getattr__ + x = BadGetattr() + for proto in range(2): + with support.infinite_recursion(25): + self.assertRaises(RuntimeError, self.dumps, x, proto) + for proto in range(2, pickle.HIGHEST_PROTOCOL + 1): + s = self.dumps(x, proto) + + def test_picklebuffer_error(self): + # PickleBuffer forbidden with protocol < 5 + pb = pickle.PickleBuffer(b"foobar") + for proto in range(0, 5): + with self.subTest(proto=proto): + with self.assertRaises(pickle.PickleError) as cm: + self.dumps(pb, proto) + self.assertEqual(str(cm.exception), + 'PickleBuffer can only be pickled with protocol >= 5') + + def test_non_continuous_buffer(self): + for proto in protocols[5:]: + with self.subTest(proto=proto): + pb = pickle.PickleBuffer(memoryview(b"foobar")[::2]) + with self.assertRaises((pickle.PicklingError, BufferError)): + self.dumps(pb, proto) + + def test_buffer_callback_error(self): + def buffer_callback(buffers): + raise CustomError + pb = pickle.PickleBuffer(b"foobar") + with self.assertRaises(CustomError): + self.dumps(pb, 5, buffer_callback=buffer_callback) + + def test_evil_pickler_mutating_collection(self): + # https://github.com/python/cpython/issues/92930 + global Clearer + class Clearer: + pass + + def check(collection): + class EvilPickler(self.pickler): + def persistent_id(self, obj): + if isinstance(obj, Clearer): + collection.clear() + return None + pickler = EvilPickler(io.BytesIO(), proto) + try: + pickler.dump(collection) + except RuntimeError as e: + expected = "changed size during iteration" + self.assertIn(expected, str(e)) + + for proto in protocols: + check([Clearer()]) + check([Clearer(), Clearer()]) + check({Clearer()}) + check({Clearer(), Clearer()}) + check({Clearer(): 1}) + check({Clearer(): 1, Clearer(): 2}) + check({1: Clearer(), 2: Clearer()}) + + @support.cpython_only + def test_bad_ext_code(self): + # This should never happen in normal circumstances, because the type + # and the value of the extesion code is checked in copyreg.add_extension(). + key = (__name__, 'MyList') + def check(code, exc): + assert key not in copyreg._extension_registry + assert code not in copyreg._inverted_registry + with (support.swap_item(copyreg._extension_registry, key, code), + support.swap_item(copyreg._inverted_registry, code, key)): + for proto in protocols[2:]: + with self.assertRaises(exc): + self.dumps(MyList, proto) + + check(object(), TypeError) + check(None, TypeError) + check(-1, (RuntimeError, struct.error)) + check(0, RuntimeError) + check(2**31, (RuntimeError, OverflowError, struct.error)) + check(2**1000, (OverflowError, struct.error)) + check(-2**1000, (OverflowError, struct.error)) + class AbstractPickleTests: # Subclass must define self.dumps, self.loads. 
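On the extension-code machinery the last test pokes at: in normal use `copyreg.add_extension()` validates the code (it must be an int in `1..0x7fffffff`), and a registered `(module, name) -> code` mapping lets protocol 2+ pickles refer to a global by a small integer instead of its dotted name. A sketch of the supported path; the module and code values here are purely illustrative:

```python
import copyreg
import pickle
from collections import OrderedDict

copyreg.add_extension('collections', 'OrderedDict', 0x7f000001)
try:
    # With the registration in place, protocol >= 2 emits an EXT opcode
    # carrying the code instead of the module/qualname pair.
    data = pickle.dumps(OrderedDict, protocol=2)
finally:
    copyreg.remove_extension('collections', 'OrderedDict', 0x7f000001)

try:
    copyreg.add_extension('collections', 'OrderedDict', -1)
except ValueError:
    pass   # out-of-range codes are rejected up front, hence "should never happen"
```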
@@ -2453,55 +3090,12 @@ def test_reduce_calls_base(self): y = self.loads(s) self.assertEqual(y._reduce_called, 1) - def test_reduce_ex_None(self): - c = REX_None() - with self.assertRaises(TypeError): - self.dumps(c) - - def test_reduce_None(self): - c = R_None() - with self.assertRaises(TypeError): - self.dumps(c) - def test_pickle_setstate_None(self): c = C_None_setstate() p = self.dumps(c) with self.assertRaises(TypeError): self.loads(p) - @no_tracing - def test_bad_getattr(self): - # Issue #3514: crash when there is an infinite loop in __getattr__ - x = BadGetattr() - for proto in range(2): - with support.infinite_recursion(25): - self.assertRaises(RuntimeError, self.dumps, x, proto) - for proto in range(2, pickle.HIGHEST_PROTOCOL + 1): - s = self.dumps(x, proto) - - def test_reduce_bad_iterator(self): - # Issue4176: crash when 4th and 5th items of __reduce__() - # are not iterators - class C(object): - def __reduce__(self): - # 4th item is not an iterator - return list, (), None, [], None - class D(object): - def __reduce__(self): - # 5th item is not an iterator - return dict, (), None, None, [] - - # Python implementation is less strict and also accepts iterables. - for proto in protocols: - try: - self.dumps(C(), proto) - except pickle.PicklingError: - pass - try: - self.dumps(D(), proto) - except pickle.PicklingError: - pass - def test_many_puts_and_gets(self): # Test that internal data structures correctly deal with lots of # puts/gets. @@ -2818,6 +3412,18 @@ class Recursive: self.assertIs(unpickled, Recursive) del Recursive.mod # break reference loop + def test_recursive_nested_names2(self): + global Recursive + class Recursive: + pass + Recursive.ref = Recursive + Recursive.__qualname__ = 'Recursive.ref' + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + unpickled = self.loads(self.dumps(Recursive, proto)) + self.assertIs(unpickled, Recursive) + del Recursive.ref # break reference loop + def test_py_methods(self): global PyMethodsTest class PyMethodsTest: @@ -2938,27 +3544,6 @@ def test_compat_pickle(self): self.assertIn(('c%s\n%s' % (mod, name)).encode(), pickled) self.assertIs(type(self.loads(pickled)), type(val)) - def test_local_lookup_error(self): - # Test that whichmodule() errors out cleanly when looking up - # an assumed globally-reachable object fails. - def f(): - pass - # Since the function is local, lookup will fail - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - with self.assertRaises((AttributeError, pickle.PicklingError)): - pickletools.dis(self.dumps(f, proto)) - # Same without a __module__ attribute (exercises a different path - # in _pickle.c). - del f.__module__ - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - with self.assertRaises((AttributeError, pickle.PicklingError)): - pickletools.dis(self.dumps(f, proto)) - # Yet a different path. 
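Related background for `test_recursive_nested_names2` above: a class is pickled as its module plus `__qualname__`, and unpickling resolves the dotted name one attribute at a time, so any attribute chain that leads back to the same class object round-trips. A small sketch, not from the patch:

```python
import pickle

class Outer:
    class Inner:
        pass

# Stored as (module, 'Outer.Inner'); loads() walks the dotted path with getattr.
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
    assert pickle.loads(pickle.dumps(Outer.Inner, proto)) is Outer.Inner
```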
- f.__name__ = f.__qualname__ - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - with self.assertRaises((AttributeError, pickle.PicklingError)): - pickletools.dis(self.dumps(f, proto)) - # # PEP 574 tests below # @@ -3069,20 +3654,6 @@ def test_oob_buffers_writable_to_readonly(self): self.assertIs(type(new), type(obj)) self.assertEqual(new, obj) - def test_picklebuffer_error(self): - # PickleBuffer forbidden with protocol < 5 - pb = pickle.PickleBuffer(b"foobar") - for proto in range(0, 5): - with self.assertRaises(pickle.PickleError): - self.dumps(pb, proto) - - def test_buffer_callback_error(self): - def buffer_callback(buffers): - 1/0 - pb = pickle.PickleBuffer(b"foobar") - with self.assertRaises(ZeroDivisionError): - self.dumps(pb, 5, buffer_callback=buffer_callback) - def test_buffers_error(self): pb = pickle.PickleBuffer(b"foobar") for proto in range(5, pickle.HIGHEST_PROTOCOL + 1): @@ -3174,37 +3745,6 @@ def __reduce__(self): expected = "changed size during iteration" self.assertIn(expected, str(e)) - def test_evil_pickler_mutating_collection(self): - # https://github.com/python/cpython/issues/92930 - if not hasattr(self, "pickler"): - raise self.skipTest(f"{type(self)} has no associated pickler type") - - global Clearer - class Clearer: - pass - - def check(collection): - class EvilPickler(self.pickler): - def persistent_id(self, obj): - if isinstance(obj, Clearer): - collection.clear() - return None - pickler = EvilPickler(io.BytesIO(), proto) - try: - pickler.dump(collection) - except RuntimeError as e: - expected = "changed size during iteration" - self.assertIn(expected, str(e)) - - for proto in protocols: - check([Clearer()]) - check([Clearer(), Clearer()]) - check({Clearer()}) - check({Clearer(), Clearer()}) - check({Clearer(): 1}) - check({Clearer(): 1, Clearer(): 2}) - check({1: Clearer(), 2: Clearer()}) - class BigmemPickleTests: @@ -3335,6 +3875,18 @@ def test_huge_str_64b(self, size): # Test classes for reduce_ex +class R: + def __init__(self, reduce=None): + self.reduce = reduce + def __reduce__(self, proto): + return self.reduce + +class REX: + def __init__(self, reduce_ex=None): + self.reduce_ex = reduce_ex + def __reduce_ex__(self, proto): + return self.reduce_ex + class REX_one(object): """No __reduce_ex__ here, but inheriting it from object""" _reduce_called = 0 @@ -3425,6 +3977,19 @@ def __getstate__(self): __setstate__ = None +class CustomError(Exception): + pass + +class Unpickleable: + def __reduce__(self): + raise CustomError + +UNPICKLEABLE = Unpickleable() + +class UnpickleableCallable(Unpickleable): + def __call__(self, *args, **kwargs): + pass + # Test classes for newobj @@ -3474,7 +4039,9 @@ class MyIntWithNew2(MyIntWithNew): class SlotList(MyList): __slots__ = ["foo"] -class SimpleNewObj(int): +# Ruff "redefined while unused" false positive here due to `global` variables +# being assigned (and then restored) from within test methods earlier in the file +class SimpleNewObj(int): # noqa: F811 def __init__(self, *args, **kwargs): # raise an error, to make sure this isn't called raise TypeError("SimpleNewObj.__init__() didn't expect to get called") @@ -3493,6 +4060,12 @@ class BadGetattr: def __getattr__(self, key): self.foo +class NoNew: + def __getattribute__(self, name): + if name == '__new__': + raise AttributeError + return super().__getattribute__(name) + class AbstractPickleModuleTests: @@ -3565,7 +4138,7 @@ def raises_oserror(self, *args, **kwargs): raise OSError @property def bad_property(self): - 1/0 + raise CustomError # File without read and 
readline class F: @@ -3586,23 +4159,23 @@ class F: class F: read = bad_property readline = raises_oserror - self.assertRaises(ZeroDivisionError, self.Unpickler, F()) + self.assertRaises(CustomError, self.Unpickler, F()) # File with bad readline class F: readline = bad_property read = raises_oserror - self.assertRaises(ZeroDivisionError, self.Unpickler, F()) + self.assertRaises(CustomError, self.Unpickler, F()) # File with bad readline, no read class F: readline = bad_property - self.assertRaises(ZeroDivisionError, self.Unpickler, F()) + self.assertRaises(CustomError, self.Unpickler, F()) # File with bad read, no readline class F: read = bad_property - self.assertRaises((AttributeError, ZeroDivisionError), self.Unpickler, F()) + self.assertRaises((AttributeError, CustomError), self.Unpickler, F()) # File with bad peek class F: @@ -3611,7 +4184,7 @@ class F: readline = raises_oserror try: self.Unpickler(F()) - except ZeroDivisionError: + except CustomError: pass # File with bad readinto @@ -3621,7 +4194,7 @@ class F: readline = raises_oserror try: self.Unpickler(F()) - except ZeroDivisionError: + except CustomError: pass def test_pickler_bad_file(self): @@ -3634,8 +4207,8 @@ class F: class F: @property def write(self): - 1/0 - self.assertRaises(ZeroDivisionError, self.Pickler, F()) + raise CustomError + self.assertRaises(CustomError, self.Pickler, F()) def check_dumps_loads_oob_buffers(self, dumps, loads): # No need to do the full gamut of tests here, just enough to @@ -3743,9 +4316,15 @@ def test_return_correct_type(self): def test_protocol0_is_ascii_only(self): non_ascii_str = "\N{EMPTY SET}" - self.assertRaises(pickle.PicklingError, self.dumps, non_ascii_str, 0) + with self.assertRaises(pickle.PicklingError) as cm: + self.dumps(non_ascii_str, 0) + self.assertEqual(str(cm.exception), + 'persistent IDs in protocol 0 must be ASCII strings') pickled = pickle.PERSID + non_ascii_str.encode('utf-8') + b'\n.' - self.assertRaises(pickle.UnpicklingError, self.loads, pickled) + with self.assertRaises(pickle.UnpicklingError) as cm: + self.loads(pickled) + self.assertEqual(str(cm.exception), + 'persistent IDs in protocol 0 must be ASCII strings') class AbstractPicklerUnpicklerObjectTests: diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 81cd7f14d2a..ed23f73e3cf 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -59,7 +59,8 @@ "Py_DEBUG", "exceeds_recursion_limit", "get_c_recursion_limit", "skip_on_s390x", "without_optimizer", - "force_not_colorized" + "force_not_colorized", + "BrokenIter", ] @@ -388,7 +389,7 @@ def skip_if_buildbot(reason=None): reason = 'not suitable for buildbots' try: isbuildbot = getpass.getuser().lower() == 'buildbot' - except (KeyError, EnvironmentError) as err: + except (KeyError, OSError) as err: warnings.warn(f'getpass.getuser() failed {err}.', RuntimeWarning) isbuildbot = False return unittest.skipIf(isbuildbot, reason) @@ -865,6 +866,15 @@ def check_cflags_pgo(): return any(option in cflags_nodist for option in pgo_options) +def check_bolt_optimized(): + # Always return false, if the platform is WASI, + # because BOLT optimization does not support WASM binary. 
+ if is_wasi: + return False + config_args = sysconfig.get_config_var('CONFIG_ARGS') or '' + return '--enable-bolt' in config_args + + Py_GIL_DISABLED = bool(sysconfig.get_config_var('Py_GIL_DISABLED')) def requires_gil_enabled(msg="needs the GIL enabled"): @@ -2589,20 +2599,79 @@ def copy_python_src_ignore(path, names): return ignored +def iter_builtin_types(): + for obj in __builtins__.values(): + if not isinstance(obj, type): + continue + cls = obj + if cls.__module__ != 'builtins': + continue + yield cls + + +def iter_slot_wrappers(cls): + assert cls.__module__ == 'builtins', cls + + def is_slot_wrapper(name, value): + if not isinstance(value, types.WrapperDescriptorType): + assert not repr(value).startswith('= end: raise TimeoutError - obj = _channels.recv(self._id, _sentinel) + obj, unboundop = _channels.recv(self._id, _sentinel) + if unboundop is not None: + assert obj is None, repr(obj) + return _resolve_unbound(unboundop) return obj def recv_nowait(self, default=_NOT_SET): @@ -122,9 +158,13 @@ def recv_nowait(self, default=_NOT_SET): is the same as recv(). """ if default is _NOT_SET: - return _channels.recv(self._id) + obj, unboundop = _channels.recv(self._id) else: - return _channels.recv(self._id, default) + obj, unboundop = _channels.recv(self._id, default) + if unboundop is not None: + assert obj is None, repr(obj) + return _resolve_unbound(unboundop) + return obj def close(self): _channels.close(self._id, recv=True) @@ -135,43 +175,79 @@ class SendChannel(_ChannelEnd): _end = 'send' + def __new__(cls, cid, *, _unbound=None): + if _unbound is None: + try: + op = _channels.get_channel_defaults(cid) + _unbound = (op,) + except ChannelNotFoundError: + _unbound = _serialize_unbound(UNBOUND) + self = super().__new__(cls, cid) + self._unbound = _unbound + return self + @property def is_closed(self): info = self._info return info.closed or info.closing - def send(self, obj, timeout=None): + def send(self, obj, timeout=None, *, + unbound=None, + ): """Send the object (i.e. its data) to the channel's receiving end. This blocks until the object is received. """ - _channels.send(self._id, obj, timeout=timeout, blocking=True) + if unbound is None: + unboundop, = self._unbound + else: + unboundop, = _serialize_unbound(unbound) + _channels.send(self._id, obj, unboundop, timeout=timeout, blocking=True) - def send_nowait(self, obj): + def send_nowait(self, obj, *, + unbound=None, + ): """Send the object to the channel's receiving end. If the object is immediately received then return True (else False). Otherwise this is the same as send(). """ + if unbound is None: + unboundop, = self._unbound + else: + unboundop, = _serialize_unbound(unbound) # XXX Note that at the moment channel_send() only ever returns # None. This should be fixed when channel_send_wait() is added. # See bpo-32604 and gh-19829. - return _channels.send(self._id, obj, blocking=False) + return _channels.send(self._id, obj, unboundop, blocking=False) - def send_buffer(self, obj, timeout=None): + def send_buffer(self, obj, timeout=None, *, + unbound=None, + ): """Send the object's buffer to the channel's receiving end. This blocks until the object is received. 
""" - _channels.send_buffer(self._id, obj, timeout=timeout, blocking=True) + if unbound is None: + unboundop, = self._unbound + else: + unboundop, = _serialize_unbound(unbound) + _channels.send_buffer(self._id, obj, unboundop, + timeout=timeout, blocking=True) - def send_buffer_nowait(self, obj): + def send_buffer_nowait(self, obj, *, + unbound=None, + ): """Send the object's buffer to the channel's receiving end. If the object is immediately received then return True (else False). Otherwise this is the same as send(). """ - return _channels.send_buffer(self._id, obj, blocking=False) + if unbound is None: + unboundop, = self._unbound + else: + unboundop, = _serialize_unbound(unbound) + return _channels.send_buffer(self._id, obj, unboundop, blocking=False) def close(self): _channels.close(self._id, send=True) diff --git a/Lib/test/support/interpreters/queues.py b/Lib/test/support/interpreters/queues.py index 1b9e7481f2e..deb8e8613af 100644 --- a/Lib/test/support/interpreters/queues.py +++ b/Lib/test/support/interpreters/queues.py @@ -5,16 +5,22 @@ import time import weakref import _interpqueues as _queues +from . import _crossinterp # aliases: from _interpqueues import ( QueueError, QueueNotFoundError, ) +from ._crossinterp import ( + UNBOUND_ERROR, UNBOUND_REMOVE, +) __all__ = [ + 'UNBOUND', 'UNBOUND_ERROR', 'UNBOUND_REMOVE', 'create', 'list_all', 'Queue', 'QueueError', 'QueueNotFoundError', 'QueueEmpty', 'QueueFull', + 'ItemInterpreterDestroyed', ] @@ -32,26 +38,54 @@ class QueueFull(QueueError, queue.Full): """ +class ItemInterpreterDestroyed(QueueError, + _crossinterp.ItemInterpreterDestroyed): + """Raised from get() and get_nowait().""" + + _SHARED_ONLY = 0 _PICKLED = 1 -def create(maxsize=0, *, syncobj=False): + +UNBOUND = _crossinterp.UnboundItem.singleton('queue', __name__) + + +def _serialize_unbound(unbound): + if unbound is UNBOUND: + unbound = _crossinterp.UNBOUND + return _crossinterp.serialize_unbound(unbound) + + +def _resolve_unbound(flag): + resolved = _crossinterp.resolve_unbound(flag, ItemInterpreterDestroyed) + if resolved is _crossinterp.UNBOUND: + resolved = UNBOUND + return resolved + + +def create(maxsize=0, *, syncobj=False, unbounditems=UNBOUND): """Return a new cross-interpreter queue. The queue may be used to pass data safely between interpreters. "syncobj" sets the default for Queue.put() and Queue.put_nowait(). + + "unbounditems" likewise sets the default. See Queue.put() for + supported values. The default value is UNBOUND, which replaces + the unbound item. """ fmt = _SHARED_ONLY if syncobj else _PICKLED - qid = _queues.create(maxsize, fmt) - return Queue(qid, _fmt=fmt) + unbound = _serialize_unbound(unbounditems) + unboundop, = unbound + qid = _queues.create(maxsize, fmt, unboundop) + return Queue(qid, _fmt=fmt, _unbound=unbound) def list_all(): """Return a list of all open queues.""" - return [Queue(qid, _fmt=fmt) - for qid, fmt in _queues.list_all()] + return [Queue(qid, _fmt=fmt, _unbound=(unboundop,)) + for qid, fmt, unboundop in _queues.list_all()] _known_queues = weakref.WeakValueDictionary() @@ -59,20 +93,28 @@ def list_all(): class Queue: """A cross-interpreter queue.""" - def __new__(cls, id, /, *, _fmt=None): + def __new__(cls, id, /, *, _fmt=None, _unbound=None): # There is only one instance for any given ID. 
if isinstance(id, int): id = int(id) else: raise TypeError(f'id must be an int, got {id!r}') if _fmt is None: - _fmt, = _queues.get_queue_defaults(id) + if _unbound is None: + _fmt, op = _queues.get_queue_defaults(id) + _unbound = (op,) + else: + _fmt, _ = _queues.get_queue_defaults(id) + elif _unbound is None: + _, op = _queues.get_queue_defaults(id) + _unbound = (op,) try: self = _known_queues[id] except KeyError: self = super().__new__(cls) self._id = id self._fmt = _fmt + self._unbound = _unbound _known_queues[id] = self _queues.bind(id) return self @@ -124,6 +166,7 @@ def qsize(self): def put(self, obj, timeout=None, *, syncobj=None, + unbound=None, _delay=10 / 1000, # 10 milliseconds ): """Add the object to the queue. @@ -131,7 +174,7 @@ def put(self, obj, timeout=None, *, This blocks while the queue is full. If "syncobj" is None (the default) then it uses the - queue's default, set with create_queue().. + queue's default, set with create_queue(). If "syncobj" is false then all objects are supported, at the expense of worse performance. @@ -152,11 +195,37 @@ def put(self, obj, timeout=None, *, actually is. That's a slightly different and stronger promise than just (initial) equality, which is all "syncobj=False" can promise. + + "unbound" controls the behavior of Queue.get() for the given + object if the current interpreter (calling put()) is later + destroyed. + + If "unbound" is None (the default) then it uses the + queue's default, set with create_queue(), + which is usually UNBOUND. + + If "unbound" is UNBOUND_ERROR then get() will raise an + ItemInterpreterDestroyed exception if the original interpreter + has been destroyed. This does not otherwise affect the queue; + the next call to put() will work like normal, returning the next + item in the queue. + + If "unbound" is UNBOUND_REMOVE then the item will be removed + from the queue as soon as the original interpreter is destroyed. + Be aware that this will introduce an imbalance between put() + and get() calls. + + If "unbound" is UNBOUND then it is returned by get() in place + of the unbound item. """ if syncobj is None: fmt = self._fmt else: fmt = _SHARED_ONLY if syncobj else _PICKLED + if unbound is None: + unboundop, = self._unbound + else: + unboundop, = _serialize_unbound(unbound) if timeout is not None: timeout = int(timeout) if timeout < 0: @@ -166,7 +235,7 @@ def put(self, obj, timeout=None, *, obj = pickle.dumps(obj) while True: try: - _queues.put(self._id, obj, fmt) + _queues.put(self._id, obj, fmt, unboundop) except QueueFull as exc: if timeout is not None and time.time() >= end: raise # re-raise @@ -174,14 +243,18 @@ def put(self, obj, timeout=None, *, else: break - def put_nowait(self, obj, *, syncobj=None): + def put_nowait(self, obj, *, syncobj=None, unbound=None): if syncobj is None: fmt = self._fmt else: fmt = _SHARED_ONLY if syncobj else _PICKLED + if unbound is None: + unboundop, = self._unbound + else: + unboundop, = _serialize_unbound(unbound) if fmt is _PICKLED: obj = pickle.dumps(obj) - _queues.put(self._id, obj, fmt) + _queues.put(self._id, obj, fmt, unboundop) def get(self, timeout=None, *, _delay=10 / 1000, # 10 milliseconds @@ -189,6 +262,10 @@ def get(self, timeout=None, *, """Return the next object from the queue. This blocks while the queue is empty. + + If the next item's original interpreter has been destroyed + then the "next object" is determined by the value of the + "unbound" argument to put(). 
""" if timeout is not None: timeout = int(timeout) @@ -197,13 +274,16 @@ def get(self, timeout=None, *, end = time.time() + timeout while True: try: - obj, fmt = _queues.get(self._id) + obj, fmt, unboundop = _queues.get(self._id) except QueueEmpty as exc: if timeout is not None and time.time() >= end: raise # re-raise time.sleep(_delay) else: break + if unboundop is not None: + assert obj is None, repr(obj) + return _resolve_unbound(unboundop) if fmt == _PICKLED: obj = pickle.loads(obj) else: @@ -217,9 +297,12 @@ def get_nowait(self): is the same as get(). """ try: - obj, fmt = _queues.get(self._id) + obj, fmt, unboundop = _queues.get(self._id) except QueueEmpty as exc: raise # re-raise + if unboundop is not None: + assert obj is None, repr(obj) + return _resolve_unbound(unboundop) if fmt == _PICKLED: obj = pickle.loads(obj) else: diff --git a/Lib/test/support/testcase.py b/Lib/test/support/testcase.py index 1e4363b1578..fad1e4cb349 100644 --- a/Lib/test/support/testcase.py +++ b/Lib/test/support/testcase.py @@ -1,3 +1,6 @@ +from math import copysign, isnan + + class ExceptionIsLikeMixin: def assertExceptionIsLike(self, exc, template): """ @@ -23,3 +26,40 @@ def assertExceptionIsLike(self, exc, template): self.assertEqual(len(exc.exceptions), len(template.exceptions)) for e, t in zip(exc.exceptions, template.exceptions): self.assertExceptionIsLike(e, t) + + +class FloatsAreIdenticalMixin: + def assertFloatsAreIdentical(self, x, y): + """Fail unless floats x and y are identical, in the sense that: + (1) both x and y are nans, or + (2) both x and y are infinities, with the same sign, or + (3) both x and y are zeros, with the same sign, or + (4) x and y are both finite and nonzero, and x == y + + """ + msg = 'floats {!r} and {!r} are not identical' + + if isnan(x) or isnan(y): + if isnan(x) and isnan(y): + return + elif x == y: + if x != 0.0: + return + # both zero; check that signs match + elif copysign(1.0, x) == copysign(1.0, y): + return + else: + msg += ': zeros have different signs' + self.fail(msg.format(x, y)) + + +class ComplexesAreIdenticalMixin(FloatsAreIdenticalMixin): + def assertComplexesAreIdentical(self, x, y): + """Fail unless complex numbers x and y have equal values and signs. + + In particular, if x and y both have real (or imaginary) part + zero, but the zeros have different signs, this test will fail. + + """ + self.assertFloatsAreIdentical(x.real, y.real) + self.assertFloatsAreIdentical(x.imag, y.imag) diff --git a/Lib/test/test__interpchannels.py b/Lib/test/test__interpchannels.py index b76c58917c0..4a7f04b9df9 100644 --- a/Lib/test/test__interpchannels.py +++ b/Lib/test/test__interpchannels.py @@ -8,6 +8,8 @@ from test.support import import_helper +_channels = import_helper.import_module('_interpchannels') +from test.support.interpreters import _crossinterp from test.test__interpreters import ( _interpreters, _run_output, @@ -15,7 +17,7 @@ ) -_channels = import_helper.import_module('_interpchannels') +REPLACE = _crossinterp._UNBOUND_CONSTANT_TO_FLAG[_crossinterp.UNBOUND] # Additional tests are found in Lib/test/test_interpreters/test_channels.py. 
@@ -29,9 +31,19 @@ def recv_wait(cid): while True: try: - return _channels.recv(cid) + obj, unboundop = _channels.recv(cid) except _channels.ChannelEmptyError: time.sleep(0.1) + else: + assert unboundop is None, repr(unboundop) + return obj + + +def recv_nowait(cid, *args, unbound=False): + obj, unboundop = _channels.recv(cid, *args) + assert (unboundop is None) != unbound, repr(unboundop) + return obj + #@contextmanager #def run_threaded(id, source, **shared): @@ -212,7 +224,7 @@ def _run_action(cid, action, end, state): else: raise Exception('expected ChannelEmptyError') else: - _channels.recv(cid) + recv_nowait(cid) return state.decr() else: raise ValueError(end) @@ -235,7 +247,7 @@ def _run_action(cid, action, end, state): def clean_up_channels(): - for cid in _channels.list_all(): + for cid, _ in _channels.list_all(): try: _channels.destroy(cid) except _channels.ChannelNotFoundError: @@ -297,7 +309,7 @@ def test_bad_kwargs(self): _channels._channel_id(10, send=False, recv=False) def test_does_not_exist(self): - cid = _channels.create() + cid = _channels.create(REPLACE) with self.assertRaises(_channels.ChannelNotFoundError): _channels._channel_id(int(cid) + 1) # unforced @@ -319,9 +331,9 @@ def test_repr(self): self.assertEqual(repr(cid), 'ChannelID(10)') def test_equality(self): - cid1 = _channels.create() + cid1 = _channels.create(REPLACE) cid2 = _channels._channel_id(int(cid1)) - cid3 = _channels.create() + cid3 = _channels.create(REPLACE) self.assertTrue(cid1 == cid1) self.assertTrue(cid1 == cid2) @@ -341,11 +353,11 @@ def test_equality(self): self.assertTrue(cid1 != cid3) def test_shareable(self): - chan = _channels.create() + chan = _channels.create(REPLACE) - obj = _channels.create() + obj = _channels.create(REPLACE) _channels.send(chan, obj, blocking=False) - got = _channels.recv(chan) + got = recv_nowait(chan) self.assertEqual(got, obj) self.assertIs(type(got), type(obj)) @@ -356,15 +368,15 @@ def test_shareable(self): class ChannelTests(TestBase): def test_create_cid(self): - cid = _channels.create() + cid = _channels.create(REPLACE) self.assertIsInstance(cid, _channels.ChannelID) def test_sequential_ids(self): - before = _channels.list_all() - id1 = _channels.create() - id2 = _channels.create() - id3 = _channels.create() - after = _channels.list_all() + before = [cid for cid, _ in _channels.list_all()] + id1 = _channels.create(REPLACE) + id2 = _channels.create(REPLACE) + id3 = _channels.create(REPLACE) + after = [cid for cid, _ in _channels.list_all()] self.assertEqual(id2, int(id1) + 1) self.assertEqual(id3, int(id2) + 1) @@ -374,7 +386,7 @@ def test_ids_global(self): id1 = _interpreters.create() out = _run_output(id1, dedent(""" import _interpchannels as _channels - cid = _channels.create() + cid = _channels.create(3) print(cid) """)) cid1 = int(out.strip()) @@ -382,7 +394,7 @@ def test_ids_global(self): id2 = _interpreters.create() out = _run_output(id2, dedent(""" import _interpchannels as _channels - cid = _channels.create() + cid = _channels.create(3) print(cid) """)) cid2 = int(out.strip()) @@ -392,7 +404,7 @@ def test_ids_global(self): def test_channel_list_interpreters_none(self): """Test listing interpreters for a channel with no associations.""" # Test for channel with no associated _interpreters. 
- cid = _channels.create() + cid = _channels.create(REPLACE) send_interps = _channels.list_interpreters(cid, send=True) recv_interps = _channels.list_interpreters(cid, send=False) self.assertEqual(send_interps, []) @@ -401,7 +413,7 @@ def test_channel_list_interpreters_none(self): def test_channel_list_interpreters_basic(self): """Test basic listing channel _interpreters.""" interp0, *_ = _interpreters.get_main() - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, "send", blocking=False) # Test for a channel that has one end associated to an interpreter. send_interps = _channels.list_interpreters(cid, send=True) @@ -412,7 +424,7 @@ def test_channel_list_interpreters_basic(self): interp1 = _interpreters.create() _run_output(interp1, dedent(f""" import _interpchannels as _channels - obj = _channels.recv({cid}) + _channels.recv({cid}) """)) # Test for channel that has both ends associated to an interpreter. send_interps = _channels.list_interpreters(cid, send=True) @@ -426,7 +438,7 @@ def test_channel_list_interpreters_multiple(self): interp1 = _interpreters.create() interp2 = _interpreters.create() interp3 = _interpreters.create() - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, "send", blocking=False) _run_output(interp1, dedent(f""" @@ -435,11 +447,11 @@ def test_channel_list_interpreters_multiple(self): """)) _run_output(interp2, dedent(f""" import _interpchannels as _channels - obj = _channels.recv({cid}) + _channels.recv({cid}) """)) _run_output(interp3, dedent(f""" import _interpchannels as _channels - obj = _channels.recv({cid}) + _channels.recv({cid}) """)) send_interps = _channels.list_interpreters(cid, send=True) recv_interps = _channels.list_interpreters(cid, send=False) @@ -450,11 +462,11 @@ def test_channel_list_interpreters_destroyed(self): """Test listing channel interpreters with a destroyed interpreter.""" interp0, *_ = _interpreters.get_main() interp1 = _interpreters.create() - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, "send", blocking=False) _run_output(interp1, dedent(f""" import _interpchannels as _channels - obj = _channels.recv({cid}) + _channels.recv({cid}) """)) # Should be one interpreter associated with each end. send_interps = _channels.list_interpreters(cid, send=True) @@ -476,16 +488,16 @@ def test_channel_list_interpreters_released(self): interp0, *_ = _interpreters.get_main() interp1 = _interpreters.create() interp2 = _interpreters.create() - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, "data", blocking=False) _run_output(interp1, dedent(f""" import _interpchannels as _channels - obj = _channels.recv({cid}) + _channels.recv({cid}) """)) _channels.send(cid, "data", blocking=False) _run_output(interp2, dedent(f""" import _interpchannels as _channels - obj = _channels.recv({cid}) + _channels.recv({cid}) """)) # Check the setup. send_interps = _channels.list_interpreters(cid, send=True) @@ -516,7 +528,7 @@ def test_channel_list_interpreters_closed(self): """Test listing channel interpreters with a closed channel.""" interp0, *_ = _interpreters.get_main() interp1 = _interpreters.create() - cid = _channels.create() + cid = _channels.create(REPLACE) # Put something in the channel so that it's not empty. 
_channels.send(cid, "send", blocking=False) @@ -538,7 +550,7 @@ def test_channel_list_interpreters_closed_send_end(self): """Test listing channel interpreters with a channel's send end closed.""" interp0, *_ = _interpreters.get_main() interp1 = _interpreters.create() - cid = _channels.create() + cid = _channels.create(REPLACE) # Put something in the channel so that it's not empty. _channels.send(cid, "send", blocking=False) @@ -570,7 +582,7 @@ def test_channel_list_interpreters_closed_send_end(self): _channels.list_interpreters(cid, send=False) def test_allowed_types(self): - cid = _channels.create() + cid = _channels.create(REPLACE) objects = [ None, 'spam', @@ -580,7 +592,7 @@ def test_allowed_types(self): for obj in objects: with self.subTest(obj): _channels.send(cid, obj, blocking=False) - got = _channels.recv(cid) + got = recv_nowait(cid) self.assertEqual(got, obj) self.assertIs(type(got), type(obj)) @@ -589,7 +601,7 @@ def test_allowed_types(self): # XXX What about between interpreters? def test_run_string_arg_unresolved(self): - cid = _channels.create() + cid = _channels.create(REPLACE) interp = _interpreters.create() _interpreters.set___main___attrs(interp, dict(cid=cid.send)) @@ -598,7 +610,7 @@ def test_run_string_arg_unresolved(self): print(cid.end) _channels.send(cid, b'spam', blocking=False) """)) - obj = _channels.recv(cid) + obj = recv_nowait(cid) self.assertEqual(obj, b'spam') self.assertEqual(out.strip(), 'send') @@ -608,7 +620,7 @@ def test_run_string_arg_unresolved(self): # Note: this test caused crashes on some buildbots (bpo-33615). @unittest.skip('disabled until high-level channels exist') def test_run_string_arg_resolved(self): - cid = _channels.create() + cid = _channels.create(REPLACE) cid = _channels._channel_id(cid, _resolve=True) interp = _interpreters.create() @@ -618,7 +630,7 @@ def test_run_string_arg_resolved(self): _channels.send(chan.id, b'spam', blocking=False) """), dict(chan=cid.send)) - obj = _channels.recv(cid) + obj = recv_nowait(cid) self.assertEqual(obj, b'spam') self.assertEqual(out.strip(), 'send') @@ -627,10 +639,10 @@ def test_run_string_arg_resolved(self): # send/recv def test_send_recv_main(self): - cid = _channels.create() + cid = _channels.create(REPLACE) orig = b'spam' _channels.send(cid, orig, blocking=False) - obj = _channels.recv(cid) + obj = recv_nowait(cid) self.assertEqual(obj, orig) self.assertIsNot(obj, orig) @@ -639,27 +651,27 @@ def test_send_recv_same_interpreter(self): id1 = _interpreters.create() out = _run_output(id1, dedent(""" import _interpchannels as _channels - cid = _channels.create() + cid = _channels.create(REPLACE) orig = b'spam' _channels.send(cid, orig, blocking=False) - obj = _channels.recv(cid) + obj, _ = _channels.recv(cid) assert obj is not orig assert obj == orig """)) def test_send_recv_different_interpreters(self): - cid = _channels.create() + cid = _channels.create(REPLACE) id1 = _interpreters.create() out = _run_output(id1, dedent(f""" import _interpchannels as _channels _channels.send({cid}, b'spam', blocking=False) """)) - obj = _channels.recv(cid) + obj = recv_nowait(cid) self.assertEqual(obj, b'spam') def test_send_recv_different_threads(self): - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): obj = recv_wait(cid) @@ -674,7 +686,7 @@ def f(): self.assertEqual(obj, b'spam') def test_send_recv_different_interpreters_and_threads(self): - cid = _channels.create() + cid = _channels.create(REPLACE) id1 = _interpreters.create() out = None @@ -685,7 +697,7 @@ def f(): import _interpchannels 
as _channels while True: try: - obj = _channels.recv({cid}) + obj, _ = _channels.recv({cid}) break except _channels.ChannelEmptyError: time.sleep(0.1) @@ -710,23 +722,23 @@ def test_recv_not_found(self): _channels.recv(10) def test_recv_empty(self): - cid = _channels.create() + cid = _channels.create(REPLACE) with self.assertRaises(_channels.ChannelEmptyError): _channels.recv(cid) def test_recv_default(self): default = object() - cid = _channels.create() - obj1 = _channels.recv(cid, default) + cid = _channels.create(REPLACE) + obj1 = recv_nowait(cid, default) _channels.send(cid, None, blocking=False) _channels.send(cid, 1, blocking=False) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'eggs', blocking=False) - obj2 = _channels.recv(cid, default) - obj3 = _channels.recv(cid, default) - obj4 = _channels.recv(cid) - obj5 = _channels.recv(cid, default) - obj6 = _channels.recv(cid, default) + obj2 = recv_nowait(cid, default) + obj3 = recv_nowait(cid, default) + obj4 = recv_nowait(cid) + obj5 = recv_nowait(cid, default) + obj6 = recv_nowait(cid, default) self.assertIs(obj1, default) self.assertIs(obj2, None) @@ -737,7 +749,7 @@ def test_recv_default(self): def test_recv_sending_interp_destroyed(self): with self.subTest('closed'): - cid1 = _channels.create() + cid1 = _channels.create(REPLACE) interp = _interpreters.create() _interpreters.run_string(interp, dedent(f""" import _interpchannels as _channels @@ -750,7 +762,7 @@ def test_recv_sending_interp_destroyed(self): _channels.recv(cid1) del cid1 with self.subTest('still open'): - cid2 = _channels.create() + cid2 = _channels.create(REPLACE) interp = _interpreters.create() _interpreters.run_string(interp, dedent(f""" import _interpchannels as _channels @@ -759,7 +771,8 @@ def test_recv_sending_interp_destroyed(self): _channels.send(cid2, b'eggs', blocking=False) _interpreters.destroy(interp) - _channels.recv(cid2) + recv_nowait(cid2, unbound=True) + recv_nowait(cid2, unbound=False) with self.assertRaisesRegex(RuntimeError, f'channel {cid2} is empty'): _channels.recv(cid2) @@ -770,9 +783,9 @@ def test_recv_sending_interp_destroyed(self): def test_send_buffer(self): buf = bytearray(b'spamspamspam') - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send_buffer(cid, buf, blocking=False) - obj = _channels.recv(cid) + obj = recv_nowait(cid) self.assertIsNot(obj, buf) self.assertIsInstance(obj, memoryview) @@ -794,12 +807,12 @@ def build_send_waiter(self, obj, *, buffer=False): else: send = _channels.send - cid = _channels.create() + cid = _channels.create(REPLACE) try: started = time.monotonic() send(cid, obj, blocking=False) stopped = time.monotonic() - _channels.recv(cid) + recv_nowait(cid) finally: _channels.destroy(cid) delay = stopped - started # seconds @@ -813,7 +826,7 @@ def test_send_blocking_waiting(self): received = None obj = b'spam' wait = self.build_send_waiter(obj) - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): nonlocal received wait() @@ -829,7 +842,7 @@ def test_send_buffer_blocking_waiting(self): received = None obj = bytearray(b'spam') wait = self.build_send_waiter(obj, buffer=True) - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): nonlocal received wait() @@ -844,7 +857,7 @@ def f(): def test_send_blocking_no_wait(self): received = None obj = b'spam' - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): nonlocal received received = recv_wait(cid) @@ -858,7 +871,7 @@ def f(): def test_send_buffer_blocking_no_wait(self): received = None 
obj = bytearray(b'spam') - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): nonlocal received received = recv_wait(cid) @@ -873,20 +886,20 @@ def test_send_timeout(self): obj = b'spam' with self.subTest('non-blocking with timeout'): - cid = _channels.create() + cid = _channels.create(REPLACE) with self.assertRaises(ValueError): _channels.send(cid, obj, blocking=False, timeout=0.1) with self.subTest('timeout hit'): - cid = _channels.create() + cid = _channels.create(REPLACE) with self.assertRaises(TimeoutError): _channels.send(cid, obj, blocking=True, timeout=0.1) with self.assertRaises(_channels.ChannelEmptyError): - received = _channels.recv(cid) + received = recv_nowait(cid) print(repr(received)) with self.subTest('timeout not hit'): - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): recv_wait(cid) t = threading.Thread(target=f) @@ -910,20 +923,20 @@ def test_send_buffer_timeout(self): obj = bytearray(b'spam') with self.subTest('non-blocking with timeout'): - cid = _channels.create() + cid = _channels.create(REPLACE) with self.assertRaises(ValueError): _channels.send_buffer(cid, obj, blocking=False, timeout=0.1) with self.subTest('timeout hit'): - cid = _channels.create() + cid = _channels.create(REPLACE) with self.assertRaises(TimeoutError): _channels.send_buffer(cid, obj, blocking=True, timeout=0.1) with self.assertRaises(_channels.ChannelEmptyError): - received = _channels.recv(cid) + received = recv_nowait(cid) print(repr(received)) with self.subTest('timeout not hit'): - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): recv_wait(cid) t = threading.Thread(target=f) @@ -936,7 +949,7 @@ def test_send_closed_while_waiting(self): wait = self.build_send_waiter(obj) with self.subTest('without timeout'): - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): wait() _channels.close(cid, force=True) @@ -947,7 +960,7 @@ def f(): t.join() with self.subTest('with timeout'): - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): wait() _channels.close(cid, force=True) @@ -974,7 +987,7 @@ def test_send_buffer_closed_while_waiting(self): wait = self.build_send_waiter(obj, buffer=True) with self.subTest('without timeout'): - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): wait() _channels.close(cid, force=True) @@ -985,7 +998,7 @@ def f(): t.join() with self.subTest('with timeout'): - cid = _channels.create() + cid = _channels.create(REPLACE) def f(): wait() _channels.close(cid, force=True) @@ -999,9 +1012,9 @@ def f(): # close def test_close_single_user(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.close(cid) with self.assertRaises(_channels.ChannelClosedError): @@ -1010,7 +1023,7 @@ def test_close_single_user(self): _channels.recv(cid) def test_close_multiple_users(self): - cid = _channels.create() + cid = _channels.create(REPLACE) id1 = _interpreters.create() id2 = _interpreters.create() _interpreters.run_string(id1, dedent(f""" @@ -1034,9 +1047,9 @@ def test_close_multiple_users(self): self.assertEqual(excsnap.type.__name__, 'ChannelClosedError') def test_close_multiple_times(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.close(cid) with self.assertRaises(_channels.ChannelClosedError): @@ -1051,9 +1064,9 @@ def test_close_empty(self): ] for send, recv in 
tests: with self.subTest((send, recv)): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.close(cid, send=send, recv=recv) with self.assertRaises(_channels.ChannelClosedError): @@ -1062,56 +1075,56 @@ def test_close_empty(self): _channels.recv(cid) def test_close_defaults_with_unused_items(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) with self.assertRaises(_channels.ChannelNotEmptyError): _channels.close(cid) - _channels.recv(cid) + recv_nowait(cid) _channels.send(cid, b'eggs', blocking=False) def test_close_recv_with_unused_items_unforced(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) with self.assertRaises(_channels.ChannelNotEmptyError): _channels.close(cid, recv=True) - _channels.recv(cid) + recv_nowait(cid) _channels.send(cid, b'eggs', blocking=False) - _channels.recv(cid) - _channels.recv(cid) + recv_nowait(cid) + recv_nowait(cid) _channels.close(cid, recv=True) def test_close_send_with_unused_items_unforced(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) _channels.close(cid, send=True) with self.assertRaises(_channels.ChannelClosedError): _channels.send(cid, b'eggs') - _channels.recv(cid) - _channels.recv(cid) + recv_nowait(cid) + recv_nowait(cid) with self.assertRaises(_channels.ChannelClosedError): _channels.recv(cid) def test_close_both_with_unused_items_unforced(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) with self.assertRaises(_channels.ChannelNotEmptyError): _channels.close(cid, recv=True, send=True) - _channels.recv(cid) + recv_nowait(cid) _channels.send(cid, b'eggs', blocking=False) - _channels.recv(cid) - _channels.recv(cid) + recv_nowait(cid) + recv_nowait(cid) _channels.close(cid, recv=True) def test_close_recv_with_unused_items_forced(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) _channels.close(cid, recv=True, force=True) @@ -1122,7 +1135,7 @@ def test_close_recv_with_unused_items_forced(self): _channels.recv(cid) def test_close_send_with_unused_items_forced(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) _channels.close(cid, send=True, force=True) @@ -1133,7 +1146,7 @@ def test_close_send_with_unused_items_forced(self): _channels.recv(cid) def test_close_both_with_unused_items_forced(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) _channels.close(cid, send=True, recv=True, force=True) @@ -1144,7 +1157,7 @@ def test_close_both_with_unused_items_forced(self): _channels.recv(cid) def test_close_never_used(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.close(cid) with self.assertRaises(_channels.ChannelClosedError): @@ -1153,7 +1166,7 @@ def test_close_never_used(self): _channels.recv(cid) def test_close_by_unassociated_interp(self): - cid = _channels.create() + cid = 
_channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) interp = _interpreters.create() _interpreters.run_string(interp, dedent(f""" @@ -1166,11 +1179,11 @@ def test_close_by_unassociated_interp(self): _channels.close(cid) def test_close_used_multiple_times_by_single_user(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.close(cid, force=True) with self.assertRaises(_channels.ChannelClosedError): @@ -1179,7 +1192,7 @@ def test_close_used_multiple_times_by_single_user(self): _channels.recv(cid) def test_channel_list_interpreters_invalid_channel(self): - cid = _channels.create() + cid = _channels.create(REPLACE) # Test for invalid channel ID. with self.assertRaises(_channels.ChannelNotFoundError): _channels.list_interpreters(1000, send=True) @@ -1191,7 +1204,7 @@ def test_channel_list_interpreters_invalid_channel(self): def test_channel_list_interpreters_invalid_args(self): # Tests for invalid arguments passed to the API. - cid = _channels.create() + cid = _channels.create(REPLACE) with self.assertRaises(TypeError): _channels.list_interpreters(cid) @@ -1240,9 +1253,9 @@ class ChannelReleaseTests(TestBase): """ def test_single_user(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.release(cid, send=True, recv=True) with self.assertRaises(_channels.ChannelClosedError): @@ -1251,7 +1264,7 @@ def test_single_user(self): _channels.recv(cid) def test_multiple_users(self): - cid = _channels.create() + cid = _channels.create(REPLACE) id1 = _interpreters.create() id2 = _interpreters.create() _interpreters.run_string(id1, dedent(f""" @@ -1260,7 +1273,7 @@ def test_multiple_users(self): """)) out = _run_output(id2, dedent(f""" import _interpchannels as _channels - obj = _channels.recv({cid}) + obj, _ = _channels.recv({cid}) _channels.release({cid}) print(repr(obj)) """)) @@ -1271,9 +1284,9 @@ def test_multiple_users(self): self.assertEqual(out.strip(), "b'spam'") def test_no_kwargs(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.release(cid) with self.assertRaises(_channels.ChannelClosedError): @@ -1282,16 +1295,16 @@ def test_no_kwargs(self): _channels.recv(cid) def test_multiple_times(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.release(cid, send=True, recv=True) with self.assertRaises(_channels.ChannelClosedError): _channels.release(cid, send=True, recv=True) def test_with_unused_items(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'ham', blocking=False) _channels.release(cid, send=True, recv=True) @@ -1300,7 +1313,7 @@ def test_with_unused_items(self): _channels.recv(cid) def test_never_used(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.release(cid) with self.assertRaises(_channels.ChannelClosedError): @@ -1309,14 +1322,14 @@ def test_never_used(self): _channels.recv(cid) def test_by_unassociated_interp(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) interp = 
_interpreters.create() _interpreters.run_string(interp, dedent(f""" import _interpchannels as _channels _channels.release({cid}) """)) - obj = _channels.recv(cid) + obj = recv_nowait(cid) _channels.release(cid) with self.assertRaises(_channels.ChannelClosedError): @@ -1325,7 +1338,7 @@ def test_by_unassociated_interp(self): def test_close_if_unassociated(self): # XXX Something's not right with this test... - cid = _channels.create() + cid = _channels.create(REPLACE) interp = _interpreters.create() _interpreters.run_string(interp, dedent(f""" import _interpchannels as _channels @@ -1338,21 +1351,21 @@ def test_close_if_unassociated(self): def test_partially(self): # XXX Is partial close too weird/confusing? - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, None, blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.send(cid, b'spam', blocking=False) _channels.release(cid, send=True) - obj = _channels.recv(cid) + obj = recv_nowait(cid) self.assertEqual(obj, b'spam') def test_used_multiple_times_by_single_user(self): - cid = _channels.create() + cid = _channels.create(REPLACE) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'spam', blocking=False) _channels.send(cid, b'spam', blocking=False) - _channels.recv(cid) + recv_nowait(cid) _channels.release(cid, send=True, recv=True) with self.assertRaises(_channels.ChannelClosedError): @@ -1428,9 +1441,9 @@ def clean_up(self): def _new_channel(self, creator): if creator.name == 'main': - return _channels.create() + return _channels.create(REPLACE) else: - ch = _channels.create() + ch = _channels.create(REPLACE) run_interp(creator.id, f""" import _interpreters cid = _xxsubchannels.create() @@ -1439,7 +1452,7 @@ def _new_channel(self, creator): _xxsubchannels.send({ch}, int(cid), blocking=False) del _interpreters """) - self._cid = _channels.recv(ch) + self._cid = recv_nowait(ch) return self._cid def _get_interpreter(self, interp): @@ -1657,7 +1670,7 @@ def run_action(self, fix, action, *, hideclosed=True): ) fix.record_action(action, result) else: - _cid = _channels.create() + _cid = _channels.create(REPLACE) run_interp(interp.id, f""" result = helpers.run_action( {fix.cid}, @@ -1670,8 +1683,8 @@ def run_action(self, fix, action, *, hideclosed=True): _channels.send({_cid}, b'X' if result.closed else b'', blocking=False) """) result = ChannelState( - pending=int.from_bytes(_channels.recv(_cid), 'little'), - closed=bool(_channels.recv(_cid)), + pending=int.from_bytes(recv_nowait(_cid), 'little'), + closed=bool(recv_nowait(_cid)), ) fix.record_action(action, result) @@ -1729,7 +1742,7 @@ def _assert_closed(self, fix): self.assertTrue(fix.state.closed) for _ in range(fix.state.pending): - _channels.recv(fix.cid) + recv_nowait(fix.cid) self._assert_closed_in_interp(fix) for interp in ('same', 'other'): diff --git a/Lib/test/test_android.py b/Lib/test/test_android.py index 115882a4c28..2ef9f10fdcc 100644 --- a/Lib/test/test_android.py +++ b/Lib/test/test_android.py @@ -1,14 +1,17 @@ +import io import platform import queue import re import subprocess import sys import unittest +from _android_support import TextLogStream from array import array -from contextlib import contextmanager +from contextlib import ExitStack, contextmanager from threading import Thread from test.support import LOOPBACK_TIMEOUT from time import time +from unittest.mock import patch if sys.platform != "android": @@ -16,6 +19,9 @@ api_level = platform.android_ver().api_level +# (name, level, fileno) +STREAM_INFO = [("stdout", 
"I", 1), ("stderr", "W", 2)] + # Test redirection of stdout and stderr to the Android log. @unittest.skipIf( @@ -36,7 +42,8 @@ def logcat_thread(): for line in self.logcat_process.stdout: self.logcat_queue.put(line.rstrip("\n")) self.logcat_process.stdout.close() - Thread(target=logcat_thread).start() + self.logcat_thread = Thread(target=logcat_thread) + self.logcat_thread.start() from ctypes import CDLL, c_char_p, c_int android_log_write = getattr(CDLL("liblog.so"), "__android_log_write") @@ -72,6 +79,7 @@ def assert_log(self, level, tag, expected, *, skip=False, timeout=0.5): def tearDown(self): self.logcat_process.terminate() self.logcat_process.wait(LOOPBACK_TIMEOUT) + self.logcat_thread.join(LOOPBACK_TIMEOUT) @contextmanager def unbuffered(self, stream): @@ -81,18 +89,42 @@ def unbuffered(self, stream): finally: stream.reconfigure(write_through=False) + # In --verbose3 mode, sys.stdout and sys.stderr are captured, so we can't + # test them directly. Detect this mode and use some temporary streams with + # the same properties. + def stream_context(self, stream_name, level): + # https://developer.android.com/ndk/reference/group/logging + prio = {"I": 4, "W": 5}[level] + + stack = ExitStack() + stack.enter_context(self.subTest(stream_name)) + stream = getattr(sys, stream_name) + native_stream = getattr(sys, f"__{stream_name}__") + if isinstance(stream, io.StringIO): + stack.enter_context( + patch( + f"sys.{stream_name}", + TextLogStream( + prio, f"python.{stream_name}", native_stream.fileno(), + errors="backslashreplace" + ), + ) + ) + return stack + def test_str(self): - for stream_name, level in [("stdout", "I"), ("stderr", "W")]: - with self.subTest(stream=stream_name): + for stream_name, level, fileno in STREAM_INFO: + with self.stream_context(stream_name, level): stream = getattr(sys, stream_name) tag = f"python.{stream_name}" self.assertEqual(f"", repr(stream)) - self.assertTrue(stream.writable()) - self.assertFalse(stream.readable()) + self.assertIs(stream.writable(), True) + self.assertIs(stream.readable(), False) + self.assertEqual(stream.fileno(), fileno) self.assertEqual("UTF-8", stream.encoding) - self.assertTrue(stream.line_buffering) - self.assertFalse(stream.write_through) + self.assertIs(stream.line_buffering, True) + self.assertIs(stream.write_through, False) # stderr is backslashreplace by default; stdout is configured # that way by libregrtest.main. @@ -147,6 +179,13 @@ def write(s, lines=None, *, write_len=None): write("f\n\ng", ["exxf", ""]) write("\n", ["g"]) + # Since this is a line-based logging system, line buffering + # cannot be turned off, i.e. a newline always causes a flush. + stream.reconfigure(line_buffering=False) + self.assertIs(stream.line_buffering, True) + + # However, buffering can be turned off completely if you want a + # flush after every write. with self.unbuffered(stream): write("\nx", ["", "x"]) write("\na\n", ["", "a"]) @@ -209,30 +248,31 @@ def __str__(self): # (MAX_BYTES_PER_WRITE). 
# # ASCII (1 byte per character) - write(("foobar" * 700) + "\n", - [("foobar" * 666) + "foob", # 4000 bytes - "ar" + ("foobar" * 33)]) # 200 bytes + write(("foobar" * 700) + "\n", # 4200 bytes in + [("foobar" * 666) + "foob", # 4000 bytes out + "ar" + ("foobar" * 33)]) # 200 bytes out # "Full-width" digits 0-9 (3 bytes per character) s = "\uff10\uff11\uff12\uff13\uff14\uff15\uff16\uff17\uff18\uff19" - write((s * 150) + "\n", - [s * 100, # 3000 bytes - s * 50]) # 1500 bytes + write((s * 150) + "\n", # 4500 bytes in + [s * 100, # 3000 bytes out + s * 50]) # 1500 bytes out s = "0123456789" - write(s * 200, []) - write(s * 150, []) - write(s * 51, [s * 350]) # 3500 bytes - write("\n", [s * 51]) # 510 bytes + write(s * 200, []) # 2000 bytes in + write(s * 150, []) # 1500 bytes in + write(s * 51, [s * 350]) # 510 bytes in, 3500 bytes out + write("\n", [s * 51]) # 0 bytes in, 510 bytes out def test_bytes(self): - for stream_name, level in [("stdout", "I"), ("stderr", "W")]: - with self.subTest(stream=stream_name): + for stream_name, level, fileno in STREAM_INFO: + with self.stream_context(stream_name, level): stream = getattr(sys, stream_name).buffer tag = f"python.{stream_name}" self.assertEqual(f"", repr(stream)) - self.assertTrue(stream.writable()) - self.assertFalse(stream.readable()) + self.assertIs(stream.writable(), True) + self.assertIs(stream.readable(), False) + self.assertEqual(stream.fileno(), fileno) def write(b, lines=None, *, write_len=None): if write_len is None: @@ -330,3 +370,82 @@ def write(b, lines=None, *, write_len=None): fr"{type(obj).__name__}" ): stream.write(obj) + + +class TestAndroidRateLimit(unittest.TestCase): + def test_rate_limit(self): + # https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log_read.h;l=39 + PER_MESSAGE_OVERHEAD = 28 + + # https://developer.android.com/ndk/reference/group/logging + ANDROID_LOG_DEBUG = 3 + + # To avoid flooding the test script output, use a different tag rather + # than stdout or stderr. + tag = "python.rate_limit" + stream = TextLogStream(ANDROID_LOG_DEBUG, tag) + + # Make a test message which consumes 1 KB of the logcat buffer. + message = "Line {:03d} " + message += "." * ( + 1024 - PER_MESSAGE_OVERHEAD - len(tag) - len(message.format(0)) + ) + "\n" + + # To avoid depending on the performance of the test device, we mock the + # passage of time. + mock_now = time() + + def mock_time(): + # Avoid division by zero by simulating a small delay. + mock_sleep(0.0001) + return mock_now + + def mock_sleep(duration): + nonlocal mock_now + mock_now += duration + + # See _android_support.py. The default values of these parameters work + # well across a wide range of devices, but we'll use smaller values to + # ensure a quick and reliable test that doesn't flood the log too much. + MAX_KB_PER_SECOND = 100 + BUCKET_KB = 10 + with ( + patch("_android_support.MAX_BYTES_PER_SECOND", MAX_KB_PER_SECOND * 1024), + patch("_android_support.BUCKET_SIZE", BUCKET_KB * 1024), + patch("_android_support.sleep", mock_sleep), + patch("_android_support.time", mock_time), + ): + # Make sure the token bucket is full. + stream.write("Initial message to reset _prev_write_time") + mock_sleep(BUCKET_KB / MAX_KB_PER_SECOND) + line_num = 0 + + # Write BUCKET_KB messages, and return the rate at which they were + # accepted in KB per second. 
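The three rate checks further down have the classic token-bucket shape: writes pass at full speed while the bucket still holds tokens, drop to the configured rate once it is empty, and speed up again after the bucket has had time to refill. As a standalone illustration of that scheme under the same patched numbers (a 10 KB bucket refilled at 100 KB/s), the sketch below lets a caller overdraw the bucket and then reports how long it must sleep; the class name TokenBucket and the delay_for method are invented for this example and are not the _android_support API:

    class TokenBucket:
        """Toy limiter: callers may overdraw, then must wait out the deficit."""

        def __init__(self, rate_bytes_per_s, capacity_bytes, now):
            self.rate = rate_bytes_per_s
            self.capacity = capacity_bytes
            self.tokens = capacity_bytes              # start with a full bucket
            self.last = now

        def delay_for(self, nbytes, now):
            """Seconds to sleep before nbytes may be written."""
            # Refill according to elapsed time, capped at the bucket size.
            self.tokens = min(self.capacity,
                              self.tokens + (now - self.last) * self.rate)
            self.last = now
            self.tokens -= nbytes
            if self.tokens >= 0:
                return 0.0                            # still within the burst
            return -self.tokens / self.rate           # wait for the deficit to refill

    bucket = TokenBucket(100 * 1024, 10 * 1024, now=0.0)
    assert bucket.delay_for(10 * 1024, now=0.0) == 0.0   # first bucketful is free
    assert bucket.delay_for(10 * 1024, now=0.0) == 0.1   # next one pays 10 KB / (100 KB/s)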
+ def write_bucketful(): + nonlocal line_num + start = mock_time() + max_line_num = line_num + BUCKET_KB + while line_num < max_line_num: + stream.write(message.format(line_num)) + line_num += 1 + return BUCKET_KB / (mock_time() - start) + + # The first bucketful should be written with minimal delay. The + # factor of 2 here is not arbitrary: it verifies that the system can + # write fast enough to empty the bucket within two bucketfuls, which + # the next part of the test depends on. + self.assertGreater(write_bucketful(), MAX_KB_PER_SECOND * 2) + + # Write another bucketful to empty the token bucket completely. + write_bucketful() + + # The next bucketful should be written at the rate limit. + self.assertAlmostEqual( + write_bucketful(), MAX_KB_PER_SECOND, + delta=MAX_KB_PER_SECOND * 0.1 + ) + + # Once the token bucket refills, we should go back to full speed. + mock_sleep(BUCKET_KB / MAX_KB_PER_SECOND) + self.assertGreater(write_bucketful(), MAX_KB_PER_SECOND * 2) diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index d4619113077..eb744468b55 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -3002,12 +3002,12 @@ def get_parser(self, required=None): ] usage_when_not_required = '''\ - usage: PROG [-h] [--abcde ABCDE] [--fghij FGHIJ] - [--klmno KLMNO | --pqrst PQRST] + usage: PROG [-h] [--abcde ABCDE] [--fghij FGHIJ] [--klmno KLMNO | + --pqrst PQRST] ''' usage_when_required = '''\ - usage: PROG [-h] [--abcde ABCDE] [--fghij FGHIJ] - (--klmno KLMNO | --pqrst PQRST) + usage: PROG [-h] [--abcde ABCDE] [--fghij FGHIJ] (--klmno KLMNO | + --pqrst PQRST) ''' help = '''\ @@ -4390,6 +4390,24 @@ def test_nested_mutex_groups(self): ''') self.assertEqual(parser.format_usage(), usage) + def test_long_mutex_groups_wrap(self): + parser = argparse.ArgumentParser(prog='PROG') + g = parser.add_mutually_exclusive_group() + g.add_argument('--op1', metavar='MET', nargs='?') + g.add_argument('--op2', metavar=('MET1', 'MET2'), nargs='*') + g.add_argument('--op3', nargs='*') + g.add_argument('--op4', metavar=('MET1', 'MET2'), nargs='+') + g.add_argument('--op5', nargs='+') + g.add_argument('--op6', nargs=3) + g.add_argument('--op7', metavar=('MET1', 'MET2', 'MET3'), nargs=3) + + usage = textwrap.dedent('''\ + usage: PROG [-h] [--op1 [MET] | --op2 [MET1 [MET2 ...]] | --op3 [OP3 ...] | + --op4 MET1 [MET2 ...] | --op5 OP5 [OP5 ...] | --op6 OP6 OP6 OP6 | + --op7 MET1 MET2 MET3] + ''') + self.assertEqual(parser.format_usage(), usage) + class TestHelpVariableExpansion(HelpTestCase): """Test that variables are expanded properly in help messages""" @@ -4963,6 +4981,46 @@ def custom_type(string): version = '' +class TestHelpUsageLongSubparserCommand(TestCase): + """Test that subparser commands are formatted correctly in help""" + maxDiff = None + + def test_parent_help(self): + def custom_formatter(prog): + return argparse.RawTextHelpFormatter(prog, max_help_position=50) + + parent_parser = argparse.ArgumentParser( + prog='PROG', + formatter_class=custom_formatter + ) + + cmd_subparsers = parent_parser.add_subparsers(title="commands", + metavar='CMD', + help='command to use') + cmd_subparsers.add_parser("add", + help="add something") + + cmd_subparsers.add_parser("remove", + help="remove something") + + cmd_subparsers.add_parser("a-very-long-command", + help="command that does something") + + parser_help = parent_parser.format_help() + self.assertEqual(parser_help, textwrap.dedent('''\ + usage: PROG [-h] CMD ... 
+ + options: + -h, --help show this help message and exit + + commands: + CMD command to use + add add something + remove remove something + a-very-long-command command that does something + ''')) + + # ===================================== # Optional/Positional constructor tests # ===================================== @@ -5816,9 +5874,8 @@ def test_invalid_args(self): parser = ErrorRaisingArgumentParser(prog='PROG') parser.add_argument('--foo', nargs="*") parser.add_argument('foo') - with captured_stderr() as stderr: + with self.assertWarns(UserWarning): parser.parse_intermixed_args(['hello', '--foo']) - self.assertIn("UserWarning", stderr.getvalue()) class TestIntermixedMessageContentError(TestCase): # case where Intermixed gives different error message diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py index 95383be9659..47cbe60bfca 100755 --- a/Lib/test/test_array.py +++ b/Lib/test/test_array.py @@ -1493,7 +1493,7 @@ def test_byteswap(self): self.assertEqual(a, b) else: # On alphas treating the byte swapped bit patters as - # floats/doubles results in floating point exceptions + # floats/doubles results in floating-point exceptions # => compare the 8bit string values instead self.assertNotEqual(a.tobytes(), b.tobytes()) b.byteswap() diff --git a/Lib/test/test_ast/__init__.py b/Lib/test/test_ast/__init__.py new file mode 100644 index 00000000000..9a89d27ba9f --- /dev/null +++ b/Lib/test/test_ast/__init__.py @@ -0,0 +1,7 @@ +import os + +from test import support + + +def load_tests(*args): + return support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_ast/snippets.py b/Lib/test/test_ast/snippets.py new file mode 100644 index 00000000000..28d32b2941f --- /dev/null +++ b/Lib/test/test_ast/snippets.py @@ -0,0 +1,601 @@ +import ast +import sys + +from test.test_ast.utils import to_tuple + + +# These tests are compiled through "exec" +# There should be at least one test per statement +exec_tests = [ + # Module docstring + "'module docstring'", + # FunctionDef + "def f(): pass", + # FunctionDef with docstring + "def f(): 'function docstring'", + # FunctionDef with arg + "def f(a): pass", + # FunctionDef with arg and default value + "def f(a=0): pass", + # FunctionDef with varargs + "def f(*args): pass", + # FunctionDef with varargs as TypeVarTuple + "def f(*args: *Ts): pass", + # FunctionDef with varargs as unpacked Tuple + "def f(*args: *tuple[int, ...]): pass", + # FunctionDef with varargs as unpacked Tuple *and* TypeVarTuple + "def f(*args: *tuple[int, *Ts]): pass", + # FunctionDef with kwargs + "def f(**kwargs): pass", + # FunctionDef with all kind of args and docstring + "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): 'doc for f()'", + # FunctionDef with type annotation on return involving unpacking + "def f() -> tuple[*Ts]: pass", + "def f() -> tuple[int, *Ts]: pass", + "def f() -> tuple[int, *tuple[int, ...]]: pass", + # ClassDef + "class C:pass", + # ClassDef with docstring + "class C: 'docstring for class C'", + # ClassDef, new style class + "class C(object): pass", + # Classdef with multiple bases + "class C(A, B): pass", + # Return + "def f():return 1", + "def f():return", + # Delete + "del v", + # Assign + "v = 1", + "a,b = c", + "(a,b) = c", + "[a,b] = c", + "a[b] = c", + # AnnAssign with unpacked types + "x: tuple[*Ts]", + "x: tuple[int, *Ts]", + "x: tuple[int, *tuple[str, ...]]", + # AugAssign + "v += 1", + "v -= 1", + "v *= 1", + "v @= 1", + "v /= 1", + "v %= 1", + "v **= 1", + "v <<= 1", + "v >>= 1", + "v |= 1", + "v ^= 
1", + "v &= 1", + "v //= 1", + # For + "for v in v:pass", + # For-Else + "for v in v:\n pass\nelse:\n pass", + # While + "while v:pass", + # While-Else + "while v:\n pass\nelse:\n pass", + # If-Elif-Else + "if v:pass", + "if a:\n pass\nelif b:\n pass", + "if a:\n pass\nelse:\n pass", + "if a:\n pass\nelif b:\n pass\nelse:\n pass", + "if a:\n pass\nelif b:\n pass\nelif b:\n pass\nelif b:\n pass\nelse:\n pass", + # With + "with x: pass", + "with x, y: pass", + "with x as y: pass", + "with x as y, z as q: pass", + "with (x as y): pass", + "with (x, y): pass", + # Raise + "raise", + "raise Exception('string')", + "raise Exception", + "raise Exception('string') from None", + # TryExcept + "try:\n pass\nexcept Exception:\n pass", + "try:\n pass\nexcept Exception as exc:\n pass", + # TryFinally + "try:\n pass\nfinally:\n pass", + # TryStarExcept + "try:\n pass\nexcept* Exception:\n pass", + "try:\n pass\nexcept* Exception as exc:\n pass", + # TryExceptFinallyElse + "try:\n pass\nexcept Exception:\n pass\nelse: pass\nfinally:\n pass", + "try:\n pass\nexcept Exception as exc:\n pass\nelse: pass\nfinally:\n pass", + "try:\n pass\nexcept* Exception as exc:\n pass\nelse: pass\nfinally:\n pass", + # Assert + "assert v", + # Assert with message + "assert v, 'message'", + # Import + "import sys", + "import foo as bar", + # ImportFrom + "from sys import x as y", + "from sys import v", + # Global + "global v", + # Expr + "1", + # Pass, + "pass", + # Break + "for v in v:break", + # Continue + "for v in v:continue", + # for statements with naked tuples (see http://bugs.python.org/issue6704) + "for a,b in c: pass", + "for (a,b) in c: pass", + "for [a,b] in c: pass", + # Multiline generator expression (test for .lineno & .col_offset) + """( + ( + Aa + , + Bb + ) + for + Aa + , + Bb in Cc + )""", + # dictcomp + "{a : b for w in x for m in p if g}", + # dictcomp with naked tuple + "{a : b for v,w in x}", + # setcomp + "{r for l in x if g}", + # setcomp with naked tuple + "{r for l,m in x}", + # AsyncFunctionDef + "async def f():\n 'async function'\n await something()", + # AsyncFor + "async def f():\n async for e in i: 1\n else: 2", + # AsyncWith + "async def f():\n async with a as b: 1", + # PEP 448: Additional Unpacking Generalizations + "{**{1:2}, 2:3}", + "{*{1, 2}, 3}", + # Function with yield (from) + "def f(): yield 1", + "def f(): yield from []", + # Asynchronous comprehensions + "async def f():\n [i async for b in c]", + # Decorated FunctionDef + "@deco1\n@deco2()\n@deco3(1)\ndef f(): pass", + # Decorated AsyncFunctionDef + "@deco1\n@deco2()\n@deco3(1)\nasync def f(): pass", + # Decorated ClassDef + "@deco1\n@deco2()\n@deco3(1)\nclass C: pass", + # Decorator with generator argument + "@deco(a for a in b)\ndef f(): pass", + # Decorator with attribute + "@a.b.c\ndef f(): pass", + # Simple assignment expression + "(a := 1)", + # Assignment expression in if statement + "if a := foo(): pass", + # Assignment expression in while + "while a := foo(): pass", + # Positional-only arguments + "def f(a, /,): pass", + "def f(a, /, c, d, e): pass", + "def f(a, /, c, *, d, e): pass", + "def f(a, /, c, *, d, e, **kwargs): pass", + # Positional-only arguments with defaults + "def f(a=1, /,): pass", + "def f(a=1, /, b=2, c=4): pass", + "def f(a=1, /, b=2, *, c=4): pass", + "def f(a=1, /, b=2, *, c): pass", + "def f(a=1, /, b=2, *, c=4, **kwargs): pass", + "def f(a=1, /, b=2, *, c, **kwargs): pass", + # Type aliases + "type X = int", + "type X[T] = int", + "type X[T, *Ts, **P] = (T, Ts, P)", + "type X[T: int, *Ts, **P] = 
(T, Ts, P)", + "type X[T: (int, str), *Ts, **P] = (T, Ts, P)", + "type X[T: int = 1, *Ts = 2, **P =3] = (T, Ts, P)", + # Generic classes + "class X[T]: pass", + "class X[T, *Ts, **P]: pass", + "class X[T: int, *Ts, **P]: pass", + "class X[T: (int, str), *Ts, **P]: pass", + "class X[T: int = 1, *Ts = 2, **P = 3]: pass", + # Generic functions + "def f[T](): pass", + "def f[T, *Ts, **P](): pass", + "def f[T: int, *Ts, **P](): pass", + "def f[T: (int, str), *Ts, **P](): pass", + "def f[T: int = 1, *Ts = 2, **P = 3](): pass", + # Match + "match x:\n\tcase 1:\n\t\tpass", + # Match with _ + "match x:\n\tcase 1:\n\t\tpass\n\tcase _:\n\t\tpass", +] + +# These are compiled through "single" +# because of overlap with "eval", it just tests what +# can't be tested with "eval" +single_tests = [ + "1+2" +] + +# These are compiled through "eval" +# It should test all expressions +eval_tests = [ + # Constant(value=None) + "None", + # True + "True", + # False + "False", + # BoolOp + "a and b", + "a or b", + # BinOp + "a + b", + "a - b", + "a * b", + "a / b", + "a @ b", + "a // b", + "a ** b", + "a % b", + "a >> b", + "a << b", + "a ^ b", + "a | b", + "a & b", + # UnaryOp + "not v", + "+v", + "-v", + "~v", + # Lambda + "lambda:None", + # Dict + "{ 1:2 }", + # Empty dict + "{}", + # Set + "{None,}", + # Multiline dict (test for .lineno & .col_offset) + """{ + 1 + : + 2 + }""", + # Multiline list + """[ + 1 + , + 1 + ]""", + # Multiline tuple + """( + 1 + , + )""", + # Multiline set + """{ + 1 + , + 1 + }""", + # ListComp + "[a for b in c if d]", + # GeneratorExp + "(a for b in c if d)", + # SetComp + "{a for b in c if d}", + # DictComp + "{k: v for k, v in c if d}", + # Comprehensions with multiple for targets + "[(a,b) for a,b in c]", + "[(a,b) for (a,b) in c]", + "[(a,b) for [a,b] in c]", + "{(a,b) for a,b in c}", + "{(a,b) for (a,b) in c}", + "{(a,b) for [a,b] in c}", + "((a,b) for a,b in c)", + "((a,b) for (a,b) in c)", + "((a,b) for [a,b] in c)", + # Async comprehensions - async comprehensions can't work outside an asynchronous function + # + # Yield - yield expressions can't work outside a function + # + # Compare + "1 < 2 < 3", + "a == b", + "a <= b", + "a >= b", + "a != b", + "a is b", + "a is not b", + "a in b", + "a not in b", + # Call without argument + "f()", + # Call + "f(1,2,c=3,*d,**e)", + # Call with multi-character starred + "f(*[0, 1])", + # Call with a generator argument + "f(a for a in b)", + # Constant(value=int()) + "10", + # Complex num + "1j", + # Constant(value=str()) + "'string'", + # Attribute + "a.b", + # Subscript + "a[b:c]", + # Name + "v", + # List + "[1,2,3]", + # Empty list + "[]", + # Tuple + "1,2,3", + # Tuple + "(1,2,3)", + # Empty tuple + "()", + # Combination + "a.b.c.d(a.b[1:2])", + # Slice + "[5][1:]", + "[5][:1]", + "[5][::1]", + "[5][1:1:1]", + # IfExp + "foo() if x else bar()", + # JoinedStr and FormattedValue + "f'{a}'", + "f'{a:.2f}'", + "f'{a!r}'", + "f'foo({a})'", +] + + +def main(): + if __name__ != '__main__': + return + if sys.argv[1:] == ['-g']: + for statements, kind in ((exec_tests, "exec"), (single_tests, "single"), + (eval_tests, "eval")): + print(kind+"_results = [") + for statement in statements: + tree = ast.parse(statement, "?", kind) + print("%r," % (to_tuple(tree),)) + print("]") + print("main()") + raise SystemExit + +#### EVERYTHING BELOW IS GENERATED BY python Lib/test/test_ast/snippets.py -g ##### +exec_results = [ +('Module', [('Expr', (1, 0, 1, 18), ('Constant', (1, 0, 1, 18), 'module docstring', None))], []), +('Module', [('FunctionDef', (1, 
0, 1, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 9, 1, 13))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 29), ('Constant', (1, 9, 1, 29), 'function docstring', None))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, []), [('Pass', (1, 10, 1, 14))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 0, None)]), [('Pass', (1, 12, 1, 16))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 11), 'args', None, None), [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 23), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 16), 'args', ('Starred', (1, 13, 1, 16), ('Name', (1, 14, 1, 16), 'Ts', ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 19, 1, 23))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Constant', (1, 25, 1, 28), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Starred', (1, 25, 1, 28), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 21), 'f', ('arguments', [], [], None, [], [], ('arg', (1, 8, 1, 14), 'kwargs', None, None), []), [('Pass', (1, 17, 1, 21))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 71), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None), ('arg', (1, 9, 1, 10), 'b', None, None), ('arg', (1, 14, 1, 15), 'c', None, None), ('arg', (1, 22, 1, 23), 'd', None, None), ('arg', (1, 28, 1, 29), 'e', None, None)], ('arg', (1, 35, 1, 39), 'args', None, None), [('arg', (1, 41, 1, 42), 'f', None, None)], [('Constant', (1, 43, 1, 45), 42, None)], ('arg', (1, 49, 1, 55), 'kwargs', None, None), [('Constant', (1, 11, 1, 12), 1, None), ('Constant', (1, 16, 1, 20), None, None), ('List', (1, 24, 1, 26), [], ('Load',)), ('Dict', (1, 30, 1, 32), [], [])]), [('Expr', (1, 58, 1, 71), ('Constant', (1, 58, 1, 71), 'doc for f()', None))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 27), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 23, 1, 27))], [], ('Subscript', (1, 11, 1, 21), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 20), [('Starred', (1, 17, 1, 20), ('Name', (1, 18, 1, 20), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 28, 1, 32))], [], ('Subscript', (1, 11, 1, 26), ('Name', (1, 11, 1, 
16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 25), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 25), ('Name', (1, 23, 1, 25), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 45), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 41, 1, 45))], [], ('Subscript', (1, 11, 1, 39), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 38), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 38), ('Subscript', (1, 23, 1, 38), ('Name', (1, 23, 1, 28), 'tuple', ('Load',)), ('Tuple', (1, 29, 1, 37), [('Name', (1, 29, 1, 32), 'int', ('Load',)), ('Constant', (1, 34, 1, 37), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), +('Module', [('ClassDef', (1, 0, 1, 12), 'C', [], [], [('Pass', (1, 8, 1, 12))], [], [])], []), +('Module', [('ClassDef', (1, 0, 1, 32), 'C', [], [], [('Expr', (1, 9, 1, 32), ('Constant', (1, 9, 1, 32), 'docstring for class C', None))], [], [])], []), +('Module', [('ClassDef', (1, 0, 1, 21), 'C', [('Name', (1, 8, 1, 14), 'object', ('Load',))], [], [('Pass', (1, 17, 1, 21))], [], [])], []), +('Module', [('ClassDef', (1, 0, 1, 19), 'C', [('Name', (1, 8, 1, 9), 'A', ('Load',)), ('Name', (1, 11, 1, 12), 'B', ('Load',))], [], [('Pass', (1, 15, 1, 19))], [], [])], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 16), ('Constant', (1, 15, 1, 16), 1, None))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 14), None)], [], None, None, [])], []), +('Module', [('Delete', (1, 0, 1, 5), [('Name', (1, 4, 1, 5), 'v', ('Del',))])], []), +('Module', [('Assign', (1, 0, 1, 5), [('Name', (1, 0, 1, 1), 'v', ('Store',))], ('Constant', (1, 4, 1, 5), 1, None), None)], []), +('Module', [('Assign', (1, 0, 1, 7), [('Tuple', (1, 0, 1, 3), [('Name', (1, 0, 1, 1), 'a', ('Store',)), ('Name', (1, 2, 1, 3), 'b', ('Store',))], ('Store',))], ('Name', (1, 6, 1, 7), 'c', ('Load',)), None)], []), +('Module', [('Assign', (1, 0, 1, 9), [('Tuple', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), +('Module', [('Assign', (1, 0, 1, 9), [('List', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), +('Module', [('Assign', (1, 0, 1, 8), [('Subscript', (1, 0, 1, 4), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Store',))], ('Name', (1, 7, 1, 8), 'c', ('Load',)), None)], []), +('Module', [('AnnAssign', (1, 0, 1, 13), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 13), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 12), [('Starred', (1, 9, 1, 12), ('Name', (1, 10, 1, 12), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), +('Module', [('AnnAssign', (1, 0, 1, 18), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 18), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 17), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 17), ('Name', (1, 15, 1, 17), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), +('Module', [('AnnAssign', (1, 0, 1, 31), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 31), 
('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 30), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 30), ('Subscript', (1, 15, 1, 30), ('Name', (1, 15, 1, 20), 'tuple', ('Load',)), ('Tuple', (1, 21, 1, 29), [('Name', (1, 21, 1, 24), 'str', ('Load',)), ('Constant', (1, 26, 1, 29), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Add',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Sub',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mult',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('MatMult',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Div',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mod',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Pow',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('LShift',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('RShift',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitOr',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitXor',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitAnd',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('FloorDiv',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('For', (1, 0, 1, 15), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (1, 11, 1, 15))], [], None)], []), +('Module', [('For', (1, 0, 4, 6), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))], None)], []), +('Module', [('While', (1, 0, 1, 12), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (1, 8, 1, 12))], [])], []), +('Module', [('While', (1, 0, 4, 6), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []), +('Module', [('If', (1, 0, 1, 9), ('Name', (1, 3, 1, 4), 'v', ('Load',)), [('Pass', (1, 5, 1, 9))], [])], []), +('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 4, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [])])], []), +('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []), +('Module', [('If', (1, 0, 6, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 6, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('Pass', (6, 2, 6, 6))])])], []), +('Module', [('If', (1, 0, 10, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', 
(2, 2, 2, 6))], [('If', (3, 0, 10, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('If', (5, 0, 10, 6), ('Name', (5, 5, 5, 6), 'b', ('Load',)), [('Pass', (6, 2, 6, 6))], [('If', (7, 0, 10, 6), ('Name', (7, 5, 7, 6), 'b', ('Load',)), [('Pass', (8, 2, 8, 6))], [('Pass', (10, 2, 10, 6))])])])])], []), +('Module', [('With', (1, 0, 1, 12), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None)], [('Pass', (1, 8, 1, 12))], None)], []), +('Module', [('With', (1, 0, 1, 15), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None), ('withitem', ('Name', (1, 8, 1, 9), 'y', ('Load',)), None)], [('Pass', (1, 11, 1, 15))], None)], []), +('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',)))], [('Pass', (1, 13, 1, 17))], None)], []), +('Module', [('With', (1, 0, 1, 25), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',))), ('withitem', ('Name', (1, 13, 1, 14), 'z', ('Load',)), ('Name', (1, 18, 1, 19), 'q', ('Store',)))], [('Pass', (1, 21, 1, 25))], None)], []), +('Module', [('With', (1, 0, 1, 19), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), ('Name', (1, 11, 1, 12), 'y', ('Store',)))], [('Pass', (1, 15, 1, 19))], None)], []), +('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), None), ('withitem', ('Name', (1, 9, 1, 10), 'y', ('Load',)), None)], [('Pass', (1, 13, 1, 17))], None)], []), +('Module', [('Raise', (1, 0, 1, 5), None, None)], []), +('Module', [('Raise', (1, 0, 1, 25), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), None)], []), +('Module', [('Raise', (1, 0, 1, 15), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), None)], []), +('Module', [('Raise', (1, 0, 1, 35), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), ('Constant', (1, 31, 1, 35), None, None))], []), +('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [], [], [('Pass', (4, 2, 4, 6))])], []), +('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), +('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), +('Module', [('TryStar', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', 
[('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), +('Module', [('Assert', (1, 0, 1, 8), ('Name', (1, 7, 1, 8), 'v', ('Load',)), None)], []), +('Module', [('Assert', (1, 0, 1, 19), ('Name', (1, 7, 1, 8), 'v', ('Load',)), ('Constant', (1, 10, 1, 19), 'message', None))], []), +('Module', [('Import', (1, 0, 1, 10), [('alias', (1, 7, 1, 10), 'sys', None)])], []), +('Module', [('Import', (1, 0, 1, 17), [('alias', (1, 7, 1, 17), 'foo', 'bar')])], []), +('Module', [('ImportFrom', (1, 0, 1, 22), 'sys', [('alias', (1, 16, 1, 22), 'x', 'y')], 0)], []), +('Module', [('ImportFrom', (1, 0, 1, 17), 'sys', [('alias', (1, 16, 1, 17), 'v', None)], 0)], []), +('Module', [('Global', (1, 0, 1, 8), ['v'])], []), +('Module', [('Expr', (1, 0, 1, 1), ('Constant', (1, 0, 1, 1), 1, None))], []), +('Module', [('Pass', (1, 0, 1, 4))], []), +('Module', [('For', (1, 0, 1, 16), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Break', (1, 11, 1, 16))], [], None)], []), +('Module', [('For', (1, 0, 1, 19), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Continue', (1, 11, 1, 19))], [], None)], []), +('Module', [('For', (1, 0, 1, 18), ('Tuple', (1, 4, 1, 7), [('Name', (1, 4, 1, 5), 'a', ('Store',)), ('Name', (1, 6, 1, 7), 'b', ('Store',))], ('Store',)), ('Name', (1, 11, 1, 12), 'c', ('Load',)), [('Pass', (1, 14, 1, 18))], [], None)], []), +('Module', [('For', (1, 0, 1, 20), ('Tuple', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), +('Module', [('For', (1, 0, 1, 20), ('List', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), +('Module', [('Expr', (1, 0, 11, 5), ('GeneratorExp', (1, 0, 11, 5), ('Tuple', (2, 4, 6, 5), [('Name', (3, 4, 3, 6), 'Aa', ('Load',)), ('Name', (5, 7, 5, 9), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4, 10, 6), [('Name', (8, 4, 8, 6), 'Aa', ('Store',)), ('Name', (10, 4, 10, 6), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10, 10, 12), 'Cc', ('Load',)), [], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 34), ('DictComp', (1, 0, 1, 34), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Name', (1, 11, 1, 12), 'w', ('Store',)), ('Name', (1, 16, 1, 17), 'x', ('Load',)), [], 0), ('comprehension', ('Name', (1, 22, 1, 23), 'm', ('Store',)), ('Name', (1, 27, 1, 28), 'p', ('Load',)), [('Name', (1, 32, 1, 33), 'g', ('Load',))], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 20), ('DictComp', (1, 0, 1, 20), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'v', ('Store',)), ('Name', (1, 13, 1, 14), 'w', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'x', ('Load',)), [], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 19), ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 12, 1, 13), 'x', ('Load',)), [('Name', (1, 17, 1, 18), 'g', ('Load',))], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 16), ('SetComp', (1, 0, 1, 16), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7, 1, 10), [('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 9, 
1, 10), 'm', ('Store',))], ('Store',)), ('Name', (1, 14, 1, 15), 'x', ('Load',)), [], 0)]))], []), +('Module', [('AsyncFunctionDef', (1, 0, 3, 18), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 17), ('Constant', (2, 1, 2, 17), 'async function', None)), ('Expr', (3, 1, 3, 18), ('Await', (3, 1, 3, 18), ('Call', (3, 7, 3, 18), ('Name', (3, 7, 3, 16), 'something', ('Load',)), [], [])))], [], None, None, [])], []), +('Module', [('AsyncFunctionDef', (1, 0, 3, 8), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncFor', (2, 1, 3, 8), ('Name', (2, 11, 2, 12), 'e', ('Store',)), ('Name', (2, 16, 2, 17), 'i', ('Load',)), [('Expr', (2, 19, 2, 20), ('Constant', (2, 19, 2, 20), 1, None))], [('Expr', (3, 7, 3, 8), ('Constant', (3, 7, 3, 8), 2, None))], None)], [], None, None, [])], []), +('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncWith', (2, 1, 2, 21), [('withitem', ('Name', (2, 12, 2, 13), 'a', ('Load',)), ('Name', (2, 17, 2, 18), 'b', ('Store',)))], [('Expr', (2, 20, 2, 21), ('Constant', (2, 20, 2, 21), 1, None))], None)], [], None, None, [])], []), +('Module', [('Expr', (1, 0, 1, 14), ('Dict', (1, 0, 1, 14), [None, ('Constant', (1, 10, 1, 11), 2, None)], [('Dict', (1, 3, 1, 8), [('Constant', (1, 4, 1, 5), 1, None)], [('Constant', (1, 6, 1, 7), 2, None)]), ('Constant', (1, 12, 1, 13), 3, None)]))], []), +('Module', [('Expr', (1, 0, 1, 12), ('Set', (1, 0, 1, 12), [('Starred', (1, 1, 1, 8), ('Set', (1, 2, 1, 8), [('Constant', (1, 3, 1, 4), 1, None), ('Constant', (1, 6, 1, 7), 2, None)]), ('Load',)), ('Constant', (1, 10, 1, 11), 3, None)]))], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 16), ('Yield', (1, 9, 1, 16), ('Constant', (1, 15, 1, 16), 1, None)))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 22), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 22), ('YieldFrom', (1, 9, 1, 22), ('List', (1, 20, 1, 22), [], ('Load',))))], [], None, None, [])], []), +('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 21), ('ListComp', (2, 1, 2, 21), ('Name', (2, 2, 2, 3), 'i', ('Load',)), [('comprehension', ('Name', (2, 14, 2, 15), 'b', ('Store',)), ('Name', (2, 19, 2, 20), 'c', ('Load',)), [], 1)]))], [], None, None, [])], []), +('Module', [('FunctionDef', (4, 0, 4, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), +('Module', [('AsyncFunctionDef', (4, 0, 4, 19), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 15, 4, 19))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), +('Module', [('ClassDef', (4, 0, 4, 13), 'C', [], [], [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], [])], []), +('Module', [('FunctionDef', (2, 0, 
2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Call', (1, 1, 1, 19), ('Name', (1, 1, 1, 5), 'deco', ('Load',)), [('GeneratorExp', (1, 5, 1, 19), ('Name', (1, 6, 1, 7), 'a', ('Load',)), [('comprehension', ('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 17, 1, 18), 'b', ('Load',)), [], 0)])], [])], None, None, [])], []), +('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Attribute', (1, 1, 1, 6), ('Attribute', (1, 1, 1, 4), ('Name', (1, 1, 1, 2), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',))], None, None, [])], []), +('Module', [('Expr', (1, 0, 1, 8), ('NamedExpr', (1, 1, 1, 7), ('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Constant', (1, 6, 1, 7), 1, None)))], []), +('Module', [('If', (1, 0, 1, 19), ('NamedExpr', (1, 3, 1, 13), ('Name', (1, 3, 1, 4), 'a', ('Store',)), ('Call', (1, 8, 1, 13), ('Name', (1, 8, 1, 11), 'foo', ('Load',)), [], [])), [('Pass', (1, 15, 1, 19))], [])], []), +('Module', [('While', (1, 0, 1, 22), ('NamedExpr', (1, 6, 1, 16), ('Name', (1, 6, 1, 7), 'a', ('Store',)), ('Call', (1, 11, 1, 16), ('Name', (1, 11, 1, 14), 'foo', ('Load',)), [], [])), [('Pass', (1, 18, 1, 22))], [])], []), +('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None), ('arg', (1, 15, 1, 16), 'd', None, None), ('arg', (1, 18, 1, 19), 'e', None, None)], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], None, []), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 39), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], ('arg', (1, 26, 1, 32), 'kwargs', None, None), []), [('Pass', (1, 35, 1, 39))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 20), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None)]), [('Pass', (1, 16, 1, 20))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None), ('arg', (1, 19, 1, 20), 'c', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None), ('Constant', (1, 21, 1, 22), 4, None)]), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 28, 1, 32))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 30), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', 
None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 26, 1, 30))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 42), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], ('arg', (1, 29, 1, 35), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 38, 1, 42))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 40), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], ('arg', (1, 27, 1, 33), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 36, 1, 40))], [], None, None, [])], []), +('Module', [('TypeAlias', (1, 0, 1, 12), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [], ('Name', (1, 9, 1, 12), 'int', ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 15), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None)], ('Name', (1, 12, 1, 15), 'int', ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 32), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None), ('TypeVarTuple', (1, 10, 1, 13), 'Ts', None), ('ParamSpec', (1, 15, 1, 18), 'P', None)], ('Tuple', (1, 22, 1, 32), [('Name', (1, 23, 1, 24), 'T', ('Load',)), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Name', (1, 30, 1, 31), 'P', ('Load',))], ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 37), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 13), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), None), ('TypeVarTuple', (1, 15, 1, 18), 'Ts', None), ('ParamSpec', (1, 20, 1, 23), 'P', None)], ('Tuple', (1, 27, 1, 37), [('Name', (1, 28, 1, 29), 'T', ('Load',)), ('Name', (1, 31, 1, 33), 'Ts', ('Load',)), ('Name', (1, 35, 1, 36), 'P', ('Load',))], ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 44), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 20), 'T', ('Tuple', (1, 10, 1, 20), [('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Name', (1, 16, 1, 19), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 22, 1, 25), 'Ts', None), ('ParamSpec', (1, 27, 1, 30), 'P', None)], ('Tuple', (1, 34, 1, 44), [('Name', (1, 35, 1, 36), 'T', ('Load',)), ('Name', (1, 38, 1, 40), 'Ts', ('Load',)), ('Name', (1, 42, 1, 43), 'P', ('Load',))], ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 48), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 17), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Constant', (1, 16, 1, 17), 1, None)), ('TypeVarTuple', (1, 19, 1, 26), 'Ts', ('Constant', (1, 25, 1, 26), 2, None)), ('ParamSpec', (1, 28, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))], ('Tuple', (1, 38, 1, 48), [('Name', (1, 39, 1, 40), 'T', ('Load',)), ('Name', (1, 42, 1, 44), 'Ts', ('Load',)), ('Name', (1, 46, 1, 47), 'P', ('Load',))], ('Load',)))], []), +('Module', [('ClassDef', (1, 0, 1, 16), 'X', [], [], [('Pass', (1, 12, 1, 16))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None)])], []), +('Module', [('ClassDef', (1, 0, 1, 26), 'X', [], [], [('Pass', (1, 22, 1, 26))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None), ('TypeVarTuple', (1, 11, 1, 14), 'Ts', None), ('ParamSpec', (1, 16, 1, 19), 'P', 
None)])], []), +('Module', [('ClassDef', (1, 0, 1, 31), 'X', [], [], [('Pass', (1, 27, 1, 31))], [], [('TypeVar', (1, 8, 1, 14), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), None), ('TypeVarTuple', (1, 16, 1, 19), 'Ts', None), ('ParamSpec', (1, 21, 1, 24), 'P', None)])], []), +('Module', [('ClassDef', (1, 0, 1, 38), 'X', [], [], [('Pass', (1, 34, 1, 38))], [], [('TypeVar', (1, 8, 1, 21), 'T', ('Tuple', (1, 11, 1, 21), [('Name', (1, 12, 1, 15), 'int', ('Load',)), ('Name', (1, 17, 1, 20), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 23, 1, 26), 'Ts', None), ('ParamSpec', (1, 28, 1, 31), 'P', None)])], []), +('Module', [('ClassDef', (1, 0, 1, 43), 'X', [], [], [('Pass', (1, 39, 1, 43))], [], [('TypeVar', (1, 8, 1, 18), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Constant', (1, 17, 1, 18), 1, None)), ('TypeVarTuple', (1, 20, 1, 27), 'Ts', ('Constant', (1, 26, 1, 27), 2, None)), ('ParamSpec', (1, 29, 1, 36), 'P', ('Constant', (1, 35, 1, 36), 3, None))])], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 12, 1, 16))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None), ('TypeVarTuple', (1, 9, 1, 12), 'Ts', None), ('ParamSpec', (1, 14, 1, 17), 'P', None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 31), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 27, 1, 31))], [], None, None, [('TypeVar', (1, 6, 1, 12), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), None), ('TypeVarTuple', (1, 14, 1, 17), 'Ts', None), ('ParamSpec', (1, 19, 1, 22), 'P', None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 38), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 34, 1, 38))], [], None, None, [('TypeVar', (1, 6, 1, 19), 'T', ('Tuple', (1, 9, 1, 19), [('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Name', (1, 15, 1, 18), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 21, 1, 24), 'Ts', None), ('ParamSpec', (1, 26, 1, 29), 'P', None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 43), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 39, 1, 43))], [], None, None, [('TypeVar', (1, 6, 1, 16), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Constant', (1, 15, 1, 16), 1, None)), ('TypeVarTuple', (1, 18, 1, 25), 'Ts', ('Constant', (1, 24, 1, 25), 2, None)), ('ParamSpec', (1, 27, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))])], []), +('Module', [('Match', (1, 0, 3, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))])])], []), +('Module', [('Match', (1, 0, 5, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))]), ('match_case', ('MatchAs', (4, 6, 4, 7), None, None), None, [('Pass', (5, 2, 5, 6))])])], []), +] +single_results = [ +('Interactive', [('Expr', (1, 0, 1, 3), ('BinOp', (1, 0, 1, 3), ('Constant', (1, 0, 1, 1), 1, None), ('Add',), ('Constant', (1, 2, 1, 3), 2, None)))]), +] +eval_results = [ +('Expression', ('Constant', (1, 0, 1, 4), None, None)), +('Expression', ('Constant', (1, 0, 1, 4), True, None)), +('Expression', ('Constant', (1, 0, 1, 5), False, None)), +('Expression', ('BoolOp', (1, 0, 1, 7), ('And',), [('Name', (1, 0, 1, 1), 'a', 
('Load',)), ('Name', (1, 6, 1, 7), 'b', ('Load',))])), +('Expression', ('BoolOp', (1, 0, 1, 6), ('Or',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Sub',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Div',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('MatMult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('FloorDiv',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Pow',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mod',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('RShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('LShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitXor',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitOr',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitAnd',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 5), ('Not',), ('Name', (1, 4, 1, 5), 'v', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 2), ('UAdd',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 2), ('USub',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 2), ('Invert',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), +('Expression', ('Lambda', (1, 0, 1, 11), ('arguments', [], [], None, [], [], None, []), ('Constant', (1, 7, 1, 11), None, None))), +('Expression', ('Dict', (1, 0, 1, 7), [('Constant', (1, 2, 1, 3), 1, None)], [('Constant', (1, 4, 1, 5), 2, None)])), +('Expression', ('Dict', (1, 0, 1, 2), [], [])), +('Expression', ('Set', (1, 0, 1, 7), [('Constant', (1, 1, 1, 5), None, None)])), +('Expression', ('Dict', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None)], [('Constant', (4, 10, 4, 11), 2, None)])), +('Expression', ('List', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)], ('Load',))), +('Expression', ('Tuple', (1, 0, 4, 6), [('Constant', (2, 6, 2, 7), 1, None)], ('Load',))), +('Expression', ('Set', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)])), +('Expression', ('ListComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 
13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), +('Expression', ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), +('Expression', ('DictComp', (1, 0, 1, 25), ('Name', (1, 1, 1, 2), 'k', ('Load',)), ('Name', (1, 4, 1, 5), 'v', ('Load',)), [('comprehension', ('Tuple', (1, 10, 1, 14), [('Name', (1, 10, 1, 11), 'k', ('Store',)), ('Name', (1, 13, 1, 14), 'v', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [('Name', (1, 23, 1, 24), 'd', ('Load',))], 0)])), +('Expression', ('ListComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), +('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('SetComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), +('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), 
[], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('Compare', (1, 0, 1, 9), ('Constant', (1, 0, 1, 1), 1, None), [('Lt',), ('Lt',)], [('Constant', (1, 4, 1, 5), 2, None), ('Constant', (1, 8, 1, 9), 3, None)])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Eq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('LtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('GtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotEq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Is',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('IsNot',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('In',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotIn',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])), +('Expression', ('Call', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [], [])), +('Expression', ('Call', (1, 0, 1, 17), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Constant', (1, 2, 1, 3), 1, None), ('Constant', (1, 4, 1, 5), 2, None), ('Starred', (1, 10, 1, 12), ('Name', (1, 11, 1, 12), 'd', ('Load',)), ('Load',))], [('keyword', (1, 6, 1, 9), 'c', ('Constant', (1, 8, 1, 9), 3, None)), ('keyword', (1, 13, 1, 16), None, ('Name', (1, 15, 1, 16), 'e', ('Load',)))])), +('Expression', ('Call', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Starred', (1, 2, 1, 9), ('List', (1, 3, 1, 9), [('Constant', (1, 4, 1, 5), 0, None), ('Constant', (1, 7, 1, 8), 1, None)], ('Load',)), ('Load',))], [])), +('Expression', ('Call', (1, 0, 1, 15), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('GeneratorExp', (1, 1, 1, 15), ('Name', (1, 2, 1, 3), 'a', ('Load',)), [('comprehension', ('Name', (1, 8, 1, 9), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Load',)), [], 0)])], [])), +('Expression', ('Constant', (1, 0, 1, 2), 10, None)), +('Expression', ('Constant', (1, 0, 1, 2), 1j, None)), +('Expression', ('Constant', (1, 0, 1, 8), 'string', None)), +('Expression', ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Slice', (1, 2, 1, 5), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Name', (1, 4, 1, 5), 'c', ('Load',)), None), ('Load',))), +('Expression', ('Name', (1, 0, 1, 1), 'v', ('Load',))), +('Expression', ('List', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), +('Expression', ('List', (1, 0, 1, 2), [], ('Load',))), +('Expression', ('Tuple', (1, 0, 1, 5), [('Constant', (1, 0, 1, 1), 1, None), ('Constant', (1, 2, 1, 3), 2, None), ('Constant', (1, 4, 1, 5), 3, 
None)], ('Load',))), +('Expression', ('Tuple', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), +('Expression', ('Tuple', (1, 0, 1, 2), [], ('Load',))), +('Expression', ('Call', (1, 0, 1, 17), ('Attribute', (1, 0, 1, 7), ('Attribute', (1, 0, 1, 5), ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8, 1, 16), ('Attribute', (1, 8, 1, 11), ('Name', (1, 8, 1, 9), 'a', ('Load',)), 'b', ('Load',)), ('Slice', (1, 12, 1, 15), ('Constant', (1, 12, 1, 13), 1, None), ('Constant', (1, 14, 1, 15), 2, None), None), ('Load',))], [])), +('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), ('Constant', (1, 4, 1, 5), 1, None), None, None), ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), None, ('Constant', (1, 5, 1, 6), 1, None), None), ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 8), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 7), None, None, ('Constant', (1, 6, 1, 7), 1, None)), ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 10), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 9), ('Constant', (1, 4, 1, 5), 1, None), ('Constant', (1, 6, 1, 7), 1, None), ('Constant', (1, 8, 1, 9), 1, None)), ('Load',))), +('Expression', ('IfExp', (1, 0, 1, 21), ('Name', (1, 9, 1, 10), 'x', ('Load',)), ('Call', (1, 0, 1, 5), ('Name', (1, 0, 1, 3), 'foo', ('Load',)), [], []), ('Call', (1, 16, 1, 21), ('Name', (1, 16, 1, 19), 'bar', ('Load',)), [], []))), +('Expression', ('JoinedStr', (1, 0, 1, 6), [('FormattedValue', (1, 2, 1, 5), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, None)])), +('Expression', ('JoinedStr', (1, 0, 1, 10), [('FormattedValue', (1, 2, 1, 9), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, ('JoinedStr', (1, 4, 1, 8), [('Constant', (1, 5, 1, 8), '.2f', None)]))])), +('Expression', ('JoinedStr', (1, 0, 1, 8), [('FormattedValue', (1, 2, 1, 7), ('Name', (1, 3, 1, 4), 'a', ('Load',)), 114, None)])), +('Expression', ('JoinedStr', (1, 0, 1, 11), [('Constant', (1, 2, 1, 6), 'foo(', None), ('FormattedValue', (1, 6, 1, 9), ('Name', (1, 7, 1, 8), 'a', ('Load',)), -1, None), ('Constant', (1, 9, 1, 10), ')', None)])), +] +main() diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast/test_ast.py similarity index 50% rename from Lib/test/test_ast.py rename to Lib/test/test_ast/test_ast.py index 93bd5dec6ea..b789b721c52 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast/test_ast.py @@ -13,6 +13,7 @@ import weakref from functools import partial from textwrap import dedent + try: import _testinternalcapi except ImportError: @@ -22,277 +23,11 @@ from test.support.import_helper import import_fresh_module from test.support import os_helper, script_helper from test.support.ast_helper import ASTTestMixin +from test.test_ast.utils import to_tuple +from test.test_ast.snippets import ( + eval_tests, eval_results, exec_tests, exec_results, single_tests, single_results +) -def to_tuple(t): - if t is None or isinstance(t, (str, int, complex, float, bytes)) or t is Ellipsis: - return t - elif isinstance(t, list): - return [to_tuple(e) for e in t] - result = [t.__class__.__name__] - if hasattr(t, 'lineno') and hasattr(t, 'col_offset'): - 
result.append((t.lineno, t.col_offset)) - if hasattr(t, 'end_lineno') and hasattr(t, 'end_col_offset'): - result[-1] += (t.end_lineno, t.end_col_offset) - if t._fields is None: - return tuple(result) - for f in t._fields: - result.append(to_tuple(getattr(t, f))) - return tuple(result) - - -# These tests are compiled through "exec" -# There should be at least one test per statement -exec_tests = [ - # None - "None", - # Module docstring - "'module docstring'", - # FunctionDef - "def f(): pass", - # FunctionDef with docstring - "def f(): 'function docstring'", - # FunctionDef with arg - "def f(a): pass", - # FunctionDef with arg and default value - "def f(a=0): pass", - # FunctionDef with varargs - "def f(*args): pass", - # FunctionDef with varargs as TypeVarTuple - "def f(*args: *Ts): pass", - # FunctionDef with varargs as unpacked Tuple - "def f(*args: *tuple[int, ...]): pass", - # FunctionDef with varargs as unpacked Tuple *and* TypeVarTuple - "def f(*args: *tuple[int, *Ts]): pass", - # FunctionDef with kwargs - "def f(**kwargs): pass", - # FunctionDef with all kind of args and docstring - "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): 'doc for f()'", - # FunctionDef with type annotation on return involving unpacking - "def f() -> tuple[*Ts]: pass", - "def f() -> tuple[int, *Ts]: pass", - "def f() -> tuple[int, *tuple[int, ...]]: pass", - # ClassDef - "class C:pass", - # ClassDef with docstring - "class C: 'docstring for class C'", - # ClassDef, new style class - "class C(object): pass", - # Return - "def f():return 1", - # Delete - "del v", - # Assign - "v = 1", - "a,b = c", - "(a,b) = c", - "[a,b] = c", - # AnnAssign with unpacked types - "x: tuple[*Ts]", - "x: tuple[int, *Ts]", - "x: tuple[int, *tuple[str, ...]]", - # AugAssign - "v += 1", - # For - "for v in v:pass", - # While - "while v:pass", - # If - "if v:pass", - # If-Elif - "if a:\n pass\nelif b:\n pass", - # If-Elif-Else - "if a:\n pass\nelif b:\n pass\nelse:\n pass", - # With - "with x as y: pass", - "with x as y, z as q: pass", - "with (x as y): pass", - "with (x, y): pass", - # Raise - "raise Exception('string')", - # TryExcept - "try:\n pass\nexcept Exception:\n pass", - # TryFinally - "try:\n pass\nfinally:\n pass", - # TryStarExcept - "try:\n pass\nexcept* Exception:\n pass", - # Assert - "assert v", - # Import - "import sys", - # ImportFrom - "from sys import v", - # Global - "global v", - # Expr - "1", - # Pass, - "pass", - # Break - "for v in v:break", - # Continue - "for v in v:continue", - # for statements with naked tuples (see http://bugs.python.org/issue6704) - "for a,b in c: pass", - "for (a,b) in c: pass", - "for [a,b] in c: pass", - # Multiline generator expression (test for .lineno & .col_offset) - """( - ( - Aa - , - Bb - ) - for - Aa - , - Bb in Cc - )""", - # dictcomp - "{a : b for w in x for m in p if g}", - # dictcomp with naked tuple - "{a : b for v,w in x}", - # setcomp - "{r for l in x if g}", - # setcomp with naked tuple - "{r for l,m in x}", - # AsyncFunctionDef - "async def f():\n 'async function'\n await something()", - # AsyncFor - "async def f():\n async for e in i: 1\n else: 2", - # AsyncWith - "async def f():\n async with a as b: 1", - # PEP 448: Additional Unpacking Generalizations - "{**{1:2}, 2:3}", - "{*{1, 2}, 3}", - # Asynchronous comprehensions - "async def f():\n [i async for b in c]", - # Decorated FunctionDef - "@deco1\n@deco2()\n@deco3(1)\ndef f(): pass", - # Decorated AsyncFunctionDef - "@deco1\n@deco2()\n@deco3(1)\nasync def f(): pass", - # Decorated ClassDef - 
"@deco1\n@deco2()\n@deco3(1)\nclass C: pass", - # Decorator with generator argument - "@deco(a for a in b)\ndef f(): pass", - # Decorator with attribute - "@a.b.c\ndef f(): pass", - # Simple assignment expression - "(a := 1)", - # Positional-only arguments - "def f(a, /,): pass", - "def f(a, /, c, d, e): pass", - "def f(a, /, c, *, d, e): pass", - "def f(a, /, c, *, d, e, **kwargs): pass", - # Positional-only arguments with defaults - "def f(a=1, /,): pass", - "def f(a=1, /, b=2, c=4): pass", - "def f(a=1, /, b=2, *, c=4): pass", - "def f(a=1, /, b=2, *, c): pass", - "def f(a=1, /, b=2, *, c=4, **kwargs): pass", - "def f(a=1, /, b=2, *, c, **kwargs): pass", - # Type aliases - "type X = int", - "type X[T] = int", - "type X[T, *Ts, **P] = (T, Ts, P)", - "type X[T: int, *Ts, **P] = (T, Ts, P)", - "type X[T: (int, str), *Ts, **P] = (T, Ts, P)", - "type X[T: int = 1, *Ts = 2, **P =3] = (T, Ts, P)", - # Generic classes - "class X[T]: pass", - "class X[T, *Ts, **P]: pass", - "class X[T: int, *Ts, **P]: pass", - "class X[T: (int, str), *Ts, **P]: pass", - "class X[T: int = 1, *Ts = 2, **P = 3]: pass", - # Generic functions - "def f[T](): pass", - "def f[T, *Ts, **P](): pass", - "def f[T: int, *Ts, **P](): pass", - "def f[T: (int, str), *Ts, **P](): pass", - "def f[T: int = 1, *Ts = 2, **P = 3](): pass", -] - -# These are compiled through "single" -# because of overlap with "eval", it just tests what -# can't be tested with "eval" -single_tests = [ - "1+2" -] - -# These are compiled through "eval" -# It should test all expressions -eval_tests = [ - # None - "None", - # BoolOp - "a and b", - # BinOp - "a + b", - # UnaryOp - "not v", - # Lambda - "lambda:None", - # Dict - "{ 1:2 }", - # Empty dict - "{}", - # Set - "{None,}", - # Multiline dict (test for .lineno & .col_offset) - """{ - 1 - : - 2 - }""", - # ListComp - "[a for b in c if d]", - # GeneratorExp - "(a for b in c if d)", - # Comprehensions with multiple for targets - "[(a,b) for a,b in c]", - "[(a,b) for (a,b) in c]", - "[(a,b) for [a,b] in c]", - "{(a,b) for a,b in c}", - "{(a,b) for (a,b) in c}", - "{(a,b) for [a,b] in c}", - "((a,b) for a,b in c)", - "((a,b) for (a,b) in c)", - "((a,b) for [a,b] in c)", - # Yield - yield expressions can't work outside a function - # - # Compare - "1 < 2 < 3", - # Call - "f(1,2,c=3,*d,**e)", - # Call with multi-character starred - "f(*[0, 1])", - # Call with a generator argument - "f(a for a in b)", - # Num - "10", - # Str - "'string'", - # Attribute - "a.b", - # Subscript - "a[b:c]", - # Name - "v", - # List - "[1,2,3]", - # Empty list - "[]", - # Tuple - "1,2,3", - # Tuple - "(1,2,3)", - # Empty tuple - "()", - # Combination - "a.b.c.d(a.b[1:2])", -] - -# TODO: expr_context, slice, boolop, operator, unaryop, cmpop, comprehension -# excepthandler, arguments, keywords, alias class AST_Tests(unittest.TestCase): maxDiff = None @@ -302,7 +37,7 @@ def _is_ast_node(self, name, node): return False if "ast" not in node.__module__: return False - return name != 'AST' and name[0].isupper() + return name != "AST" and name[0].isupper() def _assertTrueorder(self, ast_node, parent_pos): if not isinstance(ast_node, ast.AST) or ast_node._fields is None: @@ -315,7 +50,7 @@ def _assertTrueorder(self, ast_node, parent_pos): value = getattr(ast_node, name) if isinstance(value, list): first_pos = parent_pos - if value and name == 'decorator_list': + if value and name == "decorator_list": first_pos = (value[0].lineno, value[0].col_offset) for child in value: self._assertTrueorder(child, first_pos) @@ -340,6 +75,7 @@ def 
test_AST_objects(self): def test_AST_garbage_collection(self): class X: pass + a = ast.AST() a.x = X() a.x.a = a @@ -349,9 +85,11 @@ class X: self.assertIsNone(ref()) def test_snippets(self): - for input, output, kind in ((exec_tests, exec_results, "exec"), - (single_tests, single_results, "single"), - (eval_tests, eval_results, "eval")): + for input, output, kind in ( + (exec_tests, exec_results, "exec"), + (single_tests, single_results, "single"), + (eval_tests, eval_results, "eval"), + ): for i, o in zip(input, output): with self.subTest(action="parsing", input=i): ast_tree = compile(i, "?", kind, ast.PyCF_ONLY_AST) @@ -365,11 +103,11 @@ def test_ast_validation(self): snippets_to_validate = exec_tests + single_tests + eval_tests for snippet in snippets_to_validate: tree = ast.parse(snippet) - compile(tree, '', 'exec') + compile(tree, "", "exec") def test_optimization_levels__debug__(self): - cases = [(-1, '__debug__'), (0, '__debug__'), (1, False), (2, False)] - for (optval, expected) in cases: + cases = [(-1, "__debug__"), (0, "__debug__"), (1, False), (2, False)] + for optval, expected in cases: with self.subTest(optval=optval, expected=expected): res1 = ast.parse("__debug__", optimize=optval) res2 = ast.parse(ast.parse("__debug__"), optimize=optval) @@ -383,15 +121,21 @@ def test_optimization_levels__debug__(self): self.assertEqual(res.body[0].value.id, expected) def test_optimization_levels_const_folding(self): - folded = ('Expr', (1, 0, 1, 5), ('Constant', (1, 0, 1, 5), 3, None)) - not_folded = ('Expr', (1, 0, 1, 5), - ('BinOp', (1, 0, 1, 5), - ('Constant', (1, 0, 1, 1), 1, None), - ('Add',), - ('Constant', (1, 4, 1, 5), 2, None))) + folded = ("Expr", (1, 0, 1, 5), ("Constant", (1, 0, 1, 5), 3, None)) + not_folded = ( + "Expr", + (1, 0, 1, 5), + ( + "BinOp", + (1, 0, 1, 5), + ("Constant", (1, 0, 1, 1), 1, None), + ("Add",), + ("Constant", (1, 4, 1, 5), 2, None), + ), + ) cases = [(-1, not_folded), (0, not_folded), (1, folded), (2, folded)] - for (optval, expected) in cases: + for optval, expected in cases: with self.subTest(optval=optval): tree1 = ast.parse("1 + 2", optimize=optval) tree2 = ast.parse(ast.parse("1 + 2"), optimize=optval) @@ -400,9 +144,7 @@ def test_optimization_levels_const_folding(self): self.assertEqual(res, expected) def test_invalid_position_information(self): - invalid_linenos = [ - (10, 1), (-10, -11), (10, -11), (-5, -2), (-5, 1) - ] + invalid_linenos = [(10, 1), (-10, -11), (10, -11), (-5, -2), (-5, 1)] for lineno, end_lineno in invalid_linenos: with self.subTest(f"Check invalid linenos {lineno}:{end_lineno}"): @@ -411,25 +153,36 @@ def test_invalid_position_information(self): tree.body[0].lineno = lineno tree.body[0].end_lineno = end_lineno with self.assertRaises(ValueError): - compile(tree, '', 'exec') + compile(tree, "", "exec") - invalid_col_offsets = [ - (10, 1), (-10, -11), (10, -11), (-5, -2), (-5, 1) - ] + invalid_col_offsets = [(10, 1), (-10, -11), (10, -11), (-5, -2), (-5, 1)] for col_offset, end_col_offset in invalid_col_offsets: - with self.subTest(f"Check invalid col_offset {col_offset}:{end_col_offset}"): + with self.subTest( + f"Check invalid col_offset {col_offset}:{end_col_offset}" + ): snippet = "a = 1" tree = ast.parse(snippet) tree.body[0].col_offset = col_offset tree.body[0].end_col_offset = end_col_offset with self.assertRaises(ValueError): - compile(tree, '', 'exec') + compile(tree, "", "exec") def test_compilation_of_ast_nodes_with_default_end_position_values(self): - tree = ast.Module(body=[ - 
ast.Import(names=[ast.alias(name='builtins', lineno=1, col_offset=0)], lineno=1, col_offset=0), - ast.Import(names=[ast.alias(name='traceback', lineno=0, col_offset=0)], lineno=0, col_offset=1) - ], type_ignores=[]) + tree = ast.Module( + body=[ + ast.Import( + names=[ast.alias(name="builtins", lineno=1, col_offset=0)], + lineno=1, + col_offset=0, + ), + ast.Import( + names=[ast.alias(name="traceback", lineno=0, col_offset=0)], + lineno=0, + col_offset=1, + ), + ], + type_ignores=[], + ) # Check that compilation doesn't crash. Note: this may crash explicitly only on debug mode. compile(tree, "", "exec") @@ -454,7 +207,7 @@ def test_alias(self): im = ast.parse("from bar import y").body[0] self.assertEqual(len(im.names), 1) alias = im.names[0] - self.assertEqual(alias.name, 'y') + self.assertEqual(alias.name, "y") self.assertIsNone(alias.asname) self.assertEqual(alias.lineno, 1) self.assertEqual(alias.end_lineno, 1) @@ -463,7 +216,7 @@ def test_alias(self): im = ast.parse("from bar import *").body[0] alias = im.names[0] - self.assertEqual(alias.name, '*') + self.assertEqual(alias.name, "*") self.assertIsNone(alias.asname) self.assertEqual(alias.lineno, 1) self.assertEqual(alias.end_lineno, 1) @@ -497,21 +250,21 @@ def test_base_classes(self): self.assertTrue(issubclass(ast.Gt, ast.AST)) def test_import_deprecated(self): - ast = import_fresh_module('ast') + ast = import_fresh_module("ast") depr_regex = ( - r'ast\.{} is deprecated and will be removed in Python 3.14; ' - r'use ast\.Constant instead' + r"ast\.{} is deprecated and will be removed in Python 3.14; " + r"use ast\.Constant instead" ) - for name in 'Num', 'Str', 'Bytes', 'NameConstant', 'Ellipsis': + for name in "Num", "Str", "Bytes", "NameConstant", "Ellipsis": with self.assertWarnsRegex(DeprecationWarning, depr_regex.format(name)): getattr(ast, name) def test_field_attr_existence_deprecated(self): with warnings.catch_warnings(): - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) from ast import Num, Str, Bytes, NameConstant, Ellipsis - for name in ('Num', 'Str', 'Bytes', 'NameConstant', 'Ellipsis'): + for name in ("Num", "Str", "Bytes", "NameConstant", "Ellipsis"): item = getattr(ast, name) if self._is_ast_node(name, item): with self.subTest(item): @@ -523,10 +276,10 @@ def test_field_attr_existence_deprecated(self): def test_field_attr_existence(self): for name, item in ast.__dict__.items(): # These emit DeprecationWarnings - if name in {'Num', 'Str', 'Bytes', 'NameConstant', 'Ellipsis'}: + if name in {"Num", "Str", "Bytes", "NameConstant", "Ellipsis"}: continue # constructor has a different signature - if name == 'Index': + if name == "Index": continue if self._is_ast_node(name, item): x = self._construct_ast_class(item) @@ -537,28 +290,41 @@ def _construct_ast_class(self, cls): kwargs = {} for name, typ in cls.__annotations__.items(): if typ is str: - kwargs[name] = 'capybara' + kwargs[name] = "capybara" elif typ is int: kwargs[name] = 42 elif typ is object: - kwargs[name] = b'capybara' + kwargs[name] = b"capybara" elif isinstance(typ, type) and issubclass(typ, ast.AST): kwargs[name] = self._construct_ast_class(typ) return cls(**kwargs) def test_arguments(self): x = ast.arguments() - self.assertEqual(x._fields, ('posonlyargs', 'args', 'vararg', 'kwonlyargs', - 'kw_defaults', 'kwarg', 'defaults')) - self.assertEqual(x.__annotations__, { - 'posonlyargs': list[ast.arg], - 'args': list[ast.arg], - 'vararg': ast.arg | None, - 'kwonlyargs': list[ast.arg], - 
'kw_defaults': list[ast.expr], - 'kwarg': ast.arg | None, - 'defaults': list[ast.expr], - }) + self.assertEqual( + x._fields, + ( + "posonlyargs", + "args", + "vararg", + "kwonlyargs", + "kw_defaults", + "kwarg", + "defaults", + ), + ) + self.assertEqual( + x.__annotations__, + { + "posonlyargs": list[ast.arg], + "args": list[ast.arg], + "vararg": ast.arg | None, + "kwonlyargs": list[ast.arg], + "kw_defaults": list[ast.expr], + "kwarg": ast.arg | None, + "defaults": list[ast.expr], + }, + ) self.assertEqual(x.args, []) self.assertIsNone(x.vararg) @@ -569,7 +335,7 @@ def test_arguments(self): def test_field_attr_writable_deprecated(self): with warnings.catch_warnings(): - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) x = ast.Num() # We can assign to _fields x._fields = 666 @@ -583,13 +349,13 @@ def test_field_attr_writable(self): def test_classattrs_deprecated(self): with warnings.catch_warnings(): - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) from ast import Num, Str, Bytes, NameConstant, Ellipsis with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('always', '', DeprecationWarning) + warnings.filterwarnings("always", "", DeprecationWarning) x = ast.Num() - self.assertEqual(x._fields, ('value', 'kind')) + self.assertEqual(x._fields, ("value", "kind")) with self.assertRaises(AttributeError): x.value @@ -612,7 +378,7 @@ def test_classattrs_deprecated(self): x = ast.Num(42, lineno=0) self.assertEqual(x.lineno, 0) - self.assertEqual(x._fields, ('value', 'kind')) + self.assertEqual(x._fields, ("value", "kind")) self.assertEqual(x.value, 42) self.assertEqual(x.n, 42) @@ -620,57 +386,62 @@ def test_classattrs_deprecated(self): self.assertRaises(TypeError, ast.Num, 1, None, 2, lineno=0) # Arbitrary keyword arguments are supported - self.assertEqual(ast.Num(1, foo='bar').foo, 'bar') + self.assertEqual(ast.Num(1, foo="bar").foo, "bar") - with self.assertRaisesRegex(TypeError, "Num got multiple values for argument 'n'"): + with self.assertRaisesRegex( + TypeError, "Num got multiple values for argument 'n'" + ): ast.Num(1, n=2) self.assertEqual(ast.Num(42).n, 42) self.assertEqual(ast.Num(4.25).n, 4.25) self.assertEqual(ast.Num(4.25j).n, 4.25j) - self.assertEqual(ast.Str('42').s, '42') - self.assertEqual(ast.Bytes(b'42').s, b'42') + self.assertEqual(ast.Str("42").s, "42") + self.assertEqual(ast.Bytes(b"42").s, b"42") self.assertIs(ast.NameConstant(True).value, True) self.assertIs(ast.NameConstant(False).value, False) self.assertIs(ast.NameConstant(None).value, None) - self.assertEqual([str(w.message) for w in wlog], [ - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - "Constant.__init__ missing 1 required positional argument: 'value'. This will become " - 'an error in Python 3.15.', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - "Constant.__init__ missing 1 required positional argument: 'value'. 
This will become " - 'an error in Python 3.15.', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - "Constant.__init__ got an unexpected keyword argument 'foo'. Support for " - 'arbitrary keyword arguments is deprecated and will be removed in Python ' - '3.15.', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', - 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', - ]) + self.assertEqual( + [str(w.message) for w in wlog], + [ + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Constant.__init__ missing 1 required positional argument: 'value'. This will become " + "an error in Python 3.15.", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Constant.__init__ missing 1 required positional argument: 'value'. This will become " + "an error in Python 3.15.", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Constant.__init__ got an unexpected keyword argument 'foo'. 
Support for " + "arbitrary keyword arguments is deprecated and will be removed in Python " + "3.15.", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Attribute s is deprecated and will be removed in Python 3.14; use value instead", + "ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "Attribute s is deprecated and will be removed in Python 3.14; use value instead", + "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", + ], + ) def test_classattrs(self): with self.assertWarns(DeprecationWarning): x = ast.Constant() - self.assertEqual(x._fields, ('value', 'kind')) + self.assertEqual(x._fields, ("value", "kind")) with self.assertRaises(AttributeError): x.value @@ -689,7 +460,7 @@ def test_classattrs(self): x = ast.Constant(42, lineno=0) self.assertEqual(x.lineno, 0) - self.assertEqual(x._fields, ('value', 'kind')) + self.assertEqual(x._fields, ("value", "kind")) self.assertEqual(x.value, 42) self.assertRaises(TypeError, ast.Constant, 1, None, 2) @@ -697,16 +468,18 @@ def test_classattrs(self): # Arbitrary keyword arguments are supported (but deprecated) with self.assertWarns(DeprecationWarning): - self.assertEqual(ast.Constant(1, foo='bar').foo, 'bar') + self.assertEqual(ast.Constant(1, foo="bar").foo, "bar") - with self.assertRaisesRegex(TypeError, "Constant got multiple values for argument 'value'"): + with self.assertRaisesRegex( + TypeError, "Constant got multiple values for argument 'value'" + ): ast.Constant(1, value=2) self.assertEqual(ast.Constant(42).value, 42) self.assertEqual(ast.Constant(4.25).value, 4.25) self.assertEqual(ast.Constant(4.25j).value, 4.25j) - self.assertEqual(ast.Constant('42').value, '42') - self.assertEqual(ast.Constant(b'42').value, b'42') + self.assertEqual(ast.Constant("42").value, "42") + self.assertEqual(ast.Constant(b"42").value, b"42") self.assertIs(ast.Constant(True).value, True) self.assertIs(ast.Constant(False).value, False) self.assertIs(ast.Constant(None).value, None) @@ -714,43 +487,46 @@ def test_classattrs(self): def test_realtype(self): with warnings.catch_warnings(): - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) from ast import Num, Str, Bytes, NameConstant, Ellipsis with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('always', '', DeprecationWarning) + warnings.filterwarnings("always", "", DeprecationWarning) self.assertIs(type(ast.Num(42)), ast.Constant) self.assertIs(type(ast.Num(4.25)), ast.Constant) self.assertIs(type(ast.Num(4.25j)), ast.Constant) - self.assertIs(type(ast.Str('42')), ast.Constant) - self.assertIs(type(ast.Bytes(b'42')), ast.Constant) + self.assertIs(type(ast.Str("42")), ast.Constant) + 
self.assertIs(type(ast.Bytes(b"42")), ast.Constant) self.assertIs(type(ast.NameConstant(True)), ast.Constant) self.assertIs(type(ast.NameConstant(False)), ast.Constant) self.assertIs(type(ast.NameConstant(None)), ast.Constant) self.assertIs(type(ast.Ellipsis()), ast.Constant) - self.assertEqual([str(w.message) for w in wlog], [ - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Ellipsis is deprecated and will be removed in Python 3.14; use ast.Constant instead', - ]) + self.assertEqual( + [str(w.message) for w in wlog], + [ + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Ellipsis is deprecated and will be removed in Python 3.14; use ast.Constant instead", + ], + ) def test_isinstance(self): from ast import Constant with warnings.catch_warnings(): - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) from ast import Num, Str, Bytes, NameConstant, Ellipsis cls_depr_msg = ( - 'ast.{} is deprecated and will be removed in Python 3.14; ' - 'use ast.Constant instead' + "ast.{} is deprecated and will be removed in Python 3.14; " + "use ast.Constant instead" ) assertNumDeprecated = partial( @@ -765,7 +541,7 @@ def test_isinstance(self): assertNameConstantDeprecated = partial( self.assertWarnsRegex, DeprecationWarning, - cls_depr_msg.format("NameConstant") + cls_depr_msg.format("NameConstant"), ) assertEllipsisDeprecated = partial( self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Ellipsis") @@ -779,12 +555,12 @@ def test_isinstance(self): self.assertIsInstance(n, Num) with assertStrDeprecated(): - s = Str('42') + s = Str("42") with assertStrDeprecated(): self.assertIsInstance(s, Str) with assertBytesDeprecated(): - b = Bytes(b'42') + b = Bytes(b"42") with assertBytesDeprecated(): self.assertIsInstance(b, Bytes) @@ -806,10 +582,10 @@ def test_isinstance(self): self.assertIsInstance(Constant(arg), Num) with assertStrDeprecated(): - self.assertIsInstance(Constant('42'), Str) + self.assertIsInstance(Constant("42"), Str) with assertBytesDeprecated(): - self.assertIsInstance(Constant(b'42'), Bytes) + self.assertIsInstance(Constant(b"42"), Bytes) for arg in True, False, 
None: with self.subTest(arg=arg): @@ -820,7 +596,7 @@ def test_isinstance(self): self.assertIsInstance(Constant(...), Ellipsis) with assertStrDeprecated(): - s = Str('42') + s = Str("42") assertNumDeprecated(self.assertNotIsInstance, s, Num) assertBytesDeprecated(self.assertNotIsInstance, s, Bytes) @@ -840,44 +616,52 @@ def test_isinstance(self): with assertNumDeprecated(): self.assertNotIsInstance(n, Num) - for arg in '42', True, False: + for arg in "42", True, False: with self.subTest(arg=arg): with assertNumDeprecated(): self.assertNotIsInstance(Constant(arg), Num) assertStrDeprecated(self.assertNotIsInstance, Constant(42), Str) - assertBytesDeprecated(self.assertNotIsInstance, Constant('42'), Bytes) - assertNameConstantDeprecated(self.assertNotIsInstance, Constant(42), NameConstant) + assertBytesDeprecated(self.assertNotIsInstance, Constant("42"), Bytes) + assertNameConstantDeprecated( + self.assertNotIsInstance, Constant(42), NameConstant + ) assertEllipsisDeprecated(self.assertNotIsInstance, Constant(42), Ellipsis) assertNumDeprecated(self.assertNotIsInstance, Constant(None), Num) assertStrDeprecated(self.assertNotIsInstance, Constant(None), Str) assertBytesDeprecated(self.assertNotIsInstance, Constant(None), Bytes) - assertNameConstantDeprecated(self.assertNotIsInstance, Constant(1), NameConstant) + assertNameConstantDeprecated( + self.assertNotIsInstance, Constant(1), NameConstant + ) assertEllipsisDeprecated(self.assertNotIsInstance, Constant(None), Ellipsis) - class S(str): pass + class S(str): + pass + with assertStrDeprecated(): - self.assertIsInstance(Constant(S('42')), Str) + self.assertIsInstance(Constant(S("42")), Str) with assertNumDeprecated(): - self.assertNotIsInstance(Constant(S('42')), Num) + self.assertNotIsInstance(Constant(S("42")), Num) def test_constant_subclasses_deprecated(self): with warnings.catch_warnings(): - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) from ast import Num with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('always', '', DeprecationWarning) + warnings.filterwarnings("always", "", DeprecationWarning) + class N(ast.Num): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.z = 'spam' + self.z = "spam" + class N2(ast.Num): pass n = N(42) self.assertEqual(n.n, 42) - self.assertEqual(n.z, 'spam') + self.assertEqual(n.z, "spam") self.assertIs(type(n), N) self.assertIsInstance(n, N) self.assertIsInstance(n, ast.Num) @@ -887,26 +671,30 @@ class N2(ast.Num): self.assertEqual(n.n, 42) self.assertIs(type(n), N) - self.assertEqual([str(w.message) for w in wlog], [ - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - ]) + self.assertEqual( + [str(w.message) for w in wlog], + [ + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in 
Python 3.14; use ast.Constant instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + ], + ) def test_constant_subclasses(self): class N(ast.Constant): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.z = 'spam' + self.z = "spam" + class N2(ast.Constant): pass n = N(42) self.assertEqual(n.value, 42) - self.assertEqual(n.z, 'spam') + self.assertEqual(n.z, "spam") self.assertEqual(type(n), N) self.assertTrue(isinstance(n, N)) self.assertTrue(isinstance(n, ast.Constant)) @@ -925,7 +713,7 @@ def test_nodeclasses(self): # Zero arguments constructor explicitly allowed (but deprecated) with self.assertWarns(DeprecationWarning): x = ast.BinOp() - self.assertEqual(x._fields, ('left', 'op', 'right')) + self.assertEqual(x._fields, ("left", "op", "right")) # Random attribute allowed too x.foobarbaz = 5 @@ -990,9 +778,7 @@ def test_invalid_constant(self): for invalid_constant in int, (1, 2, int), frozenset((1, 2, int)): e = ast.Expression(body=ast.Constant(invalid_constant)) ast.fix_missing_locations(e) - with self.assertRaisesRegex( - TypeError, "invalid type in Constant: type" - ): + with self.assertRaisesRegex(TypeError, "invalid type in Constant: type"): compile(e, "", "eval") def test_empty_yield_from(self): @@ -1008,13 +794,15 @@ def test_issue31592(self): # There shouldn't be an assertion failure in case of a bad # unicodedata.normalize(). import unicodedata + def bad_normalize(*args): return None - with support.swap_attr(unicodedata, 'normalize', bad_normalize): - self.assertRaises(TypeError, ast.parse, '\u03D5') + + with support.swap_attr(unicodedata, "normalize", bad_normalize): + self.assertRaises(TypeError, ast.parse, "\u03d5") def test_issue18374_binop_col_offset(self): - tree = ast.parse('4+5+6+7') + tree = ast.parse("4+5+6+7") parent_binop = tree.body[0].value child_binop = parent_binop.left grandchild_binop = child_binop.left @@ -1025,7 +813,7 @@ def test_issue18374_binop_col_offset(self): self.assertEqual(grandchild_binop.col_offset, 0) self.assertEqual(grandchild_binop.end_col_offset, 3) - tree = ast.parse('4+5-\\\n 6-7') + tree = ast.parse("4+5-\\\n 6-7") parent_binop = tree.body[0].value child_binop = parent_binop.left grandchild_binop = child_binop.left @@ -1045,48 +833,53 @@ def test_issue18374_binop_col_offset(self): self.assertEqual(grandchild_binop.end_lineno, 1) def test_issue39579_dotted_name_end_col_offset(self): - tree = ast.parse('@a.b.c\ndef f(): pass') + tree = ast.parse("@a.b.c\ndef f(): pass") attr_b = tree.body[0].decorator_list[0].value self.assertEqual(attr_b.end_col_offset, 4) def test_ast_asdl_signature(self): - self.assertEqual(ast.withitem.__doc__, "withitem(expr context_expr, expr? optional_vars)") + self.assertEqual( + ast.withitem.__doc__, "withitem(expr context_expr, expr? 
optional_vars)" + ) self.assertEqual(ast.GtE.__doc__, "GtE") self.assertEqual(ast.Name.__doc__, "Name(identifier id, expr_context ctx)") - self.assertEqual(ast.cmpop.__doc__, "cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn") + self.assertEqual( + ast.cmpop.__doc__, + "cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn", + ) expressions = [f" | {node.__doc__}" for node in ast.expr.__subclasses__()] expressions[0] = f"expr = {ast.expr.__subclasses__()[0].__doc__}" self.assertCountEqual(ast.expr.__doc__.split("\n"), expressions) def test_positional_only_feature_version(self): - ast.parse('def foo(x, /): ...', feature_version=(3, 8)) - ast.parse('def bar(x=1, /): ...', feature_version=(3, 8)) + ast.parse("def foo(x, /): ...", feature_version=(3, 8)) + ast.parse("def bar(x=1, /): ...", feature_version=(3, 8)) with self.assertRaises(SyntaxError): - ast.parse('def foo(x, /): ...', feature_version=(3, 7)) + ast.parse("def foo(x, /): ...", feature_version=(3, 7)) with self.assertRaises(SyntaxError): - ast.parse('def bar(x=1, /): ...', feature_version=(3, 7)) + ast.parse("def bar(x=1, /): ...", feature_version=(3, 7)) - ast.parse('lambda x, /: ...', feature_version=(3, 8)) - ast.parse('lambda x=1, /: ...', feature_version=(3, 8)) + ast.parse("lambda x, /: ...", feature_version=(3, 8)) + ast.parse("lambda x=1, /: ...", feature_version=(3, 8)) with self.assertRaises(SyntaxError): - ast.parse('lambda x, /: ...', feature_version=(3, 7)) + ast.parse("lambda x, /: ...", feature_version=(3, 7)) with self.assertRaises(SyntaxError): - ast.parse('lambda x=1, /: ...', feature_version=(3, 7)) + ast.parse("lambda x=1, /: ...", feature_version=(3, 7)) def test_assignment_expression_feature_version(self): - ast.parse('(x := 0)', feature_version=(3, 8)) + ast.parse("(x := 0)", feature_version=(3, 8)) with self.assertRaises(SyntaxError): - ast.parse('(x := 0)', feature_version=(3, 7)) + ast.parse("(x := 0)", feature_version=(3, 7)) def test_conditional_context_managers_parse_with_low_feature_version(self): # regression test for gh-115881 - ast.parse('with (x() if y else z()): ...', feature_version=(3, 8)) + ast.parse("with (x() if y else z()): ...", feature_version=(3, 8)) def test_exception_groups_feature_version(self): - code = dedent(''' + code = dedent(""" try: ... except* Exception: ... 
- ''') + """) ast.parse(code) with self.assertRaises(SyntaxError): ast.parse(code, feature_version=(3, 10)) @@ -1117,45 +910,50 @@ def test_type_params_default_feature_version(self): def test_invalid_major_feature_version(self): with self.assertRaises(ValueError): - ast.parse('pass', feature_version=(2, 7)) + ast.parse("pass", feature_version=(2, 7)) with self.assertRaises(ValueError): - ast.parse('pass', feature_version=(4, 0)) + ast.parse("pass", feature_version=(4, 0)) def test_constant_as_name(self): for constant in "True", "False", "None": expr = ast.Expression(ast.Name(constant, ast.Load())) ast.fix_missing_locations(expr) - with self.assertRaisesRegex(ValueError, f"identifier field can't represent '{constant}' constant"): + with self.assertRaisesRegex( + ValueError, f"identifier field can't represent '{constant}' constant" + ): compile(expr, "", "eval") def test_precedence_enum(self): class _Precedence(enum.IntEnum): """Precedence table that originated from python grammar.""" - NAMED_EXPR = enum.auto() # := - TUPLE = enum.auto() # , - YIELD = enum.auto() # 'yield', 'yield from' - TEST = enum.auto() # 'if'-'else', 'lambda' - OR = enum.auto() # 'or' - AND = enum.auto() # 'and' - NOT = enum.auto() # 'not' - CMP = enum.auto() # '<', '>', '==', '>=', '<=', '!=', - # 'in', 'not in', 'is', 'is not' + + NAMED_EXPR = enum.auto() # := + TUPLE = enum.auto() # , + YIELD = enum.auto() # 'yield', 'yield from' + TEST = enum.auto() # 'if'-'else', 'lambda' + OR = enum.auto() # 'or' + AND = enum.auto() # 'and' + NOT = enum.auto() # 'not' + CMP = enum.auto() # '<', '>', '==', '>=', '<=', '!=', + # 'in', 'not in', 'is', 'is not' EXPR = enum.auto() - BOR = EXPR # '|' - BXOR = enum.auto() # '^' - BAND = enum.auto() # '&' - SHIFT = enum.auto() # '<<', '>>' - ARITH = enum.auto() # '+', '-' - TERM = enum.auto() # '*', '@', '/', '%', '//' - FACTOR = enum.auto() # unary '+', '-', '~' - POWER = enum.auto() # '**' - AWAIT = enum.auto() # 'await' + BOR = EXPR # '|' + BXOR = enum.auto() # '^' + BAND = enum.auto() # '&' + SHIFT = enum.auto() # '<<', '>>' + ARITH = enum.auto() # '+', '-' + TERM = enum.auto() # '*', '@', '/', '%', '//' + FACTOR = enum.auto() # unary '+', '-', '~' + POWER = enum.auto() # '**' + AWAIT = enum.auto() # 'await' ATOM = enum.auto() + def next(self): try: return self.__class__(self + 1) except ValueError: return self + enum._test_simple_enum(_Precedence, ast._Precedence) @support.cpython_only @@ -1172,8 +970,7 @@ def check_limit(prefix, repeated): ast.parse(expect_ok) for depth in (fail_depth, crash_depth): broken = prefix + repeated * depth - details = "Compiling ({!r} + {!r} * {})".format( - prefix, repeated, depth) + details = "Compiling ({!r} + {!r} * {})".format(prefix, repeated, depth) with self.assertRaises(RecursionError, msg=details): with support.infinite_recursion(): ast.parse(broken) @@ -1184,8 +981,9 @@ def check_limit(prefix, repeated): check_limit("a", "*a") def test_null_bytes(self): - with self.assertRaises(SyntaxError, - msg="source code string cannot contain null bytes"): + with self.assertRaises( + SyntaxError, msg="source code string cannot contain null bytes" + ): ast.parse("a\0b") def assert_none_check(self, node: type[ast.AST], attr: str, source: str) -> None: @@ -1282,9 +1080,16 @@ def test_copy_with_parents(self): for node in ast.walk(tree2): for child in ast.iter_child_nodes(node): - if hasattr(child, "parent") and not isinstance(child, ( - ast.expr_context, ast.boolop, ast.unaryop, ast.cmpop, ast.operator, - )): + if hasattr(child, "parent") and not isinstance( + 
child, + ( + ast.expr_context, + ast.boolop, + ast.unaryop, + ast.cmpop, + ast.operator, + ), + ): self.assertEqual(to_tuple(child.parent), to_tuple(node)) @@ -1292,13 +1097,13 @@ class ASTHelpers_Test(unittest.TestCase): maxDiff = None def test_parse(self): - a = ast.parse('foo(1 + 1)') - b = compile('foo(1 + 1)', '', 'exec', ast.PyCF_ONLY_AST) + a = ast.parse("foo(1 + 1)") + b = compile("foo(1 + 1)", "", "exec", ast.PyCF_ONLY_AST) self.assertEqual(ast.dump(a), ast.dump(b)) def test_parse_in_error(self): try: - 1/0 + 1 / 0 except Exception: with self.assertRaises(SyntaxError) as e: ast.literal_eval(r"'\U'") @@ -1306,27 +1111,32 @@ def test_parse_in_error(self): def test_dump(self): node = ast.parse('spam(eggs, "and cheese")') - self.assertEqual(ast.dump(node), + self.assertEqual( + ast.dump(node), "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), " - "args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')]))])" + "args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')]))])", ) - self.assertEqual(ast.dump(node, annotate_fields=False), + self.assertEqual( + ast.dump(node, annotate_fields=False), "Module([Expr(Call(Name('spam', Load()), [Name('eggs', Load()), " - "Constant('and cheese')]))])" + "Constant('and cheese')]))])", ) - self.assertEqual(ast.dump(node, include_attributes=True), + self.assertEqual( + ast.dump(node, include_attributes=True), "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load(), " "lineno=1, col_offset=0, end_lineno=1, end_col_offset=4), " "args=[Name(id='eggs', ctx=Load(), lineno=1, col_offset=5, " "end_lineno=1, end_col_offset=9), Constant(value='and cheese', " "lineno=1, col_offset=11, end_lineno=1, end_col_offset=23)], " "lineno=1, col_offset=0, end_lineno=1, end_col_offset=24), " - "lineno=1, col_offset=0, end_lineno=1, end_col_offset=24)])" + "lineno=1, col_offset=0, end_lineno=1, end_col_offset=24)])", ) def test_dump_indent(self): node = ast.parse('spam(eggs, "and cheese")') - self.assertEqual(ast.dump(node, indent=3), """\ + self.assertEqual( + ast.dump(node, indent=3), + """\ Module( body=[ Expr( @@ -1334,8 +1144,11 @@ def test_dump_indent(self): func=Name(id='spam', ctx=Load()), args=[ Name(id='eggs', ctx=Load()), - Constant(value='and cheese')]))])""") - self.assertEqual(ast.dump(node, annotate_fields=False, indent='\t'), """\ + Constant(value='and cheese')]))])""", + ) + self.assertEqual( + ast.dump(node, annotate_fields=False, indent="\t"), + """\ Module( \t[ \t\tExpr( @@ -1343,8 +1156,11 @@ def test_dump_indent(self): \t\t\t\tName('spam', Load()), \t\t\t\t[ \t\t\t\t\tName('eggs', Load()), -\t\t\t\t\tConstant('and cheese')]))])""") - self.assertEqual(ast.dump(node, include_attributes=True, indent=3), """\ +\t\t\t\t\tConstant('and cheese')]))])""", + ) + self.assertEqual( + ast.dump(node, include_attributes=True, indent=3), + """\ Module( body=[ Expr( @@ -1377,69 +1193,73 @@ def test_dump_indent(self): lineno=1, col_offset=0, end_lineno=1, - end_col_offset=24)])""") + end_col_offset=24)])""", + ) def test_dump_incomplete(self): node = ast.Raise(lineno=3, col_offset=4) - self.assertEqual(ast.dump(node), - "Raise()" + self.assertEqual(ast.dump(node), "Raise()") + self.assertEqual( + ast.dump(node, include_attributes=True), "Raise(lineno=3, col_offset=4)" ) - self.assertEqual(ast.dump(node, include_attributes=True), - "Raise(lineno=3, col_offset=4)" + node = ast.Raise(exc=ast.Name(id="e", ctx=ast.Load()), lineno=3, col_offset=4) + self.assertEqual(ast.dump(node), "Raise(exc=Name(id='e', ctx=Load()))") + 
self.assertEqual( + ast.dump(node, annotate_fields=False), "Raise(Name('e', Load()))" ) - node = ast.Raise(exc=ast.Name(id='e', ctx=ast.Load()), lineno=3, col_offset=4) - self.assertEqual(ast.dump(node), - "Raise(exc=Name(id='e', ctx=Load()))" + self.assertEqual( + ast.dump(node, include_attributes=True), + "Raise(exc=Name(id='e', ctx=Load()), lineno=3, col_offset=4)", ) - self.assertEqual(ast.dump(node, annotate_fields=False), - "Raise(Name('e', Load()))" + self.assertEqual( + ast.dump(node, annotate_fields=False, include_attributes=True), + "Raise(Name('e', Load()), lineno=3, col_offset=4)", ) - self.assertEqual(ast.dump(node, include_attributes=True), - "Raise(exc=Name(id='e', ctx=Load()), lineno=3, col_offset=4)" - ) - self.assertEqual(ast.dump(node, annotate_fields=False, include_attributes=True), - "Raise(Name('e', Load()), lineno=3, col_offset=4)" - ) - node = ast.Raise(cause=ast.Name(id='e', ctx=ast.Load())) - self.assertEqual(ast.dump(node), - "Raise(cause=Name(id='e', ctx=Load()))" - ) - self.assertEqual(ast.dump(node, annotate_fields=False), - "Raise(cause=Name('e', Load()))" + node = ast.Raise(cause=ast.Name(id="e", ctx=ast.Load())) + self.assertEqual(ast.dump(node), "Raise(cause=Name(id='e', ctx=Load()))") + self.assertEqual( + ast.dump(node, annotate_fields=False), "Raise(cause=Name('e', Load()))" ) # Arguments: node = ast.arguments(args=[ast.arg("x")]) - self.assertEqual(ast.dump(node, annotate_fields=False), + self.assertEqual( + ast.dump(node, annotate_fields=False), "arguments([], [arg('x')])", ) node = ast.arguments(posonlyargs=[ast.arg("x")]) - self.assertEqual(ast.dump(node, annotate_fields=False), + self.assertEqual( + ast.dump(node, annotate_fields=False), "arguments([arg('x')])", ) - node = ast.arguments(posonlyargs=[ast.arg("x")], kwonlyargs=[ast.arg('y')]) - self.assertEqual(ast.dump(node, annotate_fields=False), + node = ast.arguments(posonlyargs=[ast.arg("x")], kwonlyargs=[ast.arg("y")]) + self.assertEqual( + ast.dump(node, annotate_fields=False), "arguments([arg('x')], kwonlyargs=[arg('y')])", ) - node = ast.arguments(args=[ast.arg("x")], kwonlyargs=[ast.arg('y')]) - self.assertEqual(ast.dump(node, annotate_fields=False), + node = ast.arguments(args=[ast.arg("x")], kwonlyargs=[ast.arg("y")]) + self.assertEqual( + ast.dump(node, annotate_fields=False), "arguments([], [arg('x')], kwonlyargs=[arg('y')])", ) node = ast.arguments() - self.assertEqual(ast.dump(node, annotate_fields=False), + self.assertEqual( + ast.dump(node, annotate_fields=False), "arguments()", ) # Classes: node = ast.ClassDef( - 'T', + "T", [], - [ast.keyword('a', ast.Constant(None))], + [ast.keyword("a", ast.Constant(None))], [], - [ast.Name('dataclass', ctx=ast.Load())], + [ast.Name("dataclass", ctx=ast.Load())], ) - self.assertEqual(ast.dump(node), + self.assertEqual( + ast.dump(node), "ClassDef(name='T', keywords=[keyword(arg='a', value=Constant(value=None))], decorator_list=[Name(id='dataclass', ctx=Load())])", ) - self.assertEqual(ast.dump(node, annotate_fields=False), + self.assertEqual( + ast.dump(node, annotate_fields=False), "ClassDef('T', [], [keyword('a', Constant(None))], [], [Name('dataclass', Load())])", ) @@ -1467,7 +1287,7 @@ def check_text(code, empty, full, **kwargs): check_node( # Corner case: there are no real `Name` instances with `id=''`: - ast.Name(id='', ctx=ast.Load()), + ast.Name(id="", ctx=ast.Load()), empty="Name(id='', ctx=Load())", full="Name(id='', ctx=Load())", ) @@ -1485,7 +1305,7 @@ def check_text(code, empty, full, **kwargs): ) check_node( - 
ast.Constant(value=''), + ast.Constant(value=""), empty="Constant(value='')", full="Constant(value='')", ) @@ -1522,16 +1342,19 @@ def check_text(code, empty, full, **kwargs): ) def test_copy_location(self): - src = ast.parse('1 + 1', mode='eval') + src = ast.parse("1 + 1", mode="eval") src.body.right = ast.copy_location(ast.Constant(2), src.body.right) - self.assertEqual(ast.dump(src, include_attributes=True), - 'Expression(body=BinOp(left=Constant(value=1, lineno=1, col_offset=0, ' - 'end_lineno=1, end_col_offset=1), op=Add(), right=Constant(value=2, ' - 'lineno=1, col_offset=4, end_lineno=1, end_col_offset=5), lineno=1, ' - 'col_offset=0, end_lineno=1, end_col_offset=5))' - ) - func = ast.Name('spam', ast.Load()) - src = ast.Call(col_offset=1, lineno=1, end_lineno=1, end_col_offset=1, func=func) + self.assertEqual( + ast.dump(src, include_attributes=True), + "Expression(body=BinOp(left=Constant(value=1, lineno=1, col_offset=0, " + "end_lineno=1, end_col_offset=1), op=Add(), right=Constant(value=2, " + "lineno=1, col_offset=4, end_lineno=1, end_col_offset=5), lineno=1, " + "col_offset=0, end_lineno=1, end_col_offset=5))", + ) + func = ast.Name("spam", ast.Load()) + src = ast.Call( + col_offset=1, lineno=1, end_lineno=1, end_col_offset=1, func=func + ) new = ast.copy_location(src, ast.Call(col_offset=None, lineno=None, func=func)) self.assertIsNone(new.end_lineno) self.assertIsNone(new.end_col_offset) @@ -1540,11 +1363,13 @@ def test_copy_location(self): def test_fix_missing_locations(self): src = ast.parse('write("spam")') - src.body.append(ast.Expr(ast.Call(ast.Name('spam', ast.Load()), - [ast.Constant('eggs')], []))) + src.body.append( + ast.Expr(ast.Call(ast.Name("spam", ast.Load()), [ast.Constant("eggs")], [])) + ) self.assertEqual(src, ast.fix_missing_locations(src)) self.maxDiff = None - self.assertEqual(ast.dump(src, include_attributes=True), + self.assertEqual( + ast.dump(src, include_attributes=True), "Module(body=[Expr(value=Call(func=Name(id='write', ctx=Load(), " "lineno=1, col_offset=0, end_lineno=1, end_col_offset=5), " "args=[Constant(value='spam', lineno=1, col_offset=6, end_lineno=1, " @@ -1554,26 +1379,28 @@ def test_fix_missing_locations(self): "lineno=1, col_offset=0, end_lineno=1, end_col_offset=0), " "args=[Constant(value='eggs', lineno=1, col_offset=0, end_lineno=1, " "end_col_offset=0)], lineno=1, col_offset=0, end_lineno=1, " - "end_col_offset=0), lineno=1, col_offset=0, end_lineno=1, end_col_offset=0)])" + "end_col_offset=0), lineno=1, col_offset=0, end_lineno=1, end_col_offset=0)])", ) def test_increment_lineno(self): - src = ast.parse('1 + 1', mode='eval') + src = ast.parse("1 + 1", mode="eval") self.assertEqual(ast.increment_lineno(src, n=3), src) - self.assertEqual(ast.dump(src, include_attributes=True), - 'Expression(body=BinOp(left=Constant(value=1, lineno=4, col_offset=0, ' - 'end_lineno=4, end_col_offset=1), op=Add(), right=Constant(value=1, ' - 'lineno=4, col_offset=4, end_lineno=4, end_col_offset=5), lineno=4, ' - 'col_offset=0, end_lineno=4, end_col_offset=5))' + self.assertEqual( + ast.dump(src, include_attributes=True), + "Expression(body=BinOp(left=Constant(value=1, lineno=4, col_offset=0, " + "end_lineno=4, end_col_offset=1), op=Add(), right=Constant(value=1, " + "lineno=4, col_offset=4, end_lineno=4, end_col_offset=5), lineno=4, " + "col_offset=0, end_lineno=4, end_col_offset=5))", ) # issue10869: do not increment lineno of root twice - src = ast.parse('1 + 1', mode='eval') + src = ast.parse("1 + 1", mode="eval") 
self.assertEqual(ast.increment_lineno(src.body, n=3), src.body) - self.assertEqual(ast.dump(src, include_attributes=True), - 'Expression(body=BinOp(left=Constant(value=1, lineno=4, col_offset=0, ' - 'end_lineno=4, end_col_offset=1), op=Add(), right=Constant(value=1, ' - 'lineno=4, col_offset=4, end_lineno=4, end_col_offset=5), lineno=4, ' - 'col_offset=0, end_lineno=4, end_col_offset=5))' + self.assertEqual( + ast.dump(src, include_attributes=True), + "Expression(body=BinOp(left=Constant(value=1, lineno=4, col_offset=0, " + "end_lineno=4, end_col_offset=1), op=Add(), right=Constant(value=1, " + "lineno=4, col_offset=4, end_lineno=4, end_col_offset=5), lineno=4, " + "col_offset=0, end_lineno=4, end_col_offset=5))", ) src = ast.Call( func=ast.Name("test", ast.Load()), args=[], keywords=[], lineno=1 @@ -1582,74 +1409,84 @@ def test_increment_lineno(self): self.assertIsNone(ast.increment_lineno(src).end_lineno) def test_increment_lineno_on_module(self): - src = ast.parse(dedent("""\ + src = ast.parse( + dedent("""\ a = 1 b = 2 # type: ignore c = 3 d = 4 # type: ignore@tag - """), type_comments=True) + """), + type_comments=True, + ) ast.increment_lineno(src, n=5) self.assertEqual(src.type_ignores[0].lineno, 7) self.assertEqual(src.type_ignores[1].lineno, 9) - self.assertEqual(src.type_ignores[1].tag, '@tag') + self.assertEqual(src.type_ignores[1].tag, "@tag") def test_iter_fields(self): - node = ast.parse('foo()', mode='eval') + node = ast.parse("foo()", mode="eval") d = dict(ast.iter_fields(node.body)) - self.assertEqual(d.pop('func').id, 'foo') - self.assertEqual(d, {'keywords': [], 'args': []}) + self.assertEqual(d.pop("func").id, "foo") + self.assertEqual(d, {"keywords": [], "args": []}) def test_iter_child_nodes(self): - node = ast.parse("spam(23, 42, eggs='leek')", mode='eval') + node = ast.parse("spam(23, 42, eggs='leek')", mode="eval") self.assertEqual(len(list(ast.iter_child_nodes(node.body))), 4) iterator = ast.iter_child_nodes(node.body) - self.assertEqual(next(iterator).id, 'spam') + self.assertEqual(next(iterator).id, "spam") self.assertEqual(next(iterator).value, 23) self.assertEqual(next(iterator).value, 42) - self.assertEqual(ast.dump(next(iterator)), - "keyword(arg='eggs', value=Constant(value='leek'))" + self.assertEqual( + ast.dump(next(iterator)), + "keyword(arg='eggs', value=Constant(value='leek'))", ) def test_get_docstring(self): node = ast.parse('"""line one\n line two"""') - self.assertEqual(ast.get_docstring(node), - 'line one\nline two') + self.assertEqual(ast.get_docstring(node), "line one\nline two") node = ast.parse('class foo:\n """line one\n line two"""') - self.assertEqual(ast.get_docstring(node.body[0]), - 'line one\nline two') + self.assertEqual(ast.get_docstring(node.body[0]), "line one\nline two") node = ast.parse('def foo():\n """line one\n line two"""') - self.assertEqual(ast.get_docstring(node.body[0]), - 'line one\nline two') + self.assertEqual(ast.get_docstring(node.body[0]), "line one\nline two") node = ast.parse('async def foo():\n """spam\n ham"""') - self.assertEqual(ast.get_docstring(node.body[0]), 'spam\nham') + self.assertEqual(ast.get_docstring(node.body[0]), "spam\nham") + + node = ast.parse('async def foo():\n """spam\n ham"""') + self.assertEqual(ast.get_docstring(node.body[0], clean=False), "spam\n ham") + + node = ast.parse("x") + self.assertRaises(TypeError, ast.get_docstring, node.body[0]) def test_get_docstring_none(self): - self.assertIsNone(ast.get_docstring(ast.parse(''))) + self.assertIsNone(ast.get_docstring(ast.parse(""))) node = 
ast.parse('x = "not docstring"') self.assertIsNone(ast.get_docstring(node)) - node = ast.parse('def foo():\n pass') + node = ast.parse("def foo():\n pass") self.assertIsNone(ast.get_docstring(node)) - node = ast.parse('class foo:\n pass') + node = ast.parse("class foo:\n pass") self.assertIsNone(ast.get_docstring(node.body[0])) node = ast.parse('class foo:\n x = "not docstring"') self.assertIsNone(ast.get_docstring(node.body[0])) - node = ast.parse('class foo:\n def bar(self): pass') + node = ast.parse("class foo:\n def bar(self): pass") self.assertIsNone(ast.get_docstring(node.body[0])) - node = ast.parse('def foo():\n pass') + node = ast.parse("def foo():\n pass") self.assertIsNone(ast.get_docstring(node.body[0])) node = ast.parse('def foo():\n x = "not docstring"') self.assertIsNone(ast.get_docstring(node.body[0])) - node = ast.parse('async def foo():\n pass') + node = ast.parse("async def foo():\n pass") self.assertIsNone(ast.get_docstring(node.body[0])) node = ast.parse('async def foo():\n x = "not docstring"') self.assertIsNone(ast.get_docstring(node.body[0])) + node = ast.parse("async def foo():\n 42") + self.assertIsNone(ast.get_docstring(node.body[0])) + def test_multi_line_docstring_col_offset_and_lineno_issue16806(self): node = ast.parse( '"""line one\nline two"""\n\n' @@ -1670,75 +1507,79 @@ def test_multi_line_docstring_col_offset_and_lineno_issue16806(self): self.assertEqual(node.body[2].lineno, 13) def test_elif_stmt_start_position(self): - node = ast.parse('if a:\n pass\nelif b:\n pass\n') + node = ast.parse("if a:\n pass\nelif b:\n pass\n") elif_stmt = node.body[0].orelse[0] self.assertEqual(elif_stmt.lineno, 3) self.assertEqual(elif_stmt.col_offset, 0) def test_elif_stmt_start_position_with_else(self): - node = ast.parse('if a:\n pass\nelif b:\n pass\nelse:\n pass\n') + node = ast.parse("if a:\n pass\nelif b:\n pass\nelse:\n pass\n") elif_stmt = node.body[0].orelse[0] self.assertEqual(elif_stmt.lineno, 3) self.assertEqual(elif_stmt.col_offset, 0) def test_starred_expr_end_position_within_call(self): - node = ast.parse('f(*[0, 1])') + node = ast.parse("f(*[0, 1])") starred_expr = node.body[0].value.args[0] self.assertEqual(starred_expr.end_lineno, 1) self.assertEqual(starred_expr.end_col_offset, 9) def test_literal_eval(self): - self.assertEqual(ast.literal_eval('[1, 2, 3]'), [1, 2, 3]) + self.assertEqual(ast.literal_eval("[1, 2, 3]"), [1, 2, 3]) self.assertEqual(ast.literal_eval('{"foo": 42}'), {"foo": 42}) - self.assertEqual(ast.literal_eval('(True, False, None)'), (True, False, None)) - self.assertEqual(ast.literal_eval('{1, 2, 3}'), {1, 2, 3}) + self.assertEqual(ast.literal_eval("(True, False, None)"), (True, False, None)) + self.assertEqual(ast.literal_eval("{1, 2, 3}"), {1, 2, 3}) self.assertEqual(ast.literal_eval('b"hi"'), b"hi") - self.assertEqual(ast.literal_eval('set()'), set()) - self.assertRaises(ValueError, ast.literal_eval, 'foo()') - self.assertEqual(ast.literal_eval('6'), 6) - self.assertEqual(ast.literal_eval('+6'), 6) - self.assertEqual(ast.literal_eval('-6'), -6) - self.assertEqual(ast.literal_eval('3.25'), 3.25) - self.assertEqual(ast.literal_eval('+3.25'), 3.25) - self.assertEqual(ast.literal_eval('-3.25'), -3.25) - self.assertEqual(repr(ast.literal_eval('-0.0')), '-0.0') - self.assertRaises(ValueError, ast.literal_eval, '++6') - self.assertRaises(ValueError, ast.literal_eval, '+True') - self.assertRaises(ValueError, ast.literal_eval, '2+3') + self.assertEqual(ast.literal_eval("set()"), set()) + self.assertRaises(ValueError, ast.literal_eval, "foo()") 
+ self.assertEqual(ast.literal_eval("6"), 6) + self.assertEqual(ast.literal_eval("+6"), 6) + self.assertEqual(ast.literal_eval("-6"), -6) + self.assertEqual(ast.literal_eval("3.25"), 3.25) + self.assertEqual(ast.literal_eval("+3.25"), 3.25) + self.assertEqual(ast.literal_eval("-3.25"), -3.25) + self.assertEqual(repr(ast.literal_eval("-0.0")), "-0.0") + self.assertRaises(ValueError, ast.literal_eval, "++6") + self.assertRaises(ValueError, ast.literal_eval, "+True") + self.assertRaises(ValueError, ast.literal_eval, "2+3") def test_literal_eval_str_int_limit(self): with support.adjust_int_max_str_digits(4000): - ast.literal_eval('3'*4000) # no error + ast.literal_eval("3" * 4000) # no error with self.assertRaises(SyntaxError) as err_ctx: - ast.literal_eval('3'*4001) - self.assertIn('Exceeds the limit ', str(err_ctx.exception)) - self.assertIn(' Consider hexadecimal ', str(err_ctx.exception)) + ast.literal_eval("3" * 4001) + self.assertIn("Exceeds the limit ", str(err_ctx.exception)) + self.assertIn(" Consider hexadecimal ", str(err_ctx.exception)) def test_literal_eval_complex(self): # Issue #4907 - self.assertEqual(ast.literal_eval('6j'), 6j) - self.assertEqual(ast.literal_eval('-6j'), -6j) - self.assertEqual(ast.literal_eval('6.75j'), 6.75j) - self.assertEqual(ast.literal_eval('-6.75j'), -6.75j) - self.assertEqual(ast.literal_eval('3+6j'), 3+6j) - self.assertEqual(ast.literal_eval('-3+6j'), -3+6j) - self.assertEqual(ast.literal_eval('3-6j'), 3-6j) - self.assertEqual(ast.literal_eval('-3-6j'), -3-6j) - self.assertEqual(ast.literal_eval('3.25+6.75j'), 3.25+6.75j) - self.assertEqual(ast.literal_eval('-3.25+6.75j'), -3.25+6.75j) - self.assertEqual(ast.literal_eval('3.25-6.75j'), 3.25-6.75j) - self.assertEqual(ast.literal_eval('-3.25-6.75j'), -3.25-6.75j) - self.assertEqual(ast.literal_eval('(3+6j)'), 3+6j) - self.assertRaises(ValueError, ast.literal_eval, '-6j+3') - self.assertRaises(ValueError, ast.literal_eval, '-6j+3j') - self.assertRaises(ValueError, ast.literal_eval, '3+-6j') - self.assertRaises(ValueError, ast.literal_eval, '3+(0+6j)') - self.assertRaises(ValueError, ast.literal_eval, '-(3+6j)') + self.assertEqual(ast.literal_eval("6j"), 6j) + self.assertEqual(ast.literal_eval("-6j"), -6j) + self.assertEqual(ast.literal_eval("6.75j"), 6.75j) + self.assertEqual(ast.literal_eval("-6.75j"), -6.75j) + self.assertEqual(ast.literal_eval("3+6j"), 3 + 6j) + self.assertEqual(ast.literal_eval("-3+6j"), -3 + 6j) + self.assertEqual(ast.literal_eval("3-6j"), 3 - 6j) + self.assertEqual(ast.literal_eval("-3-6j"), -3 - 6j) + self.assertEqual(ast.literal_eval("3.25+6.75j"), 3.25 + 6.75j) + self.assertEqual(ast.literal_eval("-3.25+6.75j"), -3.25 + 6.75j) + self.assertEqual(ast.literal_eval("3.25-6.75j"), 3.25 - 6.75j) + self.assertEqual(ast.literal_eval("-3.25-6.75j"), -3.25 - 6.75j) + self.assertEqual(ast.literal_eval("(3+6j)"), 3 + 6j) + self.assertRaises(ValueError, ast.literal_eval, "-6j+3") + self.assertRaises(ValueError, ast.literal_eval, "-6j+3j") + self.assertRaises(ValueError, ast.literal_eval, "3+-6j") + self.assertRaises(ValueError, ast.literal_eval, "3+(0+6j)") + self.assertRaises(ValueError, ast.literal_eval, "-(3+6j)") def test_literal_eval_malformed_dict_nodes(self): - malformed = ast.Dict(keys=[ast.Constant(1), ast.Constant(2)], values=[ast.Constant(3)]) + malformed = ast.Dict( + keys=[ast.Constant(1), ast.Constant(2)], values=[ast.Constant(3)] + ) self.assertRaises(ValueError, ast.literal_eval, malformed) - malformed = ast.Dict(keys=[ast.Constant(1)], values=[ast.Constant(2), 
ast.Constant(3)]) + malformed = ast.Dict( + keys=[ast.Constant(1)], values=[ast.Constant(2), ast.Constant(3)] + ) self.assertRaises(ValueError, ast.literal_eval, malformed) def test_literal_eval_trailing_ws(self): @@ -1748,46 +1589,54 @@ def test_literal_eval_trailing_ws(self): self.assertRaises(IndentationError, ast.literal_eval, "\n -1") def test_literal_eval_malformed_lineno(self): - msg = r'malformed node or string on line 3:' + msg = r"malformed node or string on line 3:" with self.assertRaisesRegex(ValueError, msg): ast.literal_eval("{'a': 1,\n'b':2,\n'c':++3,\n'd':4}") - node = ast.UnaryOp( - ast.UAdd(), ast.UnaryOp(ast.UAdd(), ast.Constant(6))) - self.assertIsNone(getattr(node, 'lineno', None)) - msg = r'malformed node or string:' + node = ast.UnaryOp(ast.UAdd(), ast.UnaryOp(ast.UAdd(), ast.Constant(6))) + self.assertIsNone(getattr(node, "lineno", None)) + msg = r"malformed node or string:" with self.assertRaisesRegex(ValueError, msg): ast.literal_eval(node) def test_literal_eval_syntax_errors(self): with self.assertRaisesRegex(SyntaxError, "unexpected indent"): - ast.literal_eval(r''' + ast.literal_eval(r""" \ (\ - \ ''') + \ """) def test_bad_integer(self): # issue13436: Bad error message with invalid numeric values - body = [ast.ImportFrom(module='time', - names=[ast.alias(name='sleep')], - level=None, - lineno=None, col_offset=None)] + body = [ + ast.ImportFrom( + module="time", + names=[ast.alias(name="sleep")], + level=None, + lineno=None, + col_offset=None, + ) + ] mod = ast.Module(body, []) with self.assertRaises(ValueError) as cm: - compile(mod, 'test', 'exec') + compile(mod, "test", "exec") self.assertIn("invalid integer value: None", str(cm.exception)) def test_level_as_none(self): - body = [ast.ImportFrom(module='time', - names=[ast.alias(name='sleep', - lineno=0, col_offset=0)], - level=None, - lineno=0, col_offset=0)] + body = [ + ast.ImportFrom( + module="time", + names=[ast.alias(name="sleep", lineno=0, col_offset=0)], + level=None, + lineno=0, + col_offset=0, + ) + ] mod = ast.Module(body, []) - code = compile(mod, 'test', 'exec') + code = compile(mod, "test", "exec") ns = {} exec(code, ns) - self.assertIn('sleep', ns) + self.assertIn("sleep", ns) def test_recursion_direct(self): e = ast.UnaryOp(op=ast.Not(), lineno=0, col_offset=0, operand=ast.Constant(1)) @@ -1807,7 +1656,6 @@ def test_recursion_indirect(self): class ASTValidatorTests(unittest.TestCase): - def mod(self, mod, msg=None, mode="exec", *, exc=ValueError): mod.lineno = mod.col_offset = 0 ast.fix_missing_locations(mod) @@ -1833,9 +1681,15 @@ def test_module(self): self.mod(m, "must have Load context", "eval") def _check_arguments(self, fac, check): - def arguments(args=None, posonlyargs=None, vararg=None, - kwonlyargs=None, kwarg=None, - defaults=None, kw_defaults=None): + def arguments( + args=None, + posonlyargs=None, + vararg=None, + kwonlyargs=None, + kwarg=None, + defaults=None, + kw_defaults=None, + ): if args is None: args = [] if posonlyargs is None: @@ -1846,49 +1700,67 @@ def arguments(args=None, posonlyargs=None, vararg=None, defaults = [] if kw_defaults is None: kw_defaults = [] - args = ast.arguments(args, posonlyargs, vararg, kwonlyargs, - kw_defaults, kwarg, defaults) + args = ast.arguments( + args, posonlyargs, vararg, kwonlyargs, kw_defaults, kwarg, defaults + ) return fac(args) + args = [ast.arg("x", ast.Name("x", ast.Store()))] check(arguments(args=args), "must have Load context") check(arguments(posonlyargs=args), "must have Load context") check(arguments(kwonlyargs=args), "must have 
Load context") - check(arguments(defaults=[ast.Constant(3)]), - "more positional defaults than args") - check(arguments(kw_defaults=[ast.Constant(4)]), - "length of kwonlyargs is not the same as kw_defaults") + check( + arguments(defaults=[ast.Constant(3)]), "more positional defaults than args" + ) + check( + arguments(kw_defaults=[ast.Constant(4)]), + "length of kwonlyargs is not the same as kw_defaults", + ) args = [ast.arg("x", ast.Name("x", ast.Load()))] - check(arguments(args=args, defaults=[ast.Name("x", ast.Store())]), - "must have Load context") - args = [ast.arg("a", ast.Name("x", ast.Load())), - ast.arg("b", ast.Name("y", ast.Load()))] - check(arguments(kwonlyargs=args, - kw_defaults=[None, ast.Name("x", ast.Store())]), - "must have Load context") + check( + arguments(args=args, defaults=[ast.Name("x", ast.Store())]), + "must have Load context", + ) + args = [ + ast.arg("a", ast.Name("x", ast.Load())), + ast.arg("b", ast.Name("y", ast.Load())), + ] + check( + arguments(kwonlyargs=args, kw_defaults=[None, ast.Name("x", ast.Store())]), + "must have Load context", + ) def test_funcdef(self): a = ast.arguments([], [], None, [], [], None, []) f = ast.FunctionDef("x", a, [], [], None, None, []) self.stmt(f, "empty body on FunctionDef") - f = ast.FunctionDef("x", a, [ast.Pass()], [ast.Name("x", ast.Store())], None, None, []) + f = ast.FunctionDef( + "x", a, [ast.Pass()], [ast.Name("x", ast.Store())], None, None, [] + ) self.stmt(f, "must have Load context") - f = ast.FunctionDef("x", a, [ast.Pass()], [], - ast.Name("x", ast.Store()), None, []) + f = ast.FunctionDef( + "x", a, [ast.Pass()], [], ast.Name("x", ast.Store()), None, [] + ) self.stmt(f, "must have Load context") f = ast.FunctionDef("x", ast.arguments(), [ast.Pass()]) self.stmt(f) + def fac(args): return ast.FunctionDef("x", args, [ast.Pass()], [], None, None, []) + self._check_arguments(fac, self.stmt) def test_funcdef_pattern_matching(self): # gh-104799: New fields on FunctionDef should be added at the end def matcher(node): match node: - case ast.FunctionDef("foo", ast.arguments(args=[ast.arg("bar")]), - [ast.Pass()], - [ast.Name("capybara", ast.Load())], - ast.Name("pacarana", ast.Load())): + case ast.FunctionDef( + "foo", + ast.arguments(args=[ast.arg("bar")]), + [ast.Pass()], + [ast.Name("capybara", ast.Load())], + ast.Name("pacarana", ast.Load()), + ): return True case _: return False @@ -1904,7 +1776,9 @@ def foo(bar) -> pacarana: self.assertTrue(matcher(funcdef)) def test_classdef(self): - def cls(bases=None, keywords=None, body=None, decorator_list=None, type_params=None): + def cls( + bases=None, keywords=None, body=None, decorator_list=None, type_params=None + ): if bases is None: bases = [] if keywords is None: @@ -1915,38 +1789,46 @@ def cls(bases=None, keywords=None, body=None, decorator_list=None, type_params=N decorator_list = [] if type_params is None: type_params = [] - return ast.ClassDef("myclass", bases, keywords, - body, decorator_list, type_params) - self.stmt(cls(bases=[ast.Name("x", ast.Store())]), - "must have Load context") - self.stmt(cls(keywords=[ast.keyword("x", ast.Name("x", ast.Store()))]), - "must have Load context") + return ast.ClassDef( + "myclass", bases, keywords, body, decorator_list, type_params + ) + + self.stmt(cls(bases=[ast.Name("x", ast.Store())]), "must have Load context") + self.stmt( + cls(keywords=[ast.keyword("x", ast.Name("x", ast.Store()))]), + "must have Load context", + ) self.stmt(cls(body=[]), "empty body on ClassDef") self.stmt(cls(body=[None]), "None disallowed") - 
self.stmt(cls(decorator_list=[ast.Name("x", ast.Store())]), - "must have Load context") + self.stmt( + cls(decorator_list=[ast.Name("x", ast.Store())]), "must have Load context" + ) def test_delete(self): self.stmt(ast.Delete([]), "empty targets on Delete") self.stmt(ast.Delete([None]), "None disallowed") - self.stmt(ast.Delete([ast.Name("x", ast.Load())]), - "must have Del context") + self.stmt(ast.Delete([ast.Name("x", ast.Load())]), "must have Del context") def test_assign(self): self.stmt(ast.Assign([], ast.Constant(3)), "empty targets on Assign") self.stmt(ast.Assign([None], ast.Constant(3)), "None disallowed") - self.stmt(ast.Assign([ast.Name("x", ast.Load())], ast.Constant(3)), - "must have Store context") - self.stmt(ast.Assign([ast.Name("x", ast.Store())], - ast.Name("y", ast.Store())), - "must have Load context") + self.stmt( + ast.Assign([ast.Name("x", ast.Load())], ast.Constant(3)), + "must have Store context", + ) + self.stmt( + ast.Assign([ast.Name("x", ast.Store())], ast.Name("y", ast.Store())), + "must have Load context", + ) def test_augassign(self): - aug = ast.AugAssign(ast.Name("x", ast.Load()), ast.Add(), - ast.Name("y", ast.Load())) + aug = ast.AugAssign( + ast.Name("x", ast.Load()), ast.Add(), ast.Name("y", ast.Load()) + ) self.stmt(aug, "must have Store context") - aug = ast.AugAssign(ast.Name("x", ast.Store()), ast.Add(), - ast.Name("y", ast.Store())) + aug = ast.AugAssign( + ast.Name("x", ast.Store()), ast.Add(), ast.Name("y", ast.Store()) + ) self.stmt(aug, "must have Load context") def test_for(self): @@ -1954,21 +1836,28 @@ def test_for(self): y = ast.Name("y", ast.Load()) p = ast.Pass() self.stmt(ast.For(x, y, [], []), "empty body on For") - self.stmt(ast.For(ast.Name("x", ast.Load()), y, [p], []), - "must have Store context") - self.stmt(ast.For(x, ast.Name("y", ast.Store()), [p], []), - "must have Load context") + self.stmt( + ast.For(ast.Name("x", ast.Load()), y, [p], []), "must have Store context" + ) + self.stmt( + ast.For(x, ast.Name("y", ast.Store()), [p], []), "must have Load context" + ) e = ast.Expr(ast.Name("x", ast.Store())) self.stmt(ast.For(x, y, [e], []), "must have Load context") self.stmt(ast.For(x, y, [p], [e]), "must have Load context") def test_while(self): self.stmt(ast.While(ast.Constant(3), [], []), "empty body on While") - self.stmt(ast.While(ast.Name("x", ast.Store()), [ast.Pass()], []), - "must have Load context") - self.stmt(ast.While(ast.Constant(3), [ast.Pass()], - [ast.Expr(ast.Name("x", ast.Store()))]), - "must have Load context") + self.stmt( + ast.While(ast.Name("x", ast.Store()), [ast.Pass()], []), + "must have Load context", + ) + self.stmt( + ast.While( + ast.Constant(3), [ast.Pass()], [ast.Expr(ast.Name("x", ast.Store()))] + ), + "must have Load context", + ) def test_if(self): self.stmt(ast.If(ast.Constant(3), [], []), "empty body on If") @@ -1976,8 +1865,9 @@ def test_if(self): self.stmt(i, "must have Load context") i = ast.If(ast.Constant(3), [ast.Expr(ast.Name("x", ast.Store()))], []) self.stmt(i, "must have Load context") - i = ast.If(ast.Constant(3), [ast.Pass()], - [ast.Expr(ast.Name("x", ast.Store()))]) + i = ast.If( + ast.Constant(3), [ast.Pass()], [ast.Expr(ast.Name("x", ast.Store()))] + ) self.stmt(i, "must have Load context") def test_with(self): @@ -2039,10 +1929,10 @@ def test_try_star(self): self.stmt(t, "must have Load context") def test_assert(self): - self.stmt(ast.Assert(ast.Name("x", ast.Store()), None), - "must have Load context") - assrt = ast.Assert(ast.Name("x", ast.Load()), - ast.Name("y", 
ast.Store())) + self.stmt( + ast.Assert(ast.Name("x", ast.Store()), None), "must have Load context" + ) + assrt = ast.Assert(ast.Name("x", ast.Load()), ast.Name("y", ast.Store())) self.stmt(assrt, "must have Load context") def test_import(self): @@ -2079,10 +1969,11 @@ def test_unaryop(self): def test_lambda(self): a = ast.arguments([], [], None, [], [], None, []) - self.expr(ast.Lambda(a, ast.Name("x", ast.Store())), - "must have Load context") + self.expr(ast.Lambda(a, ast.Name("x", ast.Store())), "must have Load context") + def fac(args): return ast.Lambda(args, ast.Name("x", ast.Load())) + self._check_arguments(fac, self.expr) def test_ifexp(self): @@ -2104,11 +1995,13 @@ def test_set(self): def _check_comprehension(self, fac): self.expr(fac([]), "comprehension with no generators") - g = ast.comprehension(ast.Name("x", ast.Load()), - ast.Name("x", ast.Load()), [], 0) + g = ast.comprehension( + ast.Name("x", ast.Load()), ast.Name("x", ast.Load()), [], 0 + ) self.expr(fac([g]), "must have Store context") - g = ast.comprehension(ast.Name("x", ast.Store()), - ast.Name("x", ast.Store()), [], 0) + g = ast.comprehension( + ast.Name("x", ast.Store()), ast.Name("x", ast.Store()), [], 0 + ) self.expr(fac([g]), "must have Load context") x = ast.Name("x", ast.Store()) y = ast.Name("y", ast.Load()) @@ -2118,12 +2011,14 @@ def _check_comprehension(self, fac): self.expr(fac([g]), "must have Load context") def _simple_comp(self, fac): - g = ast.comprehension(ast.Name("x", ast.Store()), - ast.Name("x", ast.Load()), [], 0) - self.expr(fac(ast.Name("x", ast.Store()), [g]), - "must have Load context") + g = ast.comprehension( + ast.Name("x", ast.Store()), ast.Name("x", ast.Load()), [], 0 + ) + self.expr(fac(ast.Name("x", ast.Store()), [g]), "must have Load context") + def wrap(gens): return fac(ast.Name("x", ast.Store()), gens) + self._check_comprehension(wrap) def test_listcomp(self): @@ -2136,18 +2031,19 @@ def test_generatorexp(self): self._simple_comp(ast.GeneratorExp) def test_dictcomp(self): - g = ast.comprehension(ast.Name("y", ast.Store()), - ast.Name("p", ast.Load()), [], 0) - c = ast.DictComp(ast.Name("x", ast.Store()), - ast.Name("y", ast.Load()), [g]) + g = ast.comprehension( + ast.Name("y", ast.Store()), ast.Name("p", ast.Load()), [], 0 + ) + c = ast.DictComp(ast.Name("x", ast.Store()), ast.Name("y", ast.Load()), [g]) self.expr(c, "must have Load context") - c = ast.DictComp(ast.Name("x", ast.Load()), - ast.Name("y", ast.Store()), [g]) + c = ast.DictComp(ast.Name("x", ast.Load()), ast.Name("y", ast.Store()), [g]) self.expr(c, "must have Load context") + def factory(comps): k = ast.Name("x", ast.Load()) v = ast.Name("y", ast.Load()) return ast.DictComp(k, v, comps) + self._check_comprehension(factory) def test_yield(self): @@ -2179,62 +2075,68 @@ def test_call(self): def test_num(self): with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) from ast import Num with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('always', '', DeprecationWarning) + warnings.filterwarnings("always", "", DeprecationWarning) + class subint(int): pass + class subfloat(float): pass + class subcomplex(complex): pass + for obj in "0", "hello": self.expr(ast.Num(obj)) for obj in subint(), subfloat(), subcomplex(): self.expr(ast.Num(obj), "invalid type", exc=TypeError) - self.assertEqual([str(w.message) for w in wlog], [ - 'ast.Num is deprecated and will be removed in Python 3.14; 
use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', - ]) + self.assertEqual( + [str(w.message) for w in wlog], + [ + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + "ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead", + ], + ) def test_attribute(self): attr = ast.Attribute(ast.Name("x", ast.Store()), "y", ast.Load()) self.expr(attr, "must have Load context") def test_subscript(self): - sub = ast.Subscript(ast.Name("x", ast.Store()), ast.Constant(3), - ast.Load()) + sub = ast.Subscript(ast.Name("x", ast.Store()), ast.Constant(3), ast.Load()) self.expr(sub, "must have Load context") x = ast.Name("x", ast.Load()) - sub = ast.Subscript(x, ast.Name("y", ast.Store()), - ast.Load()) + sub = ast.Subscript(x, ast.Name("y", ast.Store()), ast.Load()) self.expr(sub, "must have Load context") s = ast.Name("x", ast.Store()) for args in (s, None, None), (None, s, None), (None, None, s): sl = ast.Slice(*args) - self.expr(ast.Subscript(x, sl, ast.Load()), - "must have Load context") + self.expr(ast.Subscript(x, sl, ast.Load()), "must have Load context") sl = ast.Tuple([], ast.Load()) self.expr(ast.Subscript(x, sl, ast.Load())) sl = ast.Tuple([s], ast.Load()) self.expr(ast.Subscript(x, sl, ast.Load()), "must have Load context") def test_starred(self): - left = ast.List([ast.Starred(ast.Name("x", ast.Load()), ast.Store())], - ast.Store()) + left = ast.List( + [ast.Starred(ast.Name("x", ast.Load()), ast.Store())], ast.Store() + ) assign = ast.Assign([left], ast.Constant(4)) self.stmt(assign, "must have Store context") def _sequence(self, fac): self.expr(fac([None], ast.Load()), "None disallowed") - self.expr(fac([ast.Name("x", ast.Store())], ast.Load()), - "must have Load context") + self.expr( + fac([ast.Name("x", ast.Store())], ast.Load()), "must have Load context" + ) def test_list(self): self._sequence(ast.List) @@ -2244,18 +2146,21 @@ def test_tuple(self): def test_nameconstant(self): with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('ignore', '', DeprecationWarning) + warnings.filterwarnings("ignore", "", DeprecationWarning) from ast import NameConstant with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('always', '', DeprecationWarning) + warnings.filterwarnings("always", "", DeprecationWarning) self.expr(ast.NameConstant(4)) - self.assertEqual([str(w.message) for w in wlog], [ - 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', - ]) + self.assertEqual( + [str(w.message) for w in wlog], + [ + "ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead", + ], + ) - @support.requires_resource('cpu') + @support.requires_resource("cpu") def test_stdlib_validates(self): stdlib = os.path.dirname(ast.__file__) tests = [fn for fn in os.listdir(stdlib) if fn.endswith(".py")] @@ -2271,130 +2176,79 @@ def test_stdlib_validates(self): 
constant_1 = ast.Constant(1) pattern_1 = ast.MatchValue(constant_1) - constant_x = ast.Constant('x') + constant_x = ast.Constant("x") pattern_x = ast.MatchValue(constant_x) constant_true = ast.Constant(True) pattern_true = ast.MatchSingleton(True) - name_carter = ast.Name('carter', ast.Load()) + name_carter = ast.Name("carter", ast.Load()) _MATCH_PATTERNS = [ ast.MatchValue( ast.Attribute( - ast.Attribute( - ast.Name('x', ast.Store()), - 'y', ast.Load() - ), - 'z', ast.Load() + ast.Attribute(ast.Name("x", ast.Store()), "y", ast.Load()), + "z", + ast.Load(), ) ), ast.MatchValue( ast.Attribute( - ast.Attribute( - ast.Name('x', ast.Load()), - 'y', ast.Store() - ), - 'z', ast.Load() + ast.Attribute(ast.Name("x", ast.Load()), "y", ast.Store()), + "z", + ast.Load(), ) ), - ast.MatchValue( - ast.Constant(...) - ), - ast.MatchValue( - ast.Constant(True) - ), - ast.MatchValue( - ast.Constant((1,2,3)) - ), - ast.MatchSingleton('string'), - ast.MatchSequence([ - ast.MatchSingleton('string') - ]), - ast.MatchSequence( - [ - ast.MatchSequence( - [ - ast.MatchSingleton('string') - ] - ) - ] - ), - ast.MatchMapping( - [constant_1, constant_true], - [pattern_x] - ), + ast.MatchValue(ast.Constant(...)), + ast.MatchValue(ast.Constant(True)), + ast.MatchValue(ast.Constant((1, 2, 3))), + ast.MatchSingleton("string"), + ast.MatchSequence([ast.MatchSingleton("string")]), + ast.MatchSequence([ast.MatchSequence([ast.MatchSingleton("string")])]), + ast.MatchMapping([constant_1, constant_true], [pattern_x]), ast.MatchMapping( - [constant_true, constant_1], - [pattern_x, pattern_1], - rest='True' + [constant_true, constant_1], [pattern_x, pattern_1], rest="True" ), ast.MatchMapping( - [constant_true, ast.Starred(ast.Name('lol', ast.Load()), ast.Load())], + [constant_true, ast.Starred(ast.Name("lol", ast.Load()), ast.Load())], [pattern_x, pattern_1], - rest='legit' + rest="legit", ), ast.MatchClass( - ast.Attribute( - ast.Attribute( - constant_x, - 'y', ast.Load()), - 'z', ast.Load()), - patterns=[], kwd_attrs=[], kwd_patterns=[] + ast.Attribute(ast.Attribute(constant_x, "y", ast.Load()), "z", ast.Load()), + patterns=[], + kwd_attrs=[], + kwd_patterns=[], ), ast.MatchClass( - name_carter, - patterns=[], - kwd_attrs=['True'], - kwd_patterns=[pattern_1] + name_carter, patterns=[], kwd_attrs=["True"], kwd_patterns=[pattern_1] ), ast.MatchClass( - name_carter, - patterns=[], - kwd_attrs=[], - kwd_patterns=[pattern_1] + name_carter, patterns=[], kwd_attrs=[], kwd_patterns=[pattern_1] ), ast.MatchClass( name_carter, - patterns=[ast.MatchSingleton('string')], + patterns=[ast.MatchSingleton("string")], kwd_attrs=[], - kwd_patterns=[] + kwd_patterns=[], ), ast.MatchClass( - name_carter, - patterns=[ast.MatchStar()], - kwd_attrs=[], - kwd_patterns=[] + name_carter, patterns=[ast.MatchStar()], kwd_attrs=[], kwd_patterns=[] ), ast.MatchClass( - name_carter, - patterns=[], - kwd_attrs=[], - kwd_patterns=[ast.MatchStar()] + name_carter, patterns=[], kwd_attrs=[], kwd_patterns=[ast.MatchStar()] ), ast.MatchClass( constant_true, # invalid name patterns=[], - kwd_attrs=['True'], - kwd_patterns=[pattern_1] - ), - ast.MatchSequence( - [ - ast.MatchStar("True") - ] - ), - ast.MatchAs( - name='False' - ), - ast.MatchOr( - [] - ), - ast.MatchOr( - [pattern_1] - ), - ast.MatchOr( - [pattern_1, pattern_x, ast.MatchSingleton('xxx')] + kwd_attrs=["True"], + kwd_patterns=[pattern_1], ), + ast.MatchSequence([ast.MatchStar("True")]), + ast.MatchAs(name="False"), + ast.MatchOr([]), + ast.MatchOr([pattern_1]), + ast.MatchOr([pattern_1, pattern_x, 
ast.MatchSingleton("xxx")]), ast.MatchAs(name="_"), ast.MatchStar(name="x"), ast.MatchSequence([ast.MatchStar("_")]), @@ -2402,17 +2256,12 @@ def test_stdlib_validates(self): ] def test_match_validation_pattern(self): - name_x = ast.Name('x', ast.Load()) + name_x = ast.Name("x", ast.Load()) for pattern in self._MATCH_PATTERNS: with self.subTest(ast.dump(pattern, indent=4)): node = ast.Match( subject=name_x, - cases = [ - ast.match_case( - pattern=pattern, - body = [ast.Pass()] - ) - ] + cases=[ast.match_case(pattern=pattern, body=[ast.Pass()])], ) node = ast.fix_missing_locations(node) module = ast.Module([node], []) @@ -2435,16 +2284,15 @@ def compile_constant(self, value): ns = {} exec(code, ns) - return ns['x'] + return ns["x"] def test_validation(self): with self.assertRaises(TypeError) as cm: self.compile_constant([1, 2, 3]) - self.assertEqual(str(cm.exception), - "got an invalid type in Constant: list") + self.assertEqual(str(cm.exception), "got an invalid type in Constant: list") def test_singletons(self): - for const in (None, False, True, Ellipsis, b'', frozenset()): + for const in (None, False, True, Ellipsis, b"", frozenset()): with self.subTest(const=const): value = self.compile_constant(const) self.assertIs(value, const) @@ -2455,10 +2303,17 @@ def test_values(self): for level in range(3): nested_tuple = (nested_tuple, 2) nested_frozenset = frozenset({nested_frozenset, 2}) - values = (123, 123.0, 123j, - "unicode", b'bytes', - tuple("tuple"), frozenset("frozenset"), - nested_tuple, nested_frozenset) + values = ( + 123, + 123.0, + 123j, + "unicode", + b"bytes", + tuple("tuple"), + frozenset("frozenset"), + nested_tuple, + nested_frozenset, + ) for value in values: with self.subTest(value=value): result = self.compile_constant(value) @@ -2474,42 +2329,35 @@ def test_assign_to_constant(self): with self.assertRaises(ValueError) as cm: compile(tree, "string", "exec") - self.assertEqual(str(cm.exception), - "expression which can't be assigned " - "to in Store context") + self.assertEqual( + str(cm.exception), + "expression which can't be assigned " "to in Store context", + ) def test_get_docstring(self): tree = ast.parse("'docstring'\nx = 1") - self.assertEqual(ast.get_docstring(tree), 'docstring') + self.assertEqual(ast.get_docstring(tree), "docstring") def get_load_const(self, tree): # Compile to bytecode, disassemble and get parameter of LOAD_CONST # instructions - co = compile(tree, '', 'exec') + co = compile(tree, "", "exec") consts = [] for instr in dis.get_instructions(co): - if instr.opname == 'LOAD_CONST' or instr.opname == 'RETURN_CONST': + if instr.opname == "LOAD_CONST" or instr.opname == "RETURN_CONST": consts.append(instr.argval) return consts @support.cpython_only def test_load_const(self): - consts = [None, - True, False, - 124, - 2.0, - 3j, - "unicode", - b'bytes', - (1, 2, 3)] - - code = '\n'.join(['x={!r}'.format(const) for const in consts]) - code += '\nx = ...' + consts = [None, True, False, 124, 2.0, 3j, "unicode", b"bytes", (1, 2, 3)] + + code = "\n".join(["x={!r}".format(const) for const in consts]) + code += "\nx = ..." 
consts.extend((Ellipsis, None)) tree = ast.parse(code) - self.assertEqual(self.get_load_const(tree), - consts) + self.assertEqual(self.get_load_const(tree), consts) # Replace expression nodes with constants for assign, const in zip(tree.body, consts): @@ -2518,8 +2366,7 @@ def test_load_const(self): ast.copy_location(new_node, assign.value) assign.value = new_node - self.assertEqual(self.get_load_const(tree), - consts) + self.assertEqual(self.get_load_const(tree), consts) def test_literal_eval(self): tree = ast.parse("1 + 2") @@ -2533,22 +2380,22 @@ def test_literal_eval(self): ast.copy_location(new_right, binop.right) binop.right = new_right - self.assertEqual(ast.literal_eval(binop), 10+20j) + self.assertEqual(ast.literal_eval(binop), 10 + 20j) def test_string_kind(self): - c = ast.parse('"x"', mode='eval').body + c = ast.parse('"x"', mode="eval").body self.assertEqual(c.value, "x") self.assertEqual(c.kind, None) - c = ast.parse('u"x"', mode='eval').body + c = ast.parse('u"x"', mode="eval").body self.assertEqual(c.value, "x") self.assertEqual(c.kind, "u") - c = ast.parse('r"x"', mode='eval').body + c = ast.parse('r"x"', mode="eval").body self.assertEqual(c.value, "x") self.assertEqual(c.kind, None) - c = ast.parse('b"x"', mode='eval').body + c = ast.parse('b"x"', mode="eval").body self.assertEqual(c.value, b"x") self.assertEqual(c.kind, None) @@ -2559,6 +2406,7 @@ class EndPositionTests(unittest.TestCase): Testing end positions of nodes requires a bit of extra care because of how LL parsers work. """ + def _check_end_pos(self, ast_node, end_lineno, end_col_offset): self.assertEqual(ast_node.end_lineno, end_lineno) self.assertEqual(ast_node.end_col_offset, end_col_offset) @@ -2572,55 +2420,55 @@ def _parse_value(self, s): return ast.parse(s).body[0].value def test_lambda(self): - s = 'lambda x, *y: None' + s = "lambda x, *y: None" lam = self._parse_value(s) - self._check_content(s, lam.body, 'None') - self._check_content(s, lam.args.args[0], 'x') - self._check_content(s, lam.args.vararg, 'y') + self._check_content(s, lam.body, "None") + self._check_content(s, lam.args.args[0], "x") + self._check_content(s, lam.args.vararg, "y") def test_func_def(self): - s = dedent(''' + s = dedent(""" def func(x: int, *args: str, z: float = 0, **kwargs: Any) -> bool: return True - ''').strip() + """).strip() fdef = ast.parse(s).body[0] self._check_end_pos(fdef, 5, 15) - self._check_content(s, fdef.body[0], 'return True') - self._check_content(s, fdef.args.args[0], 'x: int') - self._check_content(s, fdef.args.args[0].annotation, 'int') - self._check_content(s, fdef.args.kwarg, 'kwargs: Any') - self._check_content(s, fdef.args.kwarg.annotation, 'Any') + self._check_content(s, fdef.body[0], "return True") + self._check_content(s, fdef.args.args[0], "x: int") + self._check_content(s, fdef.args.args[0].annotation, "int") + self._check_content(s, fdef.args.kwarg, "kwargs: Any") + self._check_content(s, fdef.args.kwarg.annotation, "Any") def test_call(self): - s = 'func(x, y=2, **kw)' + s = "func(x, y=2, **kw)" call = self._parse_value(s) - self._check_content(s, call.func, 'func') - self._check_content(s, call.keywords[0].value, '2') - self._check_content(s, call.keywords[1].value, 'kw') + self._check_content(s, call.func, "func") + self._check_content(s, call.keywords[0].value, "2") + self._check_content(s, call.keywords[1].value, "kw") def test_call_noargs(self): - s = 'x[0]()' + s = "x[0]()" call = self._parse_value(s) - self._check_content(s, call.func, 'x[0]') + self._check_content(s, call.func, "x[0]") 
self._check_end_pos(call, 1, 6) def test_class_def(self): - s = dedent(''' + s = dedent(""" class C(A, B): x: int = 0 - ''').strip() + """).strip() cdef = ast.parse(s).body[0] self._check_end_pos(cdef, 2, 14) - self._check_content(s, cdef.bases[1], 'B') - self._check_content(s, cdef.body[0], 'x: int = 0') + self._check_content(s, cdef.bases[1], "B") + self._check_content(s, cdef.body[0], "x: int = 0") def test_class_kw(self): - s = 'class S(metaclass=abc.ABCMeta): pass' + s = "class S(metaclass=abc.ABCMeta): pass" cdef = ast.parse(s).body[0] - self._check_content(s, cdef.keywords[0].value, 'abc.ABCMeta') + self._check_content(s, cdef.keywords[0].value, "abc.ABCMeta") def test_multi_line_str(self): s = dedent(''' @@ -2633,10 +2481,10 @@ def test_multi_line_str(self): self._check_end_pos(assign.value, 3, 40) def test_continued_str(self): - s = dedent(''' + s = dedent(""" x = "first part" \\ "second part" - ''').strip() + """).strip() assign = ast.parse(s).body[0] self._check_end_pos(assign, 2, 13) self._check_end_pos(assign.value, 2, 13) @@ -2644,7 +2492,7 @@ def test_continued_str(self): def test_suites(self): # We intentionally put these into the same string to check # that empty lines are not part of the suite. - s = dedent(''' + s = dedent(""" while True: pass @@ -2664,7 +2512,7 @@ def test_suites(self): pass pass - ''').strip() + """).strip() mod = ast.parse(s) while_loop = mod.body[0] if_stmt = mod.body[1] @@ -2678,18 +2526,18 @@ def test_suites(self): self._check_end_pos(try_stmt, 17, 8) self._check_end_pos(pass_stmt, 19, 4) - self._check_content(s, while_loop.test, 'True') - self._check_content(s, if_stmt.body[0], 'x = None') - self._check_content(s, if_stmt.orelse[0].test, 'other()') - self._check_content(s, for_loop.target, 'x, y') - self._check_content(s, try_stmt.body[0], 'raise RuntimeError') - self._check_content(s, try_stmt.handlers[0].type, 'TypeError') + self._check_content(s, while_loop.test, "True") + self._check_content(s, if_stmt.body[0], "x = None") + self._check_content(s, if_stmt.orelse[0].test, "other()") + self._check_content(s, for_loop.target, "x, y") + self._check_content(s, try_stmt.body[0], "raise RuntimeError") + self._check_content(s, try_stmt.handlers[0].type, "TypeError") def test_fstring(self): s = 'x = f"abc {x + y} abc"' fstr = self._parse_value(s) binop = fstr.values[1].value - self._check_content(s, binop, 'x + y') + self._check_content(s, binop, "x + y") def test_fstring_multi_line(self): s = dedent(''' @@ -2704,200 +2552,198 @@ def test_fstring_multi_line(self): fstr = self._parse_value(s) binop = fstr.values[1].value self._check_end_pos(binop, 5, 7) - self._check_content(s, binop.left, 'arg_one') - self._check_content(s, binop.right, 'arg_two') + self._check_content(s, binop.left, "arg_one") + self._check_content(s, binop.right, "arg_two") def test_import_from_multi_line(self): - s = dedent(''' + s = dedent(""" from x.y.z import ( a, b, c as c ) - ''').strip() + """).strip() imp = ast.parse(s).body[0] self._check_end_pos(imp, 3, 1) self._check_end_pos(imp.names[2], 2, 16) def test_slices(self): - s1 = 'f()[1, 2] [0]' - s2 = 'x[ a.b: c.d]' - sm = dedent(''' + s1 = "f()[1, 2] [0]" + s2 = "x[ a.b: c.d]" + sm = dedent(""" x[ a.b: f () , g () : c.d ] - ''').strip() + """).strip() i1, i2, im = map(self._parse_value, (s1, s2, sm)) - self._check_content(s1, i1.value, 'f()[1, 2]') - self._check_content(s1, i1.value.slice, '1, 2') - self._check_content(s2, i2.slice.lower, 'a.b') - self._check_content(s2, i2.slice.upper, 'c.d') - self._check_content(sm, 
im.slice.elts[0].upper, 'f ()') - self._check_content(sm, im.slice.elts[1].lower, 'g ()') + self._check_content(s1, i1.value, "f()[1, 2]") + self._check_content(s1, i1.value.slice, "1, 2") + self._check_content(s2, i2.slice.lower, "a.b") + self._check_content(s2, i2.slice.upper, "c.d") + self._check_content(sm, im.slice.elts[0].upper, "f ()") + self._check_content(sm, im.slice.elts[1].lower, "g ()") self._check_end_pos(im, 3, 3) def test_binop(self): - s = dedent(''' + s = dedent(""" (1 * 2 + (3 ) + 4 ) - ''').strip() + """).strip() binop = self._parse_value(s) self._check_end_pos(binop, 2, 6) - self._check_content(s, binop.right, '4') - self._check_content(s, binop.left, '1 * 2 + (3 )') - self._check_content(s, binop.left.right, '3') + self._check_content(s, binop.right, "4") + self._check_content(s, binop.left, "1 * 2 + (3 )") + self._check_content(s, binop.left.right, "3") def test_boolop(self): - s = dedent(''' + s = dedent(""" if (one_condition and (other_condition or yet_another_one)): pass - ''').strip() + """).strip() bop = ast.parse(s).body[0].test self._check_end_pos(bop, 2, 44) - self._check_content(s, bop.values[1], - 'other_condition or yet_another_one') + self._check_content(s, bop.values[1], "other_condition or yet_another_one") def test_tuples(self): - s1 = 'x = () ;' - s2 = 'x = 1 , ;' - s3 = 'x = (1 , 2 ) ;' - sm = dedent(''' + s1 = "x = () ;" + s2 = "x = 1 , ;" + s3 = "x = (1 , 2 ) ;" + sm = dedent(""" x = ( a, b, ) - ''').strip() + """).strip() t1, t2, t3, tm = map(self._parse_value, (s1, s2, s3, sm)) - self._check_content(s1, t1, '()') - self._check_content(s2, t2, '1 ,') - self._check_content(s3, t3, '(1 , 2 )') + self._check_content(s1, t1, "()") + self._check_content(s2, t2, "1 ,") + self._check_content(s3, t3, "(1 , 2 )") self._check_end_pos(tm, 3, 1) def test_attribute_spaces(self): - s = 'func(x. y .z)' + s = "func(x. y .z)" call = self._parse_value(s) self._check_content(s, call, s) - self._check_content(s, call.args[0], 'x. y .z') + self._check_content(s, call.args[0], "x. y .z") def test_redundant_parenthesis(self): - s = '( ( ( a + b ) ) )' + s = "( ( ( a + b ) ) )" v = ast.parse(s).body[0].value - self.assertEqual(type(v).__name__, 'BinOp') - self._check_content(s, v, 'a + b') - s2 = 'await ' + s + self.assertEqual(type(v).__name__, "BinOp") + self._check_content(s, v, "a + b") + s2 = "await " + s v = ast.parse(s2).body[0].value.value - self.assertEqual(type(v).__name__, 'BinOp') - self._check_content(s2, v, 'a + b') + self.assertEqual(type(v).__name__, "BinOp") + self._check_content(s2, v, "a + b") def test_trailers_with_redundant_parenthesis(self): tests = ( - ('( ( ( a ) ) ) ( )', 'Call'), - ('( ( ( a ) ) ) ( b )', 'Call'), - ('( ( ( a ) ) ) [ b ]', 'Subscript'), - ('( ( ( a ) ) ) . b', 'Attribute'), + ("( ( ( a ) ) ) ( )", "Call"), + ("( ( ( a ) ) ) ( b )", "Call"), + ("( ( ( a ) ) ) [ b ]", "Subscript"), + ("( ( ( a ) ) ) . 
b", "Attribute"), ) for s, t in tests: with self.subTest(s): v = ast.parse(s).body[0].value self.assertEqual(type(v).__name__, t) self._check_content(s, v, s) - s2 = 'await ' + s + s2 = "await " + s v = ast.parse(s2).body[0].value.value self.assertEqual(type(v).__name__, t) self._check_content(s2, v, s) def test_displays(self): - s1 = '[{}, {1, }, {1, 2,} ]' - s2 = '{a: b, f (): g () ,}' + s1 = "[{}, {1, }, {1, 2,} ]" + s2 = "{a: b, f (): g () ,}" c1 = self._parse_value(s1) c2 = self._parse_value(s2) - self._check_content(s1, c1.elts[0], '{}') - self._check_content(s1, c1.elts[1], '{1, }') - self._check_content(s1, c1.elts[2], '{1, 2,}') - self._check_content(s2, c2.keys[1], 'f ()') - self._check_content(s2, c2.values[1], 'g ()') + self._check_content(s1, c1.elts[0], "{}") + self._check_content(s1, c1.elts[1], "{1, }") + self._check_content(s1, c1.elts[2], "{1, 2,}") + self._check_content(s2, c2.keys[1], "f ()") + self._check_content(s2, c2.values[1], "g ()") def test_comprehensions(self): - s = dedent(''' + s = dedent(""" x = [{x for x, y in stuff if cond.x} for stuff in things] - ''').strip() + """).strip() cmp = self._parse_value(s) self._check_end_pos(cmp, 2, 37) - self._check_content(s, cmp.generators[0].iter, 'things') - self._check_content(s, cmp.elt.generators[0].iter, 'stuff') - self._check_content(s, cmp.elt.generators[0].ifs[0], 'cond.x') - self._check_content(s, cmp.elt.generators[0].target, 'x, y') + self._check_content(s, cmp.generators[0].iter, "things") + self._check_content(s, cmp.elt.generators[0].iter, "stuff") + self._check_content(s, cmp.elt.generators[0].ifs[0], "cond.x") + self._check_content(s, cmp.elt.generators[0].target, "x, y") def test_yield_await(self): - s = dedent(''' + s = dedent(""" async def f(): yield x await y - ''').strip() + """).strip() fdef = ast.parse(s).body[0] - self._check_content(s, fdef.body[0].value, 'yield x') - self._check_content(s, fdef.body[1].value, 'await y') + self._check_content(s, fdef.body[0].value, "yield x") + self._check_content(s, fdef.body[1].value, "await y") def test_source_segment_multi(self): - s_orig = dedent(''' + s_orig = dedent(""" x = ( a, b, ) + () - ''').strip() - s_tuple = dedent(''' + """).strip() + s_tuple = dedent(""" ( a, b, ) - ''').strip() + """).strip() binop = self._parse_value(s_orig) self.assertEqual(ast.get_source_segment(s_orig, binop.left), s_tuple) def test_source_segment_padded(self): - s_orig = dedent(''' + s_orig = dedent(""" class C: def fun(self) -> None: "ЖЖЖЖЖ" - ''').strip() - s_method = ' def fun(self) -> None:\n' \ - ' "ЖЖЖЖЖ"' + """).strip() + s_method = " def fun(self) -> None:\n" ' "ЖЖЖЖЖ"' cdef = ast.parse(s_orig).body[0] - self.assertEqual(ast.get_source_segment(s_orig, cdef.body[0], padded=True), - s_method) + self.assertEqual( + ast.get_source_segment(s_orig, cdef.body[0], padded=True), s_method + ) def test_source_segment_endings(self): - s = 'v = 1\r\nw = 1\nx = 1\n\ry = 1\rz = 1\r\n' + s = "v = 1\r\nw = 1\nx = 1\n\ry = 1\rz = 1\r\n" v, w, x, y, z = ast.parse(s).body - self._check_content(s, v, 'v = 1') - self._check_content(s, w, 'w = 1') - self._check_content(s, x, 'x = 1') - self._check_content(s, y, 'y = 1') - self._check_content(s, z, 'z = 1') + self._check_content(s, v, "v = 1") + self._check_content(s, w, "w = 1") + self._check_content(s, x, "x = 1") + self._check_content(s, y, "y = 1") + self._check_content(s, z, "z = 1") def test_source_segment_tabs(self): - s = dedent(''' + s = dedent(""" class C: \t\f def fun(self) -> None: \t\f pass - ''').strip() - s_method = ' \t\f def 
fun(self) -> None:\n' \ - ' \t\f pass' + """).strip() + s_method = " \t\f def fun(self) -> None:\n" " \t\f pass" cdef = ast.parse(s).body[0] self.assertEqual(ast.get_source_segment(s, cdef.body[0], padded=True), s_method) def test_source_segment_newlines(self): - s = 'def f():\n pass\ndef g():\r pass\r\ndef h():\r\n pass\r\n' + s = "def f():\n pass\ndef g():\r pass\r\ndef h():\r\n pass\r\n" f, g, h = ast.parse(s).body - self._check_content(s, f, 'def f():\n pass') - self._check_content(s, g, 'def g():\r pass') - self._check_content(s, h, 'def h():\r\n pass') + self._check_content(s, f, "def f():\n pass") + self._check_content(s, g, "def g():\r pass") + self._check_content(s, h, "def h():\r\n pass") - s = 'def f():\n a = 1\r b = 2\r\n c = 3\n' + s = "def f():\n a = 1\r b = 2\r\n c = 3\n" f = ast.parse(s).body[0] self._check_content(s, f, s.rstrip()) def test_source_segment_missing_info(self): - s = 'v = 1\r\nw = 1\nx = 1\n\ry = 1\r\n' + s = "v = 1\r\nw = 1\nx = 1\n\ry = 1\r\n" v, w, x, y = ast.parse(s).body del v.lineno del w.end_lineno @@ -2908,21 +2754,28 @@ def test_source_segment_missing_info(self): self.assertIsNone(ast.get_source_segment(s, x)) self.assertIsNone(ast.get_source_segment(s, y)) + class BaseNodeVisitorCases: # Both `NodeVisitor` and `NodeTranformer` must raise these warnings: def test_old_constant_nodes(self): class Visitor(self.visitor_class): def visit_Num(self, node): - log.append((node.lineno, 'Num', node.n)) + log.append((node.lineno, "Num", node.n)) + def visit_Str(self, node): - log.append((node.lineno, 'Str', node.s)) + log.append((node.lineno, "Str", node.s)) + def visit_Bytes(self, node): - log.append((node.lineno, 'Bytes', node.s)) + log.append((node.lineno, "Bytes", node.s)) + def visit_NameConstant(self, node): - log.append((node.lineno, 'NameConstant', node.value)) + log.append((node.lineno, "NameConstant", node.value)) + def visit_Ellipsis(self, node): - log.append((node.lineno, 'Ellipsis', ...)) - mod = ast.parse(dedent('''\ + log.append((node.lineno, "Ellipsis", ...)) + + mod = ast.parse( + dedent("""\ i = 42 f = 4.25 c = 4.25j @@ -2931,37 +2784,44 @@ def visit_Ellipsis(self, node): t = True n = None e = ... 
- ''')) + """) + ) visitor = Visitor() log = [] with warnings.catch_warnings(record=True) as wlog: - warnings.filterwarnings('always', '', DeprecationWarning) + warnings.filterwarnings("always", "", DeprecationWarning) visitor.visit(mod) - self.assertEqual(log, [ - (1, 'Num', 42), - (2, 'Num', 4.25), - (3, 'Num', 4.25j), - (4, 'Str', 'string'), - (5, 'Bytes', b'bytes'), - (6, 'NameConstant', True), - (7, 'NameConstant', None), - (8, 'Ellipsis', ...), - ]) - self.assertEqual([str(w.message) for w in wlog], [ - 'visit_Num is deprecated; add visit_Constant', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'visit_Num is deprecated; add visit_Constant', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'visit_Num is deprecated; add visit_Constant', - 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', - 'visit_Str is deprecated; add visit_Constant', - 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', - 'visit_Bytes is deprecated; add visit_Constant', - 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', - 'visit_NameConstant is deprecated; add visit_Constant', - 'visit_NameConstant is deprecated; add visit_Constant', - 'visit_Ellipsis is deprecated; add visit_Constant', - ]) + self.assertEqual( + log, + [ + (1, "Num", 42), + (2, "Num", 4.25), + (3, "Num", 4.25j), + (4, "Str", "string"), + (5, "Bytes", b"bytes"), + (6, "NameConstant", True), + (7, "NameConstant", None), + (8, "Ellipsis", ...), + ], + ) + self.assertEqual( + [str(w.message) for w in wlog], + [ + "visit_Num is deprecated; add visit_Constant", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "visit_Num is deprecated; add visit_Constant", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "visit_Num is deprecated; add visit_Constant", + "Attribute n is deprecated and will be removed in Python 3.14; use value instead", + "visit_Str is deprecated; add visit_Constant", + "Attribute s is deprecated and will be removed in Python 3.14; use value instead", + "visit_Bytes is deprecated; add visit_Constant", + "Attribute s is deprecated and will be removed in Python 3.14; use value instead", + "visit_NameConstant is deprecated; add visit_Constant", + "visit_NameConstant is deprecated; add visit_Constant", + "visit_Ellipsis is deprecated; add visit_Constant", + ], + ) class NodeVisitorTests(BaseNodeVisitorCases, unittest.TestCase): @@ -2971,8 +2831,7 @@ class NodeVisitorTests(BaseNodeVisitorCases, unittest.TestCase): class NodeTransformerTests(ASTTestMixin, BaseNodeVisitorCases, unittest.TestCase): visitor_class = ast.NodeTransformer - def assertASTTransformation(self, tranformer_class, - initial_code, expected_code): + def assertASTTransformation(self, tranformer_class, initial_code, expected_code): initial_ast = ast.parse(dedent(initial_code)) expected_ast = ast.parse(dedent(expected_code)) @@ -2982,15 +2841,15 @@ def assertASTTransformation(self, tranformer_class, self.assertASTEqual(result_ast, expected_ast) def test_node_remove_single(self): - code = 'def func(arg) -> SomeType: ...' - expected = 'def func(arg): ...' + code = "def func(arg) -> SomeType: ..." + expected = "def func(arg): ..." # Since `FunctionDef.returns` is defined as a single value, we test # the `if isinstance(old_value, AST):` branch here. 
class SomeTypeRemover(ast.NodeTransformer): def visit_Name(self, node: ast.Name): self.generic_visit(node) - if node.id == 'SomeType': + if node.id == "SomeType": return None return node @@ -3029,11 +2888,11 @@ class DSL(Base, kw1=True, kw2=True, kw3=False): ... class ExtendKeywords(ast.NodeTransformer): def visit_keyword(self, node: ast.keyword): self.generic_visit(node) - if node.arg == 'kw1': + if node.arg == "kw1": return [ node, - ast.keyword('kw2', ast.Constant(True)), - ast.keyword('kw3', ast.Constant(False)), + ast.keyword("kw2", ast.Constant(True)), + ast.keyword("kw3", ast.Constant(False)), ] return node @@ -3052,8 +2911,8 @@ def func(arg): class PrintToLog(ast.NodeTransformer): def visit_Call(self, node: ast.Call): self.generic_visit(node) - if isinstance(node.func, ast.Name) and node.func.id == 'print': - node.func.id = 'log' + if isinstance(node.func, ast.Name) and node.func.id == "print": + node.func.id = "log" return node self.assertASTTransformation(PrintToLog, code, expected) @@ -3071,15 +2930,15 @@ def func(arg): class PrintToLog(ast.NodeTransformer): def visit_Call(self, node: ast.Call): self.generic_visit(node) - if isinstance(node.func, ast.Name) and node.func.id == 'print': + if isinstance(node.func, ast.Name) and node.func.id == "print": return ast.Call( func=ast.Attribute( - ast.Name('logger', ctx=ast.Load()), - attr='log', + ast.Name("logger", ctx=ast.Load()), + attr="log", ctx=ast.Load(), ), args=node.args, - keywords=[ast.keyword('debug', ast.Constant(True))], + keywords=[ast.keyword("debug", ast.Constant(True))], ) return node @@ -3093,13 +2952,15 @@ def test_FunctionDef(self): args = ast.arguments() self.assertEqual(args.args, []) self.assertEqual(args.posonlyargs, []) - with self.assertWarnsRegex(DeprecationWarning, - r"FunctionDef\.__init__ missing 1 required positional argument: 'name'"): + with self.assertWarnsRegex( + DeprecationWarning, + r"FunctionDef\.__init__ missing 1 required positional argument: 'name'", + ): node = ast.FunctionDef(args=args) self.assertFalse(hasattr(node, "name")) self.assertEqual(node.decorator_list, []) - node = ast.FunctionDef(name='foo', args=args) - self.assertEqual(node.name, 'foo') + node = ast.FunctionDef(name="foo", args=args) + self.assertEqual(node.name, "foo") self.assertEqual(node.decorator_list, []) def test_expr_context(self): @@ -3115,8 +2976,10 @@ def test_expr_context(self): self.assertEqual(name3.id, "x") self.assertIsInstance(name3.ctx, ast.Del) - with self.assertWarnsRegex(DeprecationWarning, - r"Name\.__init__ missing 1 required positional argument: 'id'"): + with self.assertWarnsRegex( + DeprecationWarning, + r"Name\.__init__ missing 1 required positional argument: 'id'", + ): name3 = ast.Name() def test_custom_subclass_with_no_fields(self): @@ -3129,7 +2992,7 @@ class NoInit(ast.AST): def test_fields_but_no_field_types(self): class Fields(ast.AST): - _fields = ('a',) + _fields = ("a",) obj = Fields() with self.assertRaises(AttributeError): @@ -3139,8 +3002,8 @@ class Fields(ast.AST): def test_fields_and_types(self): class FieldsAndTypes(ast.AST): - _fields = ('a',) - _field_types = {'a': int | None} + _fields = ("a",) + _field_types = {"a": int | None} a: int | None = None obj = FieldsAndTypes() @@ -3148,13 +3011,29 @@ class FieldsAndTypes(ast.AST): obj = FieldsAndTypes(a=1) self.assertEqual(obj.a, 1) + def test_custom_attributes(self): + class MyAttrs(ast.AST): + _attributes = ("a", "b") + + obj = MyAttrs(a=1, b=2) + self.assertEqual(obj.a, 1) + self.assertEqual(obj.b, 2) + + with self.assertWarnsRegex( + 
DeprecationWarning, + r"MyAttrs.__init__ got an unexpected keyword argument 'c'.", + ): + obj = MyAttrs(c=3) + def test_fields_and_types_no_default(self): class FieldsAndTypesNoDefault(ast.AST): - _fields = ('a',) - _field_types = {'a': int} + _fields = ("a",) + _field_types = {"a": int} - with self.assertWarnsRegex(DeprecationWarning, - r"FieldsAndTypesNoDefault\.__init__ missing 1 required positional argument: 'a'\."): + with self.assertWarnsRegex( + DeprecationWarning, + r"FieldsAndTypesNoDefault\.__init__ missing 1 required positional argument: 'a'\.", + ): obj = FieldsAndTypesNoDefault() with self.assertRaises(AttributeError): obj.a @@ -3163,14 +3042,14 @@ class FieldsAndTypesNoDefault(ast.AST): def test_incomplete_field_types(self): class MoreFieldsThanTypes(ast.AST): - _fields = ('a', 'b') - _field_types = {'a': int | None} + _fields = ("a", "b") + _field_types = {"a": int | None} a: int | None = None b: int | None = None with self.assertWarnsRegex( DeprecationWarning, - r"Field 'b' is missing from MoreFieldsThanTypes\._field_types" + r"Field 'b' is missing from MoreFieldsThanTypes\._field_types", ): obj = MoreFieldsThanTypes() self.assertIs(obj.a, None) @@ -3182,8 +3061,8 @@ class MoreFieldsThanTypes(ast.AST): def test_complete_field_types(self): class _AllFieldTypes(ast.AST): - _fields = ('a', 'b') - _field_types = {'a': int | None, 'b': list[str]} + _fields = ("a", "b") + _field_types = {"a": int | None, "b": list[str]} # This must be set explicitly a: int | None = None # This will add an implicit empty list default @@ -3200,9 +3079,9 @@ class ModuleStateTests(unittest.TestCase): def check_ast_module(self): # Check that the _ast module still works as expected - code = 'x + 1' - filename = '' - mode = 'eval' + code = "x + 1" + filename = "" + mode = "eval" # Create _ast.AST subclasses instances ast_tree = compile(code, filename, mode, flags=ast.PyCF_ONLY_AST) @@ -3213,11 +3092,11 @@ def check_ast_module(self): def test_reload_module(self): # bpo-41194: Importing the _ast module twice must not crash. - with support.swap_item(sys.modules, '_ast', None): - del sys.modules['_ast'] + with support.swap_item(sys.modules, "_ast", None): + del sys.modules["_ast"] import _ast as ast1 - del sys.modules['_ast'] + del sys.modules["_ast"] import _ast as ast2 self.check_ast_module() @@ -3238,22 +3117,23 @@ def my_import(name, *args, **kw): sys.modules[name] = lazy_mod return lazy_mod - with support.swap_item(sys.modules, '_ast', None): - del sys.modules['_ast'] + with support.swap_item(sys.modules, "_ast", None): + del sys.modules["_ast"] - with support.swap_attr(builtins, '__import__', my_import): + with support.swap_attr(builtins, "__import__", my_import): # Test that compile() does not import the _ast module self.check_ast_module() - self.assertNotIn('_ast', sys.modules) + self.assertNotIn("_ast", sys.modules) # Sanity check of the test itself import _ast + self.assertIs(_ast, lazy_mod) def test_subinterpreter(self): # bpo-41631: Importing and using the _ast module in a subinterpreter # must not crash. 
- code = dedent(''' + code = dedent(""" import _ast import ast import gc @@ -3271,7 +3151,7 @@ def test_subinterpreter(self): del ast, _ast del sys.modules['ast'], sys.modules['_ast'] gc.collect() - ''') + """) res = support.run_in_subinterp(code) self.assertEqual(res, 0) @@ -3285,167 +3165,312 @@ def test_cli_file_input(self): with os_helper.temp_dir() as tmp_dir: filename = os.path.join(tmp_dir, "test_module.py") - with open(filename, 'w', encoding='utf-8') as f: + with open(filename, "w", encoding="utf-8") as f: f.write(code) res, _ = script_helper.run_python_until_end("-m", "ast", filename) self.assertEqual(res.err, b"") - self.assertEqual(expected.splitlines(), - res.out.decode("utf8").splitlines()) + self.assertEqual(expected.splitlines(), res.out.decode("utf8").splitlines()) self.assertEqual(res.rc, 0) +def compare(left, right): + return ast.dump(left) == ast.dump(right) + +class ASTOptimiziationTests(unittest.TestCase): + binop = { + "+": ast.Add(), + "-": ast.Sub(), + "*": ast.Mult(), + "/": ast.Div(), + "%": ast.Mod(), + "<<": ast.LShift(), + ">>": ast.RShift(), + "|": ast.BitOr(), + "^": ast.BitXor(), + "&": ast.BitAnd(), + "//": ast.FloorDiv(), + "**": ast.Pow(), + } + + unaryop = { + "~": ast.Invert(), + "+": ast.UAdd(), + "-": ast.USub(), + } + + def wrap_expr(self, expr): + return ast.Module(body=[ast.Expr(value=expr)]) + + def wrap_statement(self, statement): + return ast.Module(body=[statement]) + + def assert_ast(self, code, non_optimized_target, optimized_target): + + non_optimized_tree = ast.parse(code, optimize=-1) + optimized_tree = ast.parse(code, optimize=1) + + # Is a non-optimized tree equal to a non-optimized target? + self.assertTrue( + compare(non_optimized_tree, non_optimized_target), + f"{ast.dump(non_optimized_target)} must equal " + f"{ast.dump(non_optimized_tree)}", + ) -def main(): - if __name__ != '__main__': - return - if sys.argv[1:] == ['-g']: - for statements, kind in ((exec_tests, "exec"), (single_tests, "single"), - (eval_tests, "eval")): - print(kind+"_results = [") - for statement in statements: - tree = ast.parse(statement, "?", kind) - print("%r," % (to_tuple(tree),)) - print("]") - print("main()") - raise SystemExit - unittest.main() + # Is a optimized tree equal to a non-optimized target? + self.assertFalse( + compare(optimized_tree, non_optimized_target), + f"{ast.dump(non_optimized_target)} must not equal " + f"{ast.dump(non_optimized_tree)}" + ) + + # Is a optimized tree is equal to an optimized target? 
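Aside on the optimize= comparison used by assert_ast() (an illustrative sketch, not part of the patch): the two parses differ only in the optimize argument that ast.parse() gained in Python 3.13, and equality is decided on ast.dump() output, exactly as compare() above does. With optimize=1 the parser folds constant expressions at the AST level, while optimize=-1 leaves the original nodes in place:

    import ast

    folded = ast.parse("1 + 2", optimize=1).body[0].value
    unfolded = ast.parse("1 + 2", optimize=-1).body[0].value

    print(ast.dump(folded))    # Constant(value=3)
    print(ast.dump(unfolded))  # BinOp(left=Constant(value=1), op=Add(), right=Constant(value=2))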
+ self.assertTrue( + compare(optimized_tree, optimized_target), + f"{ast.dump(optimized_target)} must equal " + f"{ast.dump(optimized_tree)}", + ) + + def create_binop(self, operand, left=ast.Constant(1), right=ast.Constant(1)): + return ast.BinOp(left=left, op=self.binop[operand], right=right) + + def test_folding_binop(self): + code = "1 %s 1" + operators = self.binop.keys() + + for op in operators: + result_code = code % op + non_optimized_target = self.wrap_expr(self.create_binop(op)) + optimized_target = self.wrap_expr(ast.Constant(value=eval(result_code))) + + with self.subTest( + result_code=result_code, + non_optimized_target=non_optimized_target, + optimized_target=optimized_target + ): + self.assert_ast(result_code, non_optimized_target, optimized_target) + + # Multiplication of constant tuples must be folded + code = "(1,) * 3" + non_optimized_target = self.wrap_expr(self.create_binop("*", ast.Tuple(elts=[ast.Constant(value=1)]), ast.Constant(value=3))) + optimized_target = self.wrap_expr(ast.Constant(eval(code))) + + self.assert_ast(code, non_optimized_target, optimized_target) + + def test_folding_unaryop(self): + code = "%s1" + operators = self.unaryop.keys() + + def create_unaryop(operand): + return ast.UnaryOp(op=self.unaryop[operand], operand=ast.Constant(1)) + + for op in operators: + result_code = code % op + non_optimized_target = self.wrap_expr(create_unaryop(op)) + optimized_target = self.wrap_expr(ast.Constant(eval(result_code))) + + with self.subTest( + result_code=result_code, + non_optimized_target=non_optimized_target, + optimized_target=optimized_target + ): + self.assert_ast(result_code, non_optimized_target, optimized_target) + + def test_folding_not(self): + code = "not (1 %s (1,))" + operators = { + "in": ast.In(), + "is": ast.Is(), + } + opt_operators = { + "is": ast.IsNot(), + "in": ast.NotIn(), + } + + def create_notop(operand): + return ast.UnaryOp(op=ast.Not(), operand=ast.Compare( + left=ast.Constant(value=1), + ops=[operators[operand]], + comparators=[ast.Tuple(elts=[ast.Constant(value=1)])] + )) + + for op in operators.keys(): + result_code = code % op + non_optimized_target = self.wrap_expr(create_notop(op)) + optimized_target = self.wrap_expr( + ast.Compare(left=ast.Constant(1), ops=[opt_operators[op]], comparators=[ast.Constant(value=(1,))]) + ) + + with self.subTest( + result_code=result_code, + non_optimized_target=non_optimized_target, + optimized_target=optimized_target + ): + self.assert_ast(result_code, non_optimized_target, optimized_target) + + def test_folding_format(self): + code = "'%s' % (a,)" + + non_optimized_target = self.wrap_expr( + ast.BinOp( + left=ast.Constant(value="%s"), + op=ast.Mod(), + right=ast.Tuple(elts=[ast.Name(id='a')])) + ) + optimized_target = self.wrap_expr( + ast.JoinedStr( + values=[ + ast.FormattedValue(value=ast.Name(id='a'), conversion=115) + ] + ) + ) + + self.assert_ast(code, non_optimized_target, optimized_target) + + + def test_folding_tuple(self): + code = "(1,)" + + non_optimized_target = self.wrap_expr(ast.Tuple(elts=[ast.Constant(1)])) + optimized_target = self.wrap_expr(ast.Constant(value=(1,))) -#### EVERYTHING BELOW IS GENERATED BY python Lib/test/test_ast.py -g ##### -exec_results = [ -('Module', [('Expr', (1, 0, 1, 4), ('Constant', (1, 0, 1, 4), None, None))], []), -('Module', [('Expr', (1, 0, 1, 18), ('Constant', (1, 0, 1, 18), 'module docstring', None))], []), -('Module', [('FunctionDef', (1, 0, 1, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 9, 1, 13))], [], None, 
None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 29), ('Constant', (1, 9, 1, 29), 'function docstring', None))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, []), [('Pass', (1, 10, 1, 14))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 0, None)]), [('Pass', (1, 12, 1, 16))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 11), 'args', None, None), [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 23), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 16), 'args', ('Starred', (1, 13, 1, 16), ('Name', (1, 14, 1, 16), 'Ts', ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 19, 1, 23))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Constant', (1, 25, 1, 28), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Starred', (1, 25, 1, 28), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 21), 'f', ('arguments', [], [], None, [], [], ('arg', (1, 8, 1, 14), 'kwargs', None, None), []), [('Pass', (1, 17, 1, 21))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 71), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None), ('arg', (1, 9, 1, 10), 'b', None, None), ('arg', (1, 14, 1, 15), 'c', None, None), ('arg', (1, 22, 1, 23), 'd', None, None), ('arg', (1, 28, 1, 29), 'e', None, None)], ('arg', (1, 35, 1, 39), 'args', None, None), [('arg', (1, 41, 1, 42), 'f', None, None)], [('Constant', (1, 43, 1, 45), 42, None)], ('arg', (1, 49, 1, 55), 'kwargs', None, None), [('Constant', (1, 11, 1, 12), 1, None), ('Constant', (1, 16, 1, 20), None, None), ('List', (1, 24, 1, 26), [], ('Load',)), ('Dict', (1, 30, 1, 32), [], [])]), [('Expr', (1, 58, 1, 71), ('Constant', (1, 58, 1, 71), 'doc for f()', None))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 27), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 23, 1, 27))], [], ('Subscript', (1, 11, 1, 21), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 20), [('Starred', (1, 17, 1, 20), ('Name', (1, 18, 1, 20), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 28, 1, 32))], [], ('Subscript', (1, 11, 1, 26), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 25), [('Name', (1, 17, 1, 20), 'int', ('Load',)), 
('Starred', (1, 22, 1, 25), ('Name', (1, 23, 1, 25), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 45), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 41, 1, 45))], [], ('Subscript', (1, 11, 1, 39), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 38), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 38), ('Subscript', (1, 23, 1, 38), ('Name', (1, 23, 1, 28), 'tuple', ('Load',)), ('Tuple', (1, 29, 1, 37), [('Name', (1, 29, 1, 32), 'int', ('Load',)), ('Constant', (1, 34, 1, 37), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -('Module', [('ClassDef', (1, 0, 1, 12), 'C', [], [], [('Pass', (1, 8, 1, 12))], [], [])], []), -('Module', [('ClassDef', (1, 0, 1, 32), 'C', [], [], [('Expr', (1, 9, 1, 32), ('Constant', (1, 9, 1, 32), 'docstring for class C', None))], [], [])], []), -('Module', [('ClassDef', (1, 0, 1, 21), 'C', [('Name', (1, 8, 1, 14), 'object', ('Load',))], [], [('Pass', (1, 17, 1, 21))], [], [])], []), -('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 16), ('Constant', (1, 15, 1, 16), 1, None))], [], None, None, [])], []), -('Module', [('Delete', (1, 0, 1, 5), [('Name', (1, 4, 1, 5), 'v', ('Del',))])], []), -('Module', [('Assign', (1, 0, 1, 5), [('Name', (1, 0, 1, 1), 'v', ('Store',))], ('Constant', (1, 4, 1, 5), 1, None), None)], []), -('Module', [('Assign', (1, 0, 1, 7), [('Tuple', (1, 0, 1, 3), [('Name', (1, 0, 1, 1), 'a', ('Store',)), ('Name', (1, 2, 1, 3), 'b', ('Store',))], ('Store',))], ('Name', (1, 6, 1, 7), 'c', ('Load',)), None)], []), -('Module', [('Assign', (1, 0, 1, 9), [('Tuple', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), -('Module', [('Assign', (1, 0, 1, 9), [('List', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), -('Module', [('AnnAssign', (1, 0, 1, 13), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 13), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 12), [('Starred', (1, 9, 1, 12), ('Name', (1, 10, 1, 12), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -('Module', [('AnnAssign', (1, 0, 1, 18), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 18), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 17), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 17), ('Name', (1, 15, 1, 17), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -('Module', [('AnnAssign', (1, 0, 1, 31), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 31), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 30), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 30), ('Subscript', (1, 15, 1, 30), ('Name', (1, 15, 1, 20), 'tuple', ('Load',)), ('Tuple', (1, 21, 1, 29), [('Name', (1, 21, 1, 24), 'str', ('Load',)), ('Constant', (1, 26, 1, 29), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Add',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('For', (1, 0, 1, 15), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 
10), 'v', ('Load',)), [('Pass', (1, 11, 1, 15))], [], None)], []), -('Module', [('While', (1, 0, 1, 12), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (1, 8, 1, 12))], [])], []), -('Module', [('If', (1, 0, 1, 9), ('Name', (1, 3, 1, 4), 'v', ('Load',)), [('Pass', (1, 5, 1, 9))], [])], []), -('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 4, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [])])], []), -('Module', [('If', (1, 0, 6, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 6, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('Pass', (6, 2, 6, 6))])])], []), -('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',)))], [('Pass', (1, 13, 1, 17))], None)], []), -('Module', [('With', (1, 0, 1, 25), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',))), ('withitem', ('Name', (1, 13, 1, 14), 'z', ('Load',)), ('Name', (1, 18, 1, 19), 'q', ('Store',)))], [('Pass', (1, 21, 1, 25))], None)], []), -('Module', [('With', (1, 0, 1, 19), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), ('Name', (1, 11, 1, 12), 'y', ('Store',)))], [('Pass', (1, 15, 1, 19))], None)], []), -('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), None), ('withitem', ('Name', (1, 9, 1, 10), 'y', ('Load',)), None)], [('Pass', (1, 13, 1, 17))], None)], []), -('Module', [('Raise', (1, 0, 1, 25), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), None)], []), -('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), -('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [], [], [('Pass', (4, 2, 4, 6))])], []), -('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), -('Module', [('Assert', (1, 0, 1, 8), ('Name', (1, 7, 1, 8), 'v', ('Load',)), None)], []), -('Module', [('Import', (1, 0, 1, 10), [('alias', (1, 7, 1, 10), 'sys', None)])], []), -('Module', [('ImportFrom', (1, 0, 1, 17), 'sys', [('alias', (1, 16, 1, 17), 'v', None)], 0)], []), -('Module', [('Global', (1, 0, 1, 8), ['v'])], []), -('Module', [('Expr', (1, 0, 1, 1), ('Constant', (1, 0, 1, 1), 1, None))], []), -('Module', [('Pass', (1, 0, 1, 4))], []), -('Module', [('For', (1, 0, 1, 16), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Break', (1, 11, 1, 16))], [], None)], []), -('Module', [('For', (1, 0, 1, 19), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Continue', (1, 11, 1, 19))], [], None)], []), -('Module', [('For', (1, 0, 1, 18), ('Tuple', (1, 4, 1, 7), [('Name', (1, 4, 1, 5), 'a', ('Store',)), ('Name', (1, 6, 1, 7), 'b', ('Store',))], ('Store',)), ('Name', (1, 11, 1, 12), 'c', ('Load',)), [('Pass', (1, 14, 1, 18))], [], None)], []), -('Module', [('For', (1, 0, 1, 20), ('Tuple', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), -('Module', [('For', (1, 0, 1, 20), ('List', (1, 4, 1, 9), 
[('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), -('Module', [('Expr', (1, 0, 11, 5), ('GeneratorExp', (1, 0, 11, 5), ('Tuple', (2, 4, 6, 5), [('Name', (3, 4, 3, 6), 'Aa', ('Load',)), ('Name', (5, 7, 5, 9), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4, 10, 6), [('Name', (8, 4, 8, 6), 'Aa', ('Store',)), ('Name', (10, 4, 10, 6), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10, 10, 12), 'Cc', ('Load',)), [], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 34), ('DictComp', (1, 0, 1, 34), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Name', (1, 11, 1, 12), 'w', ('Store',)), ('Name', (1, 16, 1, 17), 'x', ('Load',)), [], 0), ('comprehension', ('Name', (1, 22, 1, 23), 'm', ('Store',)), ('Name', (1, 27, 1, 28), 'p', ('Load',)), [('Name', (1, 32, 1, 33), 'g', ('Load',))], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 20), ('DictComp', (1, 0, 1, 20), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'v', ('Store',)), ('Name', (1, 13, 1, 14), 'w', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'x', ('Load',)), [], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 19), ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 12, 1, 13), 'x', ('Load',)), [('Name', (1, 17, 1, 18), 'g', ('Load',))], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 16), ('SetComp', (1, 0, 1, 16), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7, 1, 10), [('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 9, 1, 10), 'm', ('Store',))], ('Store',)), ('Name', (1, 14, 1, 15), 'x', ('Load',)), [], 0)]))], []), -('Module', [('AsyncFunctionDef', (1, 0, 3, 18), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 17), ('Constant', (2, 1, 2, 17), 'async function', None)), ('Expr', (3, 1, 3, 18), ('Await', (3, 1, 3, 18), ('Call', (3, 7, 3, 18), ('Name', (3, 7, 3, 16), 'something', ('Load',)), [], [])))], [], None, None, [])], []), -('Module', [('AsyncFunctionDef', (1, 0, 3, 8), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncFor', (2, 1, 3, 8), ('Name', (2, 11, 2, 12), 'e', ('Store',)), ('Name', (2, 16, 2, 17), 'i', ('Load',)), [('Expr', (2, 19, 2, 20), ('Constant', (2, 19, 2, 20), 1, None))], [('Expr', (3, 7, 3, 8), ('Constant', (3, 7, 3, 8), 2, None))], None)], [], None, None, [])], []), -('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncWith', (2, 1, 2, 21), [('withitem', ('Name', (2, 12, 2, 13), 'a', ('Load',)), ('Name', (2, 17, 2, 18), 'b', ('Store',)))], [('Expr', (2, 20, 2, 21), ('Constant', (2, 20, 2, 21), 1, None))], None)], [], None, None, [])], []), -('Module', [('Expr', (1, 0, 1, 14), ('Dict', (1, 0, 1, 14), [None, ('Constant', (1, 10, 1, 11), 2, None)], [('Dict', (1, 3, 1, 8), [('Constant', (1, 4, 1, 5), 1, None)], [('Constant', (1, 6, 1, 7), 2, None)]), ('Constant', (1, 12, 1, 13), 3, None)]))], []), -('Module', [('Expr', (1, 0, 1, 12), ('Set', (1, 0, 1, 12), [('Starred', (1, 1, 1, 8), ('Set', (1, 2, 1, 8), [('Constant', (1, 3, 1, 4), 1, None), ('Constant', (1, 6, 1, 7), 2, None)]), ('Load',)), ('Constant', (1, 10, 1, 11), 3, None)]))], []), -('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, 
[], [], None, []), [('Expr', (2, 1, 2, 21), ('ListComp', (2, 1, 2, 21), ('Name', (2, 2, 2, 3), 'i', ('Load',)), [('comprehension', ('Name', (2, 14, 2, 15), 'b', ('Store',)), ('Name', (2, 19, 2, 20), 'c', ('Load',)), [], 1)]))], [], None, None, [])], []), -('Module', [('FunctionDef', (4, 0, 4, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), -('Module', [('AsyncFunctionDef', (4, 0, 4, 19), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 15, 4, 19))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), -('Module', [('ClassDef', (4, 0, 4, 13), 'C', [], [], [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], [])], []), -('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Call', (1, 1, 1, 19), ('Name', (1, 1, 1, 5), 'deco', ('Load',)), [('GeneratorExp', (1, 5, 1, 19), ('Name', (1, 6, 1, 7), 'a', ('Load',)), [('comprehension', ('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 17, 1, 18), 'b', ('Load',)), [], 0)])], [])], None, None, [])], []), -('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Attribute', (1, 1, 1, 6), ('Attribute', (1, 1, 1, 4), ('Name', (1, 1, 1, 2), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',))], None, None, [])], []), -('Module', [('Expr', (1, 0, 1, 8), ('NamedExpr', (1, 1, 1, 7), ('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Constant', (1, 6, 1, 7), 1, None)))], []), -('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None), ('arg', (1, 15, 1, 16), 'd', None, None), ('arg', (1, 18, 1, 19), 'e', None, None)], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], None, []), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 39), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], ('arg', (1, 26, 1, 32), 'kwargs', None, None), []), [('Pass', (1, 35, 1, 39))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 20), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None)]), [('Pass', (1, 16, 1, 
20))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None), ('arg', (1, 19, 1, 20), 'c', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None), ('Constant', (1, 21, 1, 22), 4, None)]), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 28, 1, 32))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 30), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 26, 1, 30))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 42), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], ('arg', (1, 29, 1, 35), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 38, 1, 42))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 40), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], ('arg', (1, 27, 1, 33), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 36, 1, 40))], [], None, None, [])], []), -('Module', [('TypeAlias', (1, 0, 1, 12), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [], ('Name', (1, 9, 1, 12), 'int', ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 15), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None)], ('Name', (1, 12, 1, 15), 'int', ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 32), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None), ('TypeVarTuple', (1, 10, 1, 13), 'Ts', None), ('ParamSpec', (1, 15, 1, 18), 'P', None)], ('Tuple', (1, 22, 1, 32), [('Name', (1, 23, 1, 24), 'T', ('Load',)), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Name', (1, 30, 1, 31), 'P', ('Load',))], ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 37), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 13), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), None), ('TypeVarTuple', (1, 15, 1, 18), 'Ts', None), ('ParamSpec', (1, 20, 1, 23), 'P', None)], ('Tuple', (1, 27, 1, 37), [('Name', (1, 28, 1, 29), 'T', ('Load',)), ('Name', (1, 31, 1, 33), 'Ts', ('Load',)), ('Name', (1, 35, 1, 36), 'P', ('Load',))], ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 44), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 20), 'T', ('Tuple', (1, 10, 1, 20), [('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Name', (1, 16, 1, 19), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 22, 1, 25), 'Ts', None), ('ParamSpec', (1, 27, 1, 30), 'P', None)], ('Tuple', (1, 34, 1, 44), [('Name', (1, 35, 1, 36), 'T', ('Load',)), ('Name', (1, 38, 1, 40), 'Ts', ('Load',)), ('Name', 
(1, 42, 1, 43), 'P', ('Load',))], ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 48), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 17), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Constant', (1, 16, 1, 17), 1, None)), ('TypeVarTuple', (1, 19, 1, 26), 'Ts', ('Constant', (1, 25, 1, 26), 2, None)), ('ParamSpec', (1, 28, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))], ('Tuple', (1, 38, 1, 48), [('Name', (1, 39, 1, 40), 'T', ('Load',)), ('Name', (1, 42, 1, 44), 'Ts', ('Load',)), ('Name', (1, 46, 1, 47), 'P', ('Load',))], ('Load',)))], []), -('Module', [('ClassDef', (1, 0, 1, 16), 'X', [], [], [('Pass', (1, 12, 1, 16))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None)])], []), -('Module', [('ClassDef', (1, 0, 1, 26), 'X', [], [], [('Pass', (1, 22, 1, 26))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None), ('TypeVarTuple', (1, 11, 1, 14), 'Ts', None), ('ParamSpec', (1, 16, 1, 19), 'P', None)])], []), -('Module', [('ClassDef', (1, 0, 1, 31), 'X', [], [], [('Pass', (1, 27, 1, 31))], [], [('TypeVar', (1, 8, 1, 14), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), None), ('TypeVarTuple', (1, 16, 1, 19), 'Ts', None), ('ParamSpec', (1, 21, 1, 24), 'P', None)])], []), -('Module', [('ClassDef', (1, 0, 1, 38), 'X', [], [], [('Pass', (1, 34, 1, 38))], [], [('TypeVar', (1, 8, 1, 21), 'T', ('Tuple', (1, 11, 1, 21), [('Name', (1, 12, 1, 15), 'int', ('Load',)), ('Name', (1, 17, 1, 20), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 23, 1, 26), 'Ts', None), ('ParamSpec', (1, 28, 1, 31), 'P', None)])], []), -('Module', [('ClassDef', (1, 0, 1, 43), 'X', [], [], [('Pass', (1, 39, 1, 43))], [], [('TypeVar', (1, 8, 1, 18), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Constant', (1, 17, 1, 18), 1, None)), ('TypeVarTuple', (1, 20, 1, 27), 'Ts', ('Constant', (1, 26, 1, 27), 2, None)), ('ParamSpec', (1, 29, 1, 36), 'P', ('Constant', (1, 35, 1, 36), 3, None))])], []), -('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 12, 1, 16))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None), ('TypeVarTuple', (1, 9, 1, 12), 'Ts', None), ('ParamSpec', (1, 14, 1, 17), 'P', None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 31), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 27, 1, 31))], [], None, None, [('TypeVar', (1, 6, 1, 12), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), None), ('TypeVarTuple', (1, 14, 1, 17), 'Ts', None), ('ParamSpec', (1, 19, 1, 22), 'P', None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 38), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 34, 1, 38))], [], None, None, [('TypeVar', (1, 6, 1, 19), 'T', ('Tuple', (1, 9, 1, 19), [('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Name', (1, 15, 1, 18), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 21, 1, 24), 'Ts', None), ('ParamSpec', (1, 26, 1, 29), 'P', None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 43), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 39, 1, 43))], [], None, None, [('TypeVar', (1, 6, 1, 16), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Constant', (1, 15, 1, 16), 1, None)), ('TypeVarTuple', (1, 18, 1, 25), 'Ts', ('Constant', (1, 24, 1, 25), 2, None)), ('ParamSpec', (1, 27, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))])], []), -] 
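Aside on where these deleted tables came from (an illustrative sketch, not part of the patch): each tuple was produced by the old main() generator deleted further below, which parses every snippet and flattens the resulting AST with the to_tuple() helper (now moved into the new Lib/test/test_ast/utils.py). Reproducing one entry by hand, assuming the new test package layout added by this patch:

    import ast
    from test.test_ast.utils import to_tuple  # helper file added by this patch

    tree = ast.parse("pass", "?", "exec")
    print("%r," % (to_tuple(tree),))
    # -> ('Module', [('Pass', (1, 0, 1, 4))], []),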
-single_results = [ -('Interactive', [('Expr', (1, 0, 1, 3), ('BinOp', (1, 0, 1, 3), ('Constant', (1, 0, 1, 1), 1, None), ('Add',), ('Constant', (1, 2, 1, 3), 2, None)))]), -] -eval_results = [ -('Expression', ('Constant', (1, 0, 1, 4), None, None)), -('Expression', ('BoolOp', (1, 0, 1, 7), ('And',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 6, 1, 7), 'b', ('Load',))])), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('UnaryOp', (1, 0, 1, 5), ('Not',), ('Name', (1, 4, 1, 5), 'v', ('Load',)))), -('Expression', ('Lambda', (1, 0, 1, 11), ('arguments', [], [], None, [], [], None, []), ('Constant', (1, 7, 1, 11), None, None))), -('Expression', ('Dict', (1, 0, 1, 7), [('Constant', (1, 2, 1, 3), 1, None)], [('Constant', (1, 4, 1, 5), 2, None)])), -('Expression', ('Dict', (1, 0, 1, 2), [], [])), -('Expression', ('Set', (1, 0, 1, 7), [('Constant', (1, 1, 1, 5), None, None)])), -('Expression', ('Dict', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None)], [('Constant', (4, 10, 4, 11), 2, None)])), -('Expression', ('ListComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), -('Expression', ('ListComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('SetComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], 
('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('Compare', (1, 0, 1, 9), ('Constant', (1, 0, 1, 1), 1, None), [('Lt',), ('Lt',)], [('Constant', (1, 4, 1, 5), 2, None), ('Constant', (1, 8, 1, 9), 3, None)])), -('Expression', ('Call', (1, 0, 1, 17), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Constant', (1, 2, 1, 3), 1, None), ('Constant', (1, 4, 1, 5), 2, None), ('Starred', (1, 10, 1, 12), ('Name', (1, 11, 1, 12), 'd', ('Load',)), ('Load',))], [('keyword', (1, 6, 1, 9), 'c', ('Constant', (1, 8, 1, 9), 3, None)), ('keyword', (1, 13, 1, 16), None, ('Name', (1, 15, 1, 16), 'e', ('Load',)))])), -('Expression', ('Call', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Starred', (1, 2, 1, 9), ('List', (1, 3, 1, 9), [('Constant', (1, 4, 1, 5), 0, None), ('Constant', (1, 7, 1, 8), 1, None)], ('Load',)), ('Load',))], [])), -('Expression', ('Call', (1, 0, 1, 15), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('GeneratorExp', (1, 1, 1, 15), ('Name', (1, 2, 1, 3), 'a', ('Load',)), [('comprehension', ('Name', (1, 8, 1, 9), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Load',)), [], 0)])], [])), -('Expression', ('Constant', (1, 0, 1, 2), 10, None)), -('Expression', ('Constant', (1, 0, 1, 8), 'string', None)), -('Expression', ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',))), -('Expression', ('Subscript', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Slice', (1, 2, 1, 5), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Name', (1, 4, 1, 5), 'c', ('Load',)), None), ('Load',))), -('Expression', ('Name', (1, 0, 1, 1), 'v', ('Load',))), -('Expression', ('List', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), -('Expression', ('List', (1, 0, 1, 2), [], ('Load',))), -('Expression', ('Tuple', (1, 0, 1, 5), [('Constant', (1, 0, 1, 1), 1, None), ('Constant', (1, 2, 1, 3), 2, None), ('Constant', (1, 4, 1, 5), 3, None)], ('Load',))), -('Expression', ('Tuple', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), -('Expression', ('Tuple', (1, 0, 1, 2), [], ('Load',))), -('Expression', ('Call', (1, 0, 1, 17), ('Attribute', (1, 0, 1, 7), ('Attribute', 
(1, 0, 1, 5), ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8, 1, 16), ('Attribute', (1, 8, 1, 11), ('Name', (1, 8, 1, 9), 'a', ('Load',)), 'b', ('Load',)), ('Slice', (1, 12, 1, 15), ('Constant', (1, 12, 1, 13), 1, None), ('Constant', (1, 14, 1, 15), 2, None), None), ('Load',))], [])), -] -main() + self.assert_ast(code, non_optimized_target, optimized_target) + + def test_folding_comparator(self): + code = "1 %s %s1%s" + operators = [("in", ast.In()), ("not in", ast.NotIn())] + braces = [ + ("[", "]", ast.List, (1,)), + ("{", "}", ast.Set, frozenset({1})), + ] + for left, right, non_optimized_comparator, optimized_comparator in braces: + for op, node in operators: + non_optimized_target = self.wrap_expr(ast.Compare( + left=ast.Constant(1), ops=[node], + comparators=[non_optimized_comparator(elts=[ast.Constant(1)])] + )) + optimized_target = self.wrap_expr(ast.Compare( + left=ast.Constant(1), ops=[node], + comparators=[ast.Constant(value=optimized_comparator)] + )) + self.assert_ast(code % (op, left, right), non_optimized_target, optimized_target) + + def test_folding_iter(self): + code = "for _ in %s1%s: pass" + braces = [ + ("[", "]", ast.List, (1,)), + ("{", "}", ast.Set, frozenset({1})), + ] + + for left, right, ast_cls, optimized_iter in braces: + non_optimized_target = self.wrap_statement(ast.For( + target=ast.Name(id="_", ctx=ast.Store()), + iter=ast_cls(elts=[ast.Constant(1)]), + body=[ast.Pass()] + )) + optimized_target = self.wrap_statement(ast.For( + target=ast.Name(id="_", ctx=ast.Store()), + iter=ast.Constant(value=optimized_iter), + body=[ast.Pass()] + )) + + self.assert_ast(code % (left, right), non_optimized_target, optimized_target) + + def test_folding_subscript(self): + code = "(1,)[0]" + + non_optimized_target = self.wrap_expr( + ast.Subscript(value=ast.Tuple(elts=[ast.Constant(value=1)]), slice=ast.Constant(value=0)) + ) + optimized_target = self.wrap_expr(ast.Constant(value=1)) + + self.assert_ast(code, non_optimized_target, optimized_target) + + def test_folding_type_param_in_function_def(self): + code = "def foo[%s = 1 + 1](): pass" + + unoptimized_binop = self.create_binop("+") + unoptimized_type_params = [ + ("T", "T", ast.TypeVar), + ("**P", "P", ast.ParamSpec), + ("*Ts", "Ts", ast.TypeVarTuple), + ] + + for type, name, type_param in unoptimized_type_params: + result_code = code % type + optimized_target = self.wrap_statement( + ast.FunctionDef( + name='foo', + args=ast.arguments(), + body=[ast.Pass()], + type_params=[type_param(name=name, default_value=ast.Constant(2))] + ) + ) + non_optimized_target = self.wrap_statement( + ast.FunctionDef( + name='foo', + args=ast.arguments(), + body=[ast.Pass()], + type_params=[type_param(name=name, default_value=unoptimized_binop)] + ) + ) + self.assert_ast(result_code, non_optimized_target, optimized_target) + + def test_folding_type_param_in_class_def(self): + code = "class foo[%s = 1 + 1]: pass" + + unoptimized_binop = self.create_binop("+") + unoptimized_type_params = [ + ("T", "T", ast.TypeVar), + ("**P", "P", ast.ParamSpec), + ("*Ts", "Ts", ast.TypeVarTuple), + ] + + for type, name, type_param in unoptimized_type_params: + result_code = code % type + optimized_target = self.wrap_statement( + ast.ClassDef( + name='foo', + body=[ast.Pass()], + type_params=[type_param(name=name, default_value=ast.Constant(2))] + ) + ) + non_optimized_target = self.wrap_statement( + ast.ClassDef( + name='foo', + body=[ast.Pass()], + 
type_params=[type_param(name=name, default_value=unoptimized_binop)] + ) + ) + self.assert_ast(result_code, non_optimized_target, optimized_target) + + def test_folding_type_param_in_type_alias(self): + code = "type foo[%s = 1 + 1] = 1" + + unoptimized_binop = self.create_binop("+") + unoptimized_type_params = [ + ("T", "T", ast.TypeVar), + ("**P", "P", ast.ParamSpec), + ("*Ts", "Ts", ast.TypeVarTuple), + ] + + for type, name, type_param in unoptimized_type_params: + result_code = code % type + optimized_target = self.wrap_statement( + ast.TypeAlias( + name=ast.Name(id='foo', ctx=ast.Store()), + type_params=[type_param(name=name, default_value=ast.Constant(2))], + value=ast.Constant(value=1), + ) + ) + non_optimized_target = self.wrap_statement( + ast.TypeAlias( + name=ast.Name(id='foo', ctx=ast.Store()), + type_params=[type_param(name=name, default_value=unoptimized_binop)], + value=ast.Constant(value=1), + ) + ) + self.assert_ast(result_code, non_optimized_target, optimized_target) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_ast/utils.py b/Lib/test/test_ast/utils.py new file mode 100644 index 00000000000..145e89ee94e --- /dev/null +++ b/Lib/test/test_ast/utils.py @@ -0,0 +1,15 @@ +def to_tuple(t): + if t is None or isinstance(t, (str, int, complex, float, bytes)) or t is Ellipsis: + return t + elif isinstance(t, list): + return [to_tuple(e) for e in t] + result = [t.__class__.__name__] + if hasattr(t, 'lineno') and hasattr(t, 'col_offset'): + result.append((t.lineno, t.col_offset)) + if hasattr(t, 'end_lineno') and hasattr(t, 'end_col_offset'): + result[-1] += (t.end_lineno, t.end_col_offset) + if t._fields is None: + return tuple(result) + for f in t._fields: + result.append(to_tuple(getattr(t, f))) + return tuple(result) diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py index 0f8212dbec4..0777f39b572 100644 --- a/Lib/test/test_asyncio/test_eager_task_factory.py +++ b/Lib/test/test_asyncio/test_eager_task_factory.py @@ -241,6 +241,18 @@ class DummyLoop: _, out, err = assert_python_ok("-c", code) self.assertFalse(err) + def test_issue122332(self): + async def coro(): + pass + + async def run(): + task = self.loop.create_task(coro()) + await task + self.assertIsNone(task.get_coro()) + + self.run_coro(run()) + + class AsyncTaskCounter: def __init__(self, loop, *, task_class, eager): self.suspense_count = 0 diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 458b70451a3..2417712a9c9 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -675,6 +675,14 @@ def test_future_del_segfault(self): with self.assertRaises(AttributeError): del fut._log_traceback + def test_future_iter_get_referents_segfault(self): + # See https://github.com/python/cpython/issues/122695 + import _asyncio + it = iter(self._new_future(loop=self.loop)) + del it + evil = gc.get_referents(_asyncio) + gc.collect() + @unittest.skipUnless(hasattr(futures, '_CFuture'), 'requires the C _asyncio module') diff --git a/Lib/test/test_asyncio/test_sendfile.py b/Lib/test/test_asyncio/test_sendfile.py index d33ff197bbf..2509d4382cd 100644 --- a/Lib/test/test_asyncio/test_sendfile.py +++ b/Lib/test/test_asyncio/test_sendfile.py @@ -93,13 +93,10 @@ async def wait_closed(self): class SendfileBase: - # 256 KiB plus small unaligned to buffer chunk - # Newer versions of Windows seems to have increased its internal - # buffer and tries to send as much of the 
data as it can as it - # has some form of buffering for this which is less than 256KiB - # on newer server versions and Windows 11. - # So DATA should be larger than 256 KiB to make this test reliable. - DATA = b"x" * (1024 * 256 + 1) + # Linux >= 6.10 seems buffering up to 17 pages of data. + # So DATA should be large enough to make this test reliable even with a + # 64 KiB page configuration. + DATA = b"x" * (1024 * 17 * 64 + 1) # Reduce socket buffer size to test on relative small data sets. BUF_SIZE = 4 * 1024 # 4 KiB diff --git a/Lib/test/test_asyncio/test_server.py b/Lib/test/test_asyncio/test_server.py index 4ca8a166a0f..60a40cc8349 100644 --- a/Lib/test/test_asyncio/test_server.py +++ b/Lib/test/test_asyncio/test_server.py @@ -227,7 +227,7 @@ async def serve(rd, wr): (s_rd, s_wr) = await fut - # Limit the socket buffers so we can reliably overfill them + # Limit the socket buffers so we can more reliably overfill them s_sock = s_wr.get_extra_info('socket') s_sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 65536) c_sock = c_wr.get_extra_info('socket') @@ -242,10 +242,18 @@ async def serve(rd, wr): await asyncio.sleep(0) # Get the writer in a waiting state by sending data until the - # socket buffers are full on both server and client sockets and - # the kernel stops accepting more data - s_wr.write(b'a' * c_sock.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF)) - s_wr.write(b'a' * s_sock.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF)) + # kernel stops accepting more data in the send buffer. + # gh-122136: getsockopt() does not reliably report the buffer size + # available for message content. + # We loop until we start filling up the asyncio buffer. + # To avoid an infinite loop we cap at 10 times the expected value + c_bufsize = c_sock.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF) + s_bufsize = s_sock.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF) + for i in range(10): + s_wr.write(b'a' * c_bufsize) + s_wr.write(b'a' * s_bufsize) + if s_wr.transport.get_write_buffer_size() > 0: + break self.assertNotEqual(s_wr.transport.get_write_buffer_size(), 0) task = asyncio.create_task(srv.wait_closed()) diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index cf1a1985338..c822d8045b0 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -873,6 +873,21 @@ async def main(): self.loop.run_until_complete(main()) + @unittest.skipIf(sys.platform != 'linux', "Linux only") + def test_subprocess_send_signal_race(self): + # See https://github.com/python/cpython/issues/87744 + async def main(): + for _ in range(10): + proc = await asyncio.create_subprocess_exec('sleep', '0.1') + await asyncio.sleep(0.1) + try: + proc.send_signal(signal.SIGUSR1) + except ProcessLookupError: + pass + self.assertNotEqual(await proc.wait(), 255) + + self.loop.run_until_complete(main()) + if sys.platform != 'win32': # Unix diff --git a/Lib/test/test_asyncio/test_threads.py b/Lib/test/test_asyncio/test_threads.py index 1138a93e0f7..774380270a7 100644 --- a/Lib/test/test_asyncio/test_threads.py +++ b/Lib/test/test_asyncio/test_threads.py @@ -30,7 +30,9 @@ async def test_to_thread_once(self): func.assert_called_once() async def test_to_thread_concurrent(self): - func = mock.Mock() + calls = [] + def func(): + calls.append(1) futs = [] for _ in range(10): @@ -38,7 +40,7 @@ async def test_to_thread_concurrent(self): futs.append(fut) await asyncio.gather(*futs) - self.assertEqual(func.call_count, 10) + 
self.assertEqual(sum(calls), 10) async def test_to_thread_args_kwargs(self): # Unlike run_in_executor(), to_thread() should directly accept kwargs. diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 9452213c685..0e5488da272 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -1903,6 +1903,7 @@ async def test_fork_not_share_event_loop(self): wait_process(pid, exitcode=0) @hashlib_helper.requires_hashdigest('md5') + @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True) def test_fork_signal_handling(self): self.addCleanup(multiprocessing_cleanup_tests) @@ -1949,6 +1950,7 @@ async def func(): self.assertTrue(child_handled.is_set()) @hashlib_helper.requires_hashdigest('md5') + @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True) def test_fork_asyncio_run(self): self.addCleanup(multiprocessing_cleanup_tests) @@ -1968,6 +1970,7 @@ async def child_main(): self.assertEqual(result.value, 42) @hashlib_helper.requires_hashdigest('md5') + @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True) def test_fork_asyncio_subprocess(self): self.addCleanup(multiprocessing_cleanup_tests) diff --git a/Lib/test/test_asyncio/utils.py b/Lib/test/test_asyncio/utils.py index 44943e1fa7b..ce2408fc1aa 100644 --- a/Lib/test/test_asyncio/utils.py +++ b/Lib/test/test_asyncio/utils.py @@ -301,12 +301,17 @@ def run_udp_echo_server(*, host='127.0.0.1', port=0): family, type, proto, _, sockaddr = addr_info[0] sock = socket.socket(family, type, proto) sock.bind((host, port)) + sockname = sock.getsockname() thread = threading.Thread(target=lambda: echo_datagrams(sock)) thread.start() try: - yield sock.getsockname() + yield sockname finally: - sock.sendto(b'STOP', sock.getsockname()) + # gh-122187: use a separate socket to send the stop message to avoid + # TSan reported race on the same socket. + sock2 = socket.socket(family, type, proto) + sock2.sendto(b'STOP', sockname) + sock2.close() thread.join() diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 034c83a19fd..3a553192497 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -4,6 +4,7 @@ import asyncio import builtins import collections +import contextlib import decimal import fractions import gc @@ -30,6 +31,7 @@ from operator import neg from test import support from test.support import (cpython_only, swap_attr, maybe_get_event_loop_policy) +from test.support.import_helper import import_module from test.support.os_helper import (EnvironmentVarGuard, TESTFN, unlink) from test.support.script_helper import assert_python_ok from test.support.warnings_helper import check_warnings @@ -2369,7 +2371,8 @@ def child(wpipe): print(ascii(input(prompt)), file=wpipe) except BaseException as e: print(ascii(f'{e.__class__.__name__}: {e!s}'), file=wpipe) - lines = self.run_child(child, terminal_input + b"\r\n") + with self.detach_readline(): + lines = self.run_child(child, terminal_input + b"\r\n") # Check we did exercise the GNU readline path self.assertIn(lines[0], {'tty = True', 'tty = False'}) if lines[0] != 'tty = True': @@ -2382,28 +2385,36 @@ def child(wpipe): expected = terminal_input.decode(sys.stdin.encoding) # what else? self.assertEqual(input_result, expected) - def test_input_tty(self): - # Test input() functionality when wired to a tty (the code path - # is different and invokes GNU readline if available). 
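Aside on the detach_readline() helper introduced below (an illustrative sketch, not part of the patch): it uses ctypes to reach the interpreter's exported PyOS_ReadlineFunctionPointer symbol, saves whatever hook the readline module installed, clears it so PyOS_Readline() falls back to the default stdin implementation, and restores it afterwards. The same save/patch/restore pattern in isolation:

    import ctypes

    fp = ctypes.c_void_p.in_dll(ctypes.pythonapi, "PyOS_ReadlineFunctionPointer")
    saved = fp.value   # whatever hook (e.g. readline's) is currently installed
    fp.value = None    # fall back to the default PyOS_Readline() implementation
    try:
        pass           # run code that calls input() on a tty here
    finally:
        fp.value = saved  # always restore the original hook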
- self.check_input_tty("prompt", b"quux") - - def skip_if_readline(self): + @contextlib.contextmanager + def detach_readline(self): # bpo-13886: When the readline module is loaded, PyOS_Readline() uses # the readline implementation. In some cases, the Python readline # callback rlhandler() is called by readline with a string without - # non-ASCII characters. Skip tests on non-ASCII characters if the - # readline module is loaded, since test_builtin is not intended to test + # non-ASCII characters. + # Unlink readline temporarily from PyOS_Readline() for those tests, + # since test_builtin is not intended to test # the readline module, but the builtins module. - if 'readline' in sys.modules: - self.skipTest("the readline module is loaded") + if "readline" in sys.modules: + c = import_module("ctypes") + fp_api = "PyOS_ReadlineFunctionPointer" + prev_value = c.c_void_p.in_dll(c.pythonapi, fp_api).value + c.c_void_p.in_dll(c.pythonapi, fp_api).value = None + try: + yield + finally: + c.c_void_p.in_dll(c.pythonapi, fp_api).value = prev_value + else: + yield + + def test_input_tty(self): + # Test input() functionality when wired to a tty + self.check_input_tty("prompt", b"quux") def test_input_tty_non_ascii(self): - self.skip_if_readline() # Check stdin/stdout encoding is used when invoking PyOS_Readline() self.check_input_tty("prompté", b"quux\xc3\xa9", "utf-8") def test_input_tty_non_ascii_unicode_errors(self): - self.skip_if_readline() # Check stdin/stdout error handler is used when invoking PyOS_Readline() self.check_input_tty("prompté", b"quux\xe9", "ascii") @@ -2413,14 +2424,12 @@ def test_input_tty_null_in_prompt(self): 'null characters') def test_input_tty_nonencodable_prompt(self): - self.skip_if_readline() self.check_input_tty("prompté", b"quux", "ascii", stdout_errors='strict', expected="UnicodeEncodeError: 'ascii' codec can't encode " "character '\\xe9' in position 6: ordinal not in " "range(128)") def test_input_tty_nondecodable_input(self): - self.skip_if_readline() self.check_input_tty("prompt", b"quux\xe9", "ascii", stdin_errors='strict', expected="UnicodeDecodeError: 'ascii' codec can't decode " "byte 0xe9 in position 4: ordinal not in " @@ -2555,6 +2564,7 @@ def test_new_type(self): self.assertEqual(A.__module__, __name__) self.assertEqual(A.__bases__, (object,)) self.assertIs(A.__base__, object) + self.assertNotIn('__firstlineno__', A.__dict__) x = A() self.assertIs(type(x), A) self.assertIs(x.__class__, A) @@ -2633,6 +2643,17 @@ def test_type_qualname(self): A.__qualname__ = b'B' self.assertEqual(A.__qualname__, 'D.E') + def test_type_firstlineno(self): + A = type('A', (), {'__firstlineno__': 42}) + self.assertEqual(A.__name__, 'A') + self.assertEqual(A.__module__, __name__) + self.assertEqual(A.__dict__['__firstlineno__'], 42) + A.__module__ = 'testmodule' + self.assertEqual(A.__module__, 'testmodule') + self.assertNotIn('__firstlineno__', A.__dict__) + A.__firstlineno__ = 43 + self.assertEqual(A.__dict__['__firstlineno__'], 43) + def test_type_typeparams(self): class A[T]: pass diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py index e4d1381be5f..7d786be1d25 100644 --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -476,7 +476,6 @@ def testReadlinesNoNewline(self): self.assertEqual(xlines, [b'Test']) def testContextProtocol(self): - f = None with BZ2File(self.filename, "wb") as f: f.write(b"xxx") f = BZ2File(self.filename, "rb") diff --git a/Lib/test/test_capi/test_abstract.py b/Lib/test/test_capi/test_abstract.py index bc39036e90b..dd55596b96d 100644 --- 
a/Lib/test/test_capi/test_abstract.py +++ b/Lib/test/test_capi/test_abstract.py @@ -994,13 +994,6 @@ def test_sequence_tuple(self): self.assertRaises(TypeError, xtuple, 42) self.assertRaises(SystemError, xtuple, NULL) - def test_number_check(self): - number_check = _testlimitedcapi.number_check - self.assertTrue(number_check(1 + 1j)) - self.assertTrue(number_check(1)) - self.assertTrue(number_check(0.5)) - self.assertFalse(number_check("1 + 1j")) - def test_object_generichash(self): # Test PyObject_GenericHash() generichash = _testcapi.object_generichash diff --git a/Lib/test/test_capi/test_bytes.py b/Lib/test/test_capi/test_bytes.py index f14d5545c82..d5f047bcf18 100644 --- a/Lib/test/test_capi/test_bytes.py +++ b/Lib/test/test_capi/test_bytes.py @@ -53,6 +53,8 @@ def test_fromstringandsize(self): self.assertEqual(fromstringandsize(b'abc'), b'abc') self.assertEqual(fromstringandsize(b'abc', 2), b'ab') self.assertEqual(fromstringandsize(b'abc\0def'), b'abc\0def') + self.assertEqual(fromstringandsize(b'a'), b'a') + self.assertEqual(fromstringandsize(b'a', 1), b'a') self.assertEqual(fromstringandsize(b'', 0), b'') self.assertEqual(fromstringandsize(NULL, 0), b'') self.assertEqual(len(fromstringandsize(NULL, 3)), 3) diff --git a/Lib/test/test_capi/test_getargs.py b/Lib/test/test_capi/test_getargs.py index 232aa2a8002..703d228f92e 100644 --- a/Lib/test/test_capi/test_getargs.py +++ b/Lib/test/test_capi/test_getargs.py @@ -6,6 +6,7 @@ from test.support import import_helper from test.support import script_helper from test.support import warnings_helper +from test.support.testcase import FloatsAreIdenticalMixin # Skip this test if the _testcapi module isn't available. _testcapi = import_helper.import_module('_testcapi') from _testcapi import getargs_keywords, getargs_keyword_only @@ -436,11 +437,7 @@ def test_K(self): self.assertEqual(VERY_LARGE & ULLONG_MAX, getargs_K(VERY_LARGE)) -class Float_TestCase(unittest.TestCase): - def assertEqualWithSign(self, actual, expected): - self.assertEqual(actual, expected) - self.assertEqual(math.copysign(1, actual), math.copysign(1, expected)) - +class Float_TestCase(unittest.TestCase, FloatsAreIdenticalMixin): def test_f(self): from _testcapi import getargs_f self.assertEqual(getargs_f(4.25), 4.25) @@ -462,10 +459,10 @@ def test_f(self): self.assertEqual(getargs_f(DBL_MAX), INF) self.assertEqual(getargs_f(-DBL_MAX), -INF) if FLT_MIN > DBL_MIN: - self.assertEqualWithSign(getargs_f(DBL_MIN), 0.0) - self.assertEqualWithSign(getargs_f(-DBL_MIN), -0.0) - self.assertEqualWithSign(getargs_f(0.0), 0.0) - self.assertEqualWithSign(getargs_f(-0.0), -0.0) + self.assertFloatsAreIdentical(getargs_f(DBL_MIN), 0.0) + self.assertFloatsAreIdentical(getargs_f(-DBL_MIN), -0.0) + self.assertFloatsAreIdentical(getargs_f(0.0), 0.0) + self.assertFloatsAreIdentical(getargs_f(-0.0), -0.0) r = getargs_f(NAN) self.assertNotEqual(r, r) @@ -494,8 +491,8 @@ def test_d(self): self.assertEqual(getargs_d(x), x) self.assertRaises(OverflowError, getargs_d, 1< C integer -> object) + values = (0, 1, 1234, max_val) + if min_val < 0: + values += (-1, min_val) + for value in values: + with self.subTest(value=value): + self.assertEqual(func(value), value) + self.assertEqual(func(IntSubclass(value)), value) + if use_index: + self.assertEqual(func(Index(value)), value) + + if use_index: + self.assertEqual(func(MyIndexAndInt()), 10) + else: + self.assertRaises(TypeError, func, Index(42)) + self.assertRaises(TypeError, func, MyIndexAndInt()) + + if mask: + self.assertEqual(func(min_val - 1), max_val) + 
self.assertEqual(func(max_val + 1), min_val) + self.assertEqual(func(-1 << 1000), 0) + self.assertEqual(func(1 << 1000), 0) + else: + self.assertRaises(negative_value_error, func, min_val - 1) + self.assertRaises(negative_value_error, func, -1 << 1000) + self.assertRaises(OverflowError, func, max_val + 1) + self.assertRaises(OverflowError, func, 1 << 1000) + self.assertRaises(TypeError, func, 1.0) + self.assertRaises(TypeError, func, b'2') + self.assertRaises(TypeError, func, '3') + self.assertRaises(SystemError, func, NULL) + + def check_long_asintandoverflow(self, func, min_val, max_val): + # round trip (object -> C integer -> object) + for value in (min_val, max_val, -1, 0, 1, 1234): + with self.subTest(value=value): + self.assertEqual(func(value), (value, 0)) + self.assertEqual(func(IntSubclass(value)), (value, 0)) + self.assertEqual(func(Index(value)), (value, 0)) + + self.assertEqual(func(MyIndexAndInt()), (10, 0)) + + self.assertEqual(func(min_val - 1), (-1, -1)) + self.assertEqual(func(max_val + 1), (-1, +1)) + + # CRASHES func(1.0) + # CRASHES func(NULL) + def test_long_asint(self): # Test PyLong_AsInt() PyLong_AsInt = _testlimitedcapi.PyLong_AsInt from _testcapi import INT_MIN, INT_MAX - - # round trip (object -> int -> object) - for value in (INT_MIN, INT_MAX, -1, 0, 1, 123): - with self.subTest(value=value): - self.assertEqual(PyLong_AsInt(value), value) - self.assertEqual(PyLong_AsInt(IntSubclass(42)), 42) - self.assertEqual(PyLong_AsInt(Index(42)), 42) - self.assertEqual(PyLong_AsInt(MyIndexAndInt()), 10) - - # bound checking - self.assertRaises(OverflowError, PyLong_AsInt, INT_MIN - 1) - self.assertRaises(OverflowError, PyLong_AsInt, INT_MAX + 1) - - # invalid type - self.assertRaises(TypeError, PyLong_AsInt, 1.0) - self.assertRaises(TypeError, PyLong_AsInt, b'2') - self.assertRaises(TypeError, PyLong_AsInt, '3') - self.assertRaises(SystemError, PyLong_AsInt, NULL) + self.check_long_asint(PyLong_AsInt, INT_MIN, INT_MAX) def test_long_aslong(self): # Test PyLong_AsLong() and PyLong_FromLong() aslong = _testlimitedcapi.pylong_aslong from _testcapi import LONG_MIN, LONG_MAX - # round trip (object -> long -> object) - for value in (LONG_MIN, LONG_MAX, -1, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(aslong(value), value) - - self.assertEqual(aslong(IntSubclass(42)), 42) - self.assertEqual(aslong(Index(42)), 42) - self.assertEqual(aslong(MyIndexAndInt()), 10) - - self.assertRaises(OverflowError, aslong, LONG_MIN - 1) - self.assertRaises(OverflowError, aslong, LONG_MAX + 1) - self.assertRaises(TypeError, aslong, 1.0) - self.assertRaises(TypeError, aslong, b'2') - self.assertRaises(TypeError, aslong, '3') - self.assertRaises(SystemError, aslong, NULL) + self.check_long_asint(aslong, LONG_MIN, LONG_MAX) def test_long_aslongandoverflow(self): # Test PyLong_AsLongAndOverflow() aslongandoverflow = _testlimitedcapi.pylong_aslongandoverflow from _testcapi import LONG_MIN, LONG_MAX - # round trip (object -> long -> object) - for value in (LONG_MIN, LONG_MAX, -1, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(aslongandoverflow(value), (value, 0)) - - self.assertEqual(aslongandoverflow(IntSubclass(42)), (42, 0)) - self.assertEqual(aslongandoverflow(Index(42)), (42, 0)) - self.assertEqual(aslongandoverflow(MyIndexAndInt()), (10, 0)) - - self.assertEqual(aslongandoverflow(LONG_MIN - 1), (-1, -1)) - self.assertEqual(aslongandoverflow(LONG_MAX + 1), (-1, 1)) - # CRASHES aslongandoverflow(1.0) - # CRASHES aslongandoverflow(NULL) + 
self.check_long_asintandoverflow(aslongandoverflow, LONG_MIN, LONG_MAX) def test_long_asunsignedlong(self): # Test PyLong_AsUnsignedLong() and PyLong_FromUnsignedLong() asunsignedlong = _testlimitedcapi.pylong_asunsignedlong from _testcapi import ULONG_MAX - # round trip (object -> unsigned long -> object) - for value in (ULONG_MAX, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(asunsignedlong(value), value) - - self.assertEqual(asunsignedlong(IntSubclass(42)), 42) - self.assertRaises(TypeError, asunsignedlong, Index(42)) - self.assertRaises(TypeError, asunsignedlong, MyIndexAndInt()) - - self.assertRaises(OverflowError, asunsignedlong, -1) - self.assertRaises(OverflowError, asunsignedlong, ULONG_MAX + 1) - self.assertRaises(TypeError, asunsignedlong, 1.0) - self.assertRaises(TypeError, asunsignedlong, b'2') - self.assertRaises(TypeError, asunsignedlong, '3') - self.assertRaises(SystemError, asunsignedlong, NULL) + self.check_long_asint(asunsignedlong, 0, ULONG_MAX, + use_index=False) def test_long_asunsignedlongmask(self): # Test PyLong_AsUnsignedLongMask() asunsignedlongmask = _testlimitedcapi.pylong_asunsignedlongmask from _testcapi import ULONG_MAX - # round trip (object -> unsigned long -> object) - for value in (ULONG_MAX, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(asunsignedlongmask(value), value) - - self.assertEqual(asunsignedlongmask(IntSubclass(42)), 42) - self.assertEqual(asunsignedlongmask(Index(42)), 42) - self.assertEqual(asunsignedlongmask(MyIndexAndInt()), 10) - - self.assertEqual(asunsignedlongmask(-1), ULONG_MAX) - self.assertEqual(asunsignedlongmask(ULONG_MAX + 1), 0) - self.assertRaises(TypeError, asunsignedlongmask, 1.0) - self.assertRaises(TypeError, asunsignedlongmask, b'2') - self.assertRaises(TypeError, asunsignedlongmask, '3') - self.assertRaises(SystemError, asunsignedlongmask, NULL) + self.check_long_asint(asunsignedlongmask, 0, ULONG_MAX, mask=True) def test_long_aslonglong(self): # Test PyLong_AsLongLong() and PyLong_FromLongLong() aslonglong = _testlimitedcapi.pylong_aslonglong from _testcapi import LLONG_MIN, LLONG_MAX - # round trip (object -> long long -> object) - for value in (LLONG_MIN, LLONG_MAX, -1, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(aslonglong(value), value) - - self.assertEqual(aslonglong(IntSubclass(42)), 42) - self.assertEqual(aslonglong(Index(42)), 42) - self.assertEqual(aslonglong(MyIndexAndInt()), 10) - - self.assertRaises(OverflowError, aslonglong, LLONG_MIN - 1) - self.assertRaises(OverflowError, aslonglong, LLONG_MAX + 1) - self.assertRaises(TypeError, aslonglong, 1.0) - self.assertRaises(TypeError, aslonglong, b'2') - self.assertRaises(TypeError, aslonglong, '3') - self.assertRaises(SystemError, aslonglong, NULL) + self.check_long_asint(aslonglong, LLONG_MIN, LLONG_MAX) def test_long_aslonglongandoverflow(self): # Test PyLong_AsLongLongAndOverflow() aslonglongandoverflow = _testlimitedcapi.pylong_aslonglongandoverflow from _testcapi import LLONG_MIN, LLONG_MAX - # round trip (object -> long long -> object) - for value in (LLONG_MIN, LLONG_MAX, -1, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(aslonglongandoverflow(value), (value, 0)) - - self.assertEqual(aslonglongandoverflow(IntSubclass(42)), (42, 0)) - self.assertEqual(aslonglongandoverflow(Index(42)), (42, 0)) - self.assertEqual(aslonglongandoverflow(MyIndexAndInt()), (10, 0)) - - self.assertEqual(aslonglongandoverflow(LLONG_MIN - 1), (-1, -1)) - self.assertEqual(aslonglongandoverflow(LLONG_MAX 
+ 1), (-1, 1)) - # CRASHES aslonglongandoverflow(1.0) - # CRASHES aslonglongandoverflow(NULL) + self.check_long_asintandoverflow(aslonglongandoverflow, LLONG_MIN, LLONG_MAX) def test_long_asunsignedlonglong(self): # Test PyLong_AsUnsignedLongLong() and PyLong_FromUnsignedLongLong() asunsignedlonglong = _testlimitedcapi.pylong_asunsignedlonglong from _testcapi import ULLONG_MAX - # round trip (object -> unsigned long long -> object) - for value in (ULLONG_MAX, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(asunsignedlonglong(value), value) - - self.assertEqual(asunsignedlonglong(IntSubclass(42)), 42) - self.assertRaises(TypeError, asunsignedlonglong, Index(42)) - self.assertRaises(TypeError, asunsignedlonglong, MyIndexAndInt()) - - self.assertRaises(OverflowError, asunsignedlonglong, -1) - self.assertRaises(OverflowError, asunsignedlonglong, ULLONG_MAX + 1) - self.assertRaises(TypeError, asunsignedlonglong, 1.0) - self.assertRaises(TypeError, asunsignedlonglong, b'2') - self.assertRaises(TypeError, asunsignedlonglong, '3') - self.assertRaises(SystemError, asunsignedlonglong, NULL) + self.check_long_asint(asunsignedlonglong, 0, ULLONG_MAX, use_index=False) def test_long_asunsignedlonglongmask(self): # Test PyLong_AsUnsignedLongLongMask() asunsignedlonglongmask = _testlimitedcapi.pylong_asunsignedlonglongmask from _testcapi import ULLONG_MAX - # round trip (object -> unsigned long long -> object) - for value in (ULLONG_MAX, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(asunsignedlonglongmask(value), value) - - self.assertEqual(asunsignedlonglongmask(IntSubclass(42)), 42) - self.assertEqual(asunsignedlonglongmask(Index(42)), 42) - self.assertEqual(asunsignedlonglongmask(MyIndexAndInt()), 10) - - self.assertEqual(asunsignedlonglongmask(-1), ULLONG_MAX) - self.assertEqual(asunsignedlonglongmask(ULLONG_MAX + 1), 0) - self.assertRaises(TypeError, asunsignedlonglongmask, 1.0) - self.assertRaises(TypeError, asunsignedlonglongmask, b'2') - self.assertRaises(TypeError, asunsignedlonglongmask, '3') - self.assertRaises(SystemError, asunsignedlonglongmask, NULL) + self.check_long_asint(asunsignedlonglongmask, 0, ULLONG_MAX, mask=True) def test_long_as_ssize_t(self): # Test PyLong_AsSsize_t() and PyLong_FromSsize_t() as_ssize_t = _testlimitedcapi.pylong_as_ssize_t from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX - # round trip (object -> Py_ssize_t -> object) - for value in (PY_SSIZE_T_MIN, PY_SSIZE_T_MAX, -1, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(as_ssize_t(value), value) - - self.assertEqual(as_ssize_t(IntSubclass(42)), 42) - self.assertRaises(TypeError, as_ssize_t, Index(42)) - self.assertRaises(TypeError, as_ssize_t, MyIndexAndInt()) - - self.assertRaises(OverflowError, as_ssize_t, PY_SSIZE_T_MIN - 1) - self.assertRaises(OverflowError, as_ssize_t, PY_SSIZE_T_MAX + 1) - self.assertRaises(TypeError, as_ssize_t, 1.0) - self.assertRaises(TypeError, as_ssize_t, b'2') - self.assertRaises(TypeError, as_ssize_t, '3') - self.assertRaises(SystemError, as_ssize_t, NULL) + self.check_long_asint(as_ssize_t, PY_SSIZE_T_MIN, PY_SSIZE_T_MAX, + use_index=False) def test_long_as_size_t(self): # Test PyLong_AsSize_t() and PyLong_FromSize_t() as_size_t = _testlimitedcapi.pylong_as_size_t from _testcapi import SIZE_MAX - # round trip (object -> size_t -> object) - for value in (SIZE_MAX, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(as_size_t(value), value) - - self.assertEqual(as_size_t(IntSubclass(42)), 42) - 
self.assertRaises(TypeError, as_size_t, Index(42)) - self.assertRaises(TypeError, as_size_t, MyIndexAndInt()) - - self.assertRaises(OverflowError, as_size_t, -1) - self.assertRaises(OverflowError, as_size_t, SIZE_MAX + 1) - self.assertRaises(TypeError, as_size_t, 1.0) - self.assertRaises(TypeError, as_size_t, b'2') - self.assertRaises(TypeError, as_size_t, '3') - self.assertRaises(SystemError, as_size_t, NULL) + self.check_long_asint(as_size_t, 0, SIZE_MAX, use_index=False) def test_long_asdouble(self): # Test PyLong_AsDouble() @@ -431,21 +332,7 @@ def _test_long_aspid(self, aspid): bits = 8 * SIZEOF_PID_T PID_T_MIN = -2**(bits-1) PID_T_MAX = 2**(bits-1) - 1 - # round trip (object -> long -> object) - for value in (PID_T_MIN, PID_T_MAX, -1, 0, 1, 1234): - with self.subTest(value=value): - self.assertEqual(aspid(value), value) - - self.assertEqual(aspid(IntSubclass(42)), 42) - self.assertEqual(aspid(Index(42)), 42) - self.assertEqual(aspid(MyIndexAndInt()), 10) - - self.assertRaises(OverflowError, aspid, PID_T_MIN - 1) - self.assertRaises(OverflowError, aspid, PID_T_MAX + 1) - self.assertRaises(TypeError, aspid, 1.0) - self.assertRaises(TypeError, aspid, b'2') - self.assertRaises(TypeError, aspid, '3') - self.assertRaises(SystemError, aspid, NULL) + self.check_long_asint(aspid, PID_T_MIN, PID_T_MAX) def test_long_aspid(self): self._test_long_aspid(_testcapi.pylong_aspid) @@ -496,8 +383,9 @@ def test_long_asnativebytes(self): "PyLong_AsNativeBytes(v, , 0, -1)") self.assertEqual(buffer, b"\x5a", "buffer overwritten when it should not have been") - # Also check via the __index__ path - self.assertEqual(expect, asnativebytes(Index(v), buffer, 0, -1), + # Also check via the __index__ path. + # We pass Py_ASNATIVEBYTES_NATIVE_ENDIAN | ALLOW_INDEX + self.assertEqual(expect, asnativebytes(Index(v), buffer, 0, 3 | 16), "PyLong_AsNativeBytes(Index(v), , 0, -1)") self.assertEqual(buffer, b"\x5a", "buffer overwritten when it should not have been") @@ -607,6 +495,12 @@ def test_long_asnativebytes(self): with self.assertRaises(ValueError): asnativebytes(-1, buffer, 0, 8) + # Ensure omitting Py_ASNATIVEBYTES_ALLOW_INDEX raises on __index__ value + with self.assertRaises(TypeError): + asnativebytes(Index(1), buffer, 0, -1) + with self.assertRaises(TypeError): + asnativebytes(Index(1), buffer, 0, 3) + # Check a few error conditions. These are validated in code, but are # unspecified in docs, so if we make changes to the implementation, it's # fine to just update these tests rather than preserve the behaviour. 
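Note: the hunks above replace the copy-pasted per-function assertions in Lib/test/test_capi/test_long.py with two shared helpers, check_long_asint() and check_long_asintandoverflow(), parametrized by the C type's bounds, __index__ support, and mask (wrap-around) semantics. Below is a minimal sketch of that consolidation pattern, assuming a pure-Python stand-in (as_int8) for the _testlimitedcapi conversion wrappers; it is illustrative only, not the CPython helper itself.

import unittest


class BoundedConversionChecks(unittest.TestCase):
    # Simplified, illustrative analogue of the check_long_asint() helper
    # added above; the real helper additionally covers __index__ support,
    # int subclasses, mask (wrap-around) semantics and custom error types.

    def check_bounded(self, func, min_val, max_val):
        # Round trip: in-range values convert unchanged.
        values = (0, 1, max_val)
        if min_val < 0:
            values += (-1, min_val)
        for value in values:
            with self.subTest(value=value):
                self.assertEqual(func(value), value)
        # Bound checking: out-of-range values overflow.
        self.assertRaises(OverflowError, func, max_val + 1)
        self.assertRaises(OverflowError, func, min_val - 1)

    def test_int8_conversion(self):
        # Pure-Python stand-in for a _testlimitedcapi conversion wrapper.
        def as_int8(value):
            if not (-128 <= value <= 127):
                raise OverflowError("value does not fit in a signed 8-bit int")
            return value
        self.check_bounded(as_int8, -128, 127)


if __name__ == "__main__":
    unittest.main()

One helper driven with different bounds keeps the per-type tests (PyLong_AsInt, PyLong_AsLong, PyLong_AsUnsignedLong, ...) down to a single line each, as the hunks above show.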
diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index f3d16e4a2fc..080b3e65332 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -1180,6 +1180,19 @@ def genf(): yield gen = genf() self.assertEqual(_testcapi.gen_get_code(gen), gen.gi_code) + def test_pyeval_getlocals(self): + # Test PyEval_GetLocals() + x = 1 + self.assertEqual(_testcapi.pyeval_getlocals(), + {'self': self, + 'x': 1}) + + y = 2 + self.assertEqual(_testcapi.pyeval_getlocals(), + {'self': self, + 'x': 1, + 'y': 2}) + @requires_limited_api class TestHeapTypeRelative(unittest.TestCase): diff --git a/Lib/test/test_capi/test_number.py b/Lib/test/test_capi/test_number.py new file mode 100644 index 00000000000..3c1f0f248c3 --- /dev/null +++ b/Lib/test/test_capi/test_number.py @@ -0,0 +1,335 @@ +import itertools +import operator +import sys +import unittest +import warnings + +from test.support import cpython_only, import_helper + +_testcapi = import_helper.import_module('_testcapi') +from _testcapi import PY_SSIZE_T_MAX, PY_SSIZE_T_MIN + +try: + from _testbuffer import ndarray +except ImportError: + ndarray = None + +NULL = None + +class BadDescr: + def __get__(self, obj, objtype=None): + raise RuntimeError + +class WithDunder: + def _meth(self, *args): + if self.val: + return self.val + if self.exc: + raise self.exc + @classmethod + def with_val(cls, val): + obj = super().__new__(cls) + obj.val = val + obj.exc = None + setattr(cls, cls.methname, cls._meth) + return obj + + @classmethod + def with_exc(cls, exc): + obj = super().__new__(cls) + obj.val = None + obj.exc = exc + setattr(cls, cls.methname, cls._meth) + return obj + +class HasBadAttr: + def __new__(cls): + obj = super().__new__(cls) + setattr(cls, cls.methname, BadDescr()) + return obj + + +class IndexLike(WithDunder): + methname = '__index__' + +class IntLike(WithDunder): + methname = '__int__' + +class FloatLike(WithDunder): + methname = '__float__' + + +def subclassof(base): + return type(base.__name__ + 'Subclass', (base,), {}) + + +class SomeError(Exception): + pass + +class OtherError(Exception): + pass + + +class CAPITest(unittest.TestCase): + def test_check(self): + # Test PyNumber_Check() + check = _testcapi.number_check + + self.assertTrue(check(1)) + self.assertTrue(check(IndexLike.with_val(1))) + self.assertTrue(check(IntLike.with_val(99))) + self.assertTrue(check(0.5)) + self.assertTrue(check(FloatLike.with_val(4.25))) + self.assertTrue(check(1+2j)) + + self.assertFalse(check([])) + self.assertFalse(check("abc")) + self.assertFalse(check(object())) + self.assertFalse(check(NULL)) + + def test_unary_ops(self): + methmap = {'__neg__': _testcapi.number_negative, # PyNumber_Negative() + '__pos__': _testcapi.number_positive, # PyNumber_Positive() + '__abs__': _testcapi.number_absolute, # PyNumber_Absolute() + '__invert__': _testcapi.number_invert} # PyNumber_Invert() + + for name, func in methmap.items(): + # Generic object, has no tp_as_number structure + self.assertRaises(TypeError, func, object()) + + # C-API function accepts NULL + self.assertRaises(SystemError, func, NULL) + + # Behave as corresponding unary operation + op = getattr(operator, name) + for x in [0, 42, -1, 3.14, 1+2j]: + try: + op(x) + except TypeError: + self.assertRaises(TypeError, func, x) + else: + self.assertEqual(func(x), op(x)) + + def test_binary_ops(self): + methmap = {'__add__': _testcapi.number_add, # PyNumber_Add() + '__sub__': _testcapi.number_subtract, # PyNumber_Subtract() + '__mul__': _testcapi.number_multiply, 
# PyNumber_Multiply() + '__matmul__': _testcapi.number_matrixmultiply, # PyNumber_MatrixMultiply() + '__floordiv__': _testcapi.number_floordivide, # PyNumber_FloorDivide() + '__truediv__': _testcapi.number_truedivide, # PyNumber_TrueDivide() + '__mod__': _testcapi.number_remainder, # PyNumber_Remainder() + '__divmod__': _testcapi.number_divmod, # PyNumber_Divmod() + '__lshift__': _testcapi.number_lshift, # PyNumber_Lshift() + '__rshift__': _testcapi.number_rshift, # PyNumber_Rshift() + '__and__': _testcapi.number_and, # PyNumber_And() + '__xor__': _testcapi.number_xor, # PyNumber_Xor() + '__or__': _testcapi.number_or, # PyNumber_Or() + '__pow__': _testcapi.number_power, # PyNumber_Power() + '__iadd__': _testcapi.number_inplaceadd, # PyNumber_InPlaceAdd() + '__isub__': _testcapi.number_inplacesubtract, # PyNumber_InPlaceSubtract() + '__imul__': _testcapi.number_inplacemultiply, # PyNumber_InPlaceMultiply() + '__imatmul__': _testcapi.number_inplacematrixmultiply, # PyNumber_InPlaceMatrixMultiply() + '__ifloordiv__': _testcapi.number_inplacefloordivide, # PyNumber_InPlaceFloorDivide() + '__itruediv__': _testcapi.number_inplacetruedivide, # PyNumber_InPlaceTrueDivide() + '__imod__': _testcapi.number_inplaceremainder, # PyNumber_InPlaceRemainder() + '__ilshift__': _testcapi.number_inplacelshift, # PyNumber_InPlaceLshift() + '__irshift__': _testcapi.number_inplacershift, # PyNumber_InPlaceRshift() + '__iand__': _testcapi.number_inplaceand, # PyNumber_InPlaceAnd() + '__ixor__': _testcapi.number_inplacexor, # PyNumber_InPlaceXor() + '__ior__': _testcapi.number_inplaceor, # PyNumber_InPlaceOr() + '__ipow__': _testcapi.number_inplacepower, # PyNumber_InPlacePower() + } + + for name, func in methmap.items(): + cases = [0, 42, 3.14, -1, 123, 1+2j] + + # Generic object, has no tp_as_number structure + for x in cases: + self.assertRaises(TypeError, func, object(), x) + self.assertRaises(TypeError, func, x, object()) + + # Behave as corresponding binary operation + op = getattr(operator, name, divmod) + for x, y in itertools.combinations(cases, 2): + try: + op(x, y) + except (TypeError, ValueError, ZeroDivisionError) as exc: + self.assertRaises(exc.__class__, func, x, y) + else: + self.assertEqual(func(x, y), op(x, y)) + + # CRASHES func(NULL, object()) + # CRASHES func(object(), NULL) + + @unittest.skipIf(ndarray is None, "needs _testbuffer") + def test_misc_add(self): + # PyNumber_Add(), PyNumber_InPlaceAdd() + add = _testcapi.number_add + inplaceadd = _testcapi.number_inplaceadd + + # test sq_concat/sq_inplace_concat slots + a, b, r = [1, 2], [3, 4], [1, 2, 3, 4] + self.assertEqual(add(a, b), r) + self.assertEqual(a, [1, 2]) + self.assertRaises(TypeError, add, ndarray([1], (1,)), 2) + a, b, r = [1, 2], [3, 4], [1, 2, 3, 4] + self.assertEqual(inplaceadd(a, b), r) + self.assertEqual(a, r) + self.assertRaises(TypeError, inplaceadd, ndarray([1], (1,)), 2) + + @unittest.skipIf(ndarray is None, "needs _testbuffer") + def test_misc_multiply(self): + # PyNumber_Multiply(), PyNumber_InPlaceMultiply() + multiply = _testcapi.number_multiply + inplacemultiply = _testcapi.number_inplacemultiply + + # test sq_repeat/sq_inplace_repeat slots + a, b, r = [1], 2, [1, 1] + self.assertEqual(multiply(a, b), r) + self.assertEqual((a, b), ([1], 2)) + self.assertEqual(multiply(b, a), r) + self.assertEqual((a, b), ([1], 2)) + self.assertEqual(multiply([1], -1), []) + self.assertRaises(TypeError, multiply, ndarray([1], (1,)), 2) + self.assertRaises(TypeError, multiply, [1], 0.5) + self.assertRaises(OverflowError, multiply, 
[1], PY_SSIZE_T_MAX + 1) + self.assertRaises(MemoryError, multiply, [1, 2], PY_SSIZE_T_MAX//2 + 1) + a, b, r = [1], 2, [1, 1] + self.assertEqual(inplacemultiply(a, b), r) + self.assertEqual((a, b), (r, 2)) + a = [1] + self.assertEqual(inplacemultiply(b, a), r) + self.assertEqual((a, b), ([1], 2)) + self.assertRaises(TypeError, inplacemultiply, ndarray([1], (1,)), 2) + self.assertRaises(OverflowError, inplacemultiply, [1], PY_SSIZE_T_MAX + 1) + self.assertRaises(MemoryError, inplacemultiply, [1, 2], PY_SSIZE_T_MAX//2 + 1) + + def test_misc_power(self): + # PyNumber_Power() + power = _testcapi.number_power + + class HasPow(WithDunder): + methname = '__pow__' + + # ternary op + self.assertEqual(power(4, 11, 5), pow(4, 11, 5)) + self.assertRaises(TypeError, power, 4, 11, 1.25) + self.assertRaises(TypeError, power, 4, 11, HasPow.with_val(NotImplemented)) + self.assertRaises(TypeError, power, 4, 11, object()) + + @cpython_only + def test_rshift_print(self): + # This tests correct syntax hint for py2 redirection (>>). + rshift = _testcapi.number_rshift + + with self.assertRaises(TypeError) as context: + rshift(print, 42) + self.assertIn('Did you mean "print(, ' + 'file=)"?', str(context.exception)) + with self.assertRaises(TypeError) as context: + rshift(max, sys.stderr) + self.assertNotIn('Did you mean ', str(context.exception)) + with self.assertRaises(TypeError) as context: + rshift(1, "spam") + + def test_long(self): + # Test PyNumber_Long() + long = _testcapi.number_long + + self.assertEqual(long(42), 42) + self.assertEqual(long(1.25), 1) + self.assertEqual(long("42"), 42) + self.assertEqual(long(b"42"), 42) + self.assertEqual(long(bytearray(b"42")), 42) + self.assertEqual(long(memoryview(b"42")), 42) + self.assertEqual(long(IndexLike.with_val(99)), 99) + self.assertEqual(long(IntLike.with_val(99)), 99) + + self.assertRaises(TypeError, long, IntLike.with_val(1.0)) + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + self.assertRaises(DeprecationWarning, long, IntLike.with_val(True)) + with self.assertWarns(DeprecationWarning): + self.assertEqual(long(IntLike.with_val(True)), 1) + self.assertRaises(RuntimeError, long, IntLike.with_exc(RuntimeError)) + + self.assertRaises(TypeError, long, 1j) + self.assertRaises(TypeError, long, object()) + self.assertRaises(SystemError, long, NULL) + + def test_float(self): + # Test PyNumber_Float() + float_ = _testcapi.number_float + + self.assertEqual(float_(1.25), 1.25) + self.assertEqual(float_(123), 123.) 
+ self.assertEqual(float_("1.25"), 1.25) + + self.assertEqual(float_(FloatLike.with_val(4.25)), 4.25) + self.assertEqual(float_(IndexLike.with_val(99)), 99.0) + self.assertEqual(float_(IndexLike.with_val(-1)), -1.0) + + self.assertRaises(TypeError, float_, FloatLike.with_val(687)) + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + self.assertRaises(DeprecationWarning, float_, FloatLike.with_val(subclassof(float)(4.25))) + with self.assertWarns(DeprecationWarning): + self.assertEqual(float_(FloatLike.with_val(subclassof(float)(4.25))), 4.25) + self.assertRaises(RuntimeError, float_, FloatLike.with_exc(RuntimeError)) + + self.assertRaises(TypeError, float_, IndexLike.with_val(1.25)) + self.assertRaises(OverflowError, float_, IndexLike.with_val(2**2000)) + + self.assertRaises(TypeError, float_, 1j) + self.assertRaises(TypeError, float_, object()) + self.assertRaises(SystemError, float_, NULL) + + def test_index(self): + # Test PyNumber_Index() + index = _testcapi.number_index + + self.assertEqual(index(11), 11) + + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + self.assertRaises(DeprecationWarning, index, IndexLike.with_val(True)) + with self.assertWarns(DeprecationWarning): + self.assertEqual(index(IndexLike.with_val(True)), 1) + self.assertRaises(TypeError, index, IndexLike.with_val(1.0)) + self.assertRaises(RuntimeError, index, IndexLike.with_exc(RuntimeError)) + + self.assertRaises(TypeError, index, 1.25) + self.assertRaises(TypeError, index, "42") + self.assertRaises(TypeError, index, object()) + self.assertRaises(SystemError, index, NULL) + + def test_tobase(self): + # Test PyNumber_ToBase() + tobase = _testcapi.number_tobase + + self.assertEqual(tobase(10, 2), bin(10)) + self.assertEqual(tobase(11, 8), oct(11)) + self.assertEqual(tobase(16, 10), str(16)) + self.assertEqual(tobase(13, 16), hex(13)) + + self.assertRaises(SystemError, tobase, NULL, 2) + self.assertRaises(SystemError, tobase, 2, 3) + self.assertRaises(TypeError, tobase, 1.25, 2) + self.assertRaises(TypeError, tobase, "42", 2) + + def test_asssizet(self): + # Test PyNumber_AsSsize_t() + asssizet = _testcapi.number_asssizet + + for n in [*range(-6, 7), PY_SSIZE_T_MIN, PY_SSIZE_T_MAX]: + self.assertEqual(asssizet(n, OverflowError), n) + self.assertEqual(asssizet(PY_SSIZE_T_MAX+10, NULL), PY_SSIZE_T_MAX) + self.assertEqual(asssizet(PY_SSIZE_T_MIN-10, NULL), PY_SSIZE_T_MIN) + + self.assertRaises(OverflowError, asssizet, PY_SSIZE_T_MAX + 10, OverflowError) + self.assertRaises(RuntimeError, asssizet, PY_SSIZE_T_MAX + 10, RuntimeError) + self.assertRaises(SystemError, asssizet, NULL, TypeError) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_set.py b/Lib/test/test_capi/test_set.py index 499a5148d78..62d90a3f943 100644 --- a/Lib/test/test_capi/test_set.py +++ b/Lib/test/test_capi/test_set.py @@ -265,3 +265,7 @@ def test_set_next_entry(self): with self.assertRaises(SystemError): set_next(object(), 0) # CRASHES: set_next(NULL, 0) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_time.py b/Lib/test/test_capi/test_time.py index 17ebd7c1962..989c158818c 100644 --- a/Lib/test/test_capi/test_time.py +++ b/Lib/test/test_capi/test_time.py @@ -72,3 +72,7 @@ def test_time(self): # Test PyTime_Time() and PyTime_TimeRaw() self.check_clock(_testcapi.PyTime_Time, time.time) self.check_clock(_testcapi.PyTime_TimeRaw, time.time) + + +if __name__ == "__main__": + unittest.main() diff --git 
a/Lib/test/test_capi/test_tuple.py b/Lib/test/test_capi/test_tuple.py new file mode 100644 index 00000000000..e6b49caeb51 --- /dev/null +++ b/Lib/test/test_capi/test_tuple.py @@ -0,0 +1,261 @@ +import unittest +import sys +from collections import namedtuple +from test.support import import_helper + +_testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') + +NULL = None +PY_SSIZE_T_MIN = _testcapi.PY_SSIZE_T_MIN +PY_SSIZE_T_MAX = _testcapi.PY_SSIZE_T_MAX + +class TupleSubclass(tuple): + pass + + +class CAPITest(unittest.TestCase): + def test_check(self): + # Test PyTuple_Check() + check = _testlimitedcapi.tuple_check + + self.assertTrue(check((1, 2))) + self.assertTrue(check(())) + self.assertTrue(check(TupleSubclass((1, 2)))) + self.assertFalse(check({1: 2})) + self.assertFalse(check([1, 2])) + self.assertFalse(check(42)) + self.assertFalse(check(object())) + + # CRASHES check(NULL) + + def test_tuple_checkexact(self): + # Test PyTuple_CheckExact() + check = _testlimitedcapi.tuple_checkexact + + self.assertTrue(check((1, 2))) + self.assertTrue(check(())) + self.assertFalse(check(TupleSubclass((1, 2)))) + self.assertFalse(check({1: 2})) + self.assertFalse(check([1, 2])) + self.assertFalse(check(42)) + self.assertFalse(check(object())) + + # CRASHES check(NULL) + + def test_tuple_new(self): + # Test PyTuple_New() + tuple_new = _testlimitedcapi.tuple_new + size = _testlimitedcapi.tuple_size + checknull = _testcapi._check_tuple_item_is_NULL + + tup1 = tuple_new(0) + self.assertEqual(tup1, ()) + self.assertEqual(size(tup1), 0) + self.assertIs(type(tup1), tuple) + tup2 = tuple_new(1) + self.assertIs(type(tup2), tuple) + self.assertEqual(size(tup2), 1) + self.assertIsNot(tup2, tup1) + self.assertTrue(checknull(tup2, 0)) + + self.assertRaises(SystemError, tuple_new, -1) + self.assertRaises(SystemError, tuple_new, PY_SSIZE_T_MIN) + self.assertRaises(MemoryError, tuple_new, PY_SSIZE_T_MAX) + + def test_tuple_pack(self): + # Test PyTuple_Pack() + pack = _testlimitedcapi.tuple_pack + + self.assertEqual(pack(0), ()) + self.assertEqual(pack(1, [1]), ([1],)) + self.assertEqual(pack(2, [1], [2]), ([1], [2])) + + self.assertRaises(SystemError, pack, PY_SSIZE_T_MIN) + self.assertRaises(SystemError, pack, -1) + self.assertRaises(MemoryError, pack, PY_SSIZE_T_MAX) + + # CRASHES pack(1, NULL) + # CRASHES pack(2, [1]) + + def test_tuple_size(self): + # Test PyTuple_Size() + size = _testlimitedcapi.tuple_size + + self.assertEqual(size(()), 0) + self.assertEqual(size((1, 2)), 2) + self.assertEqual(size(TupleSubclass((1, 2))), 2) + + self.assertRaises(SystemError, size, []) + self.assertRaises(SystemError, size, 42) + self.assertRaises(SystemError, size, object()) + + # CRASHES size(NULL) + + def test_tuple_get_size(self): + # Test PyTuple_GET_SIZE() + size = _testcapi.tuple_get_size + + self.assertEqual(size(()), 0) + self.assertEqual(size((1, 2)), 2) + self.assertEqual(size(TupleSubclass((1, 2))), 2) + + def test_tuple_getitem(self): + # Test PyTuple_GetItem() + getitem = _testlimitedcapi.tuple_getitem + + tup = ([1], [2], [3]) + self.assertEqual(getitem(tup, 0), [1]) + self.assertEqual(getitem(tup, 2), [3]) + + tup2 = TupleSubclass(([1], [2], [3])) + self.assertEqual(getitem(tup2, 0), [1]) + self.assertEqual(getitem(tup2, 2), [3]) + + self.assertRaises(IndexError, getitem, tup, PY_SSIZE_T_MIN) + self.assertRaises(IndexError, getitem, tup, -1) + self.assertRaises(IndexError, getitem, tup, len(tup)) + self.assertRaises(IndexError, getitem, tup, 
PY_SSIZE_T_MAX) + self.assertRaises(SystemError, getitem, [1, 2, 3], 1) + self.assertRaises(SystemError, getitem, 42, 1) + + # CRASHES getitem(NULL, 0) + + def test_tuple_get_item(self): + # Test PyTuple_GET_ITEM() + get_item = _testcapi.tuple_get_item + + tup = ([1], [2], [3]) + self.assertEqual(get_item(tup, 0), [1]) + self.assertEqual(get_item(tup, 2), [3]) + + tup2 = TupleSubclass(([1], [2], [3])) + self.assertEqual(get_item(tup2, 0), [1]) + self.assertEqual(get_item(tup2, 2), [3]) + + # CRASHES get_item(NULL, 0) + + def test_tuple_getslice(self): + # Test PyTuple_GetSlice() + getslice = _testlimitedcapi.tuple_getslice + + # empty + tup = ([1], [2], [3]) + self.assertEqual(getslice(tup, PY_SSIZE_T_MIN, 0), ()) + self.assertEqual(getslice(tup, -1, 0), ()) + self.assertEqual(getslice(tup, 3, PY_SSIZE_T_MAX), ()) + self.assertEqual(getslice(tup, 1, 1), ()) + self.assertEqual(getslice(tup, 2, 1), ()) + tup = TupleSubclass(([1], [2], [3])) + self.assertEqual(getslice(tup, PY_SSIZE_T_MIN, 0), ()) + self.assertEqual(getslice(tup, -1, 0), ()) + self.assertEqual(getslice(tup, 3, PY_SSIZE_T_MAX), ()) + self.assertEqual(getslice(tup, 1, 1), ()) + self.assertEqual(getslice(tup, 2, 1), ()) + + # slice + tup = ([1], [2], [3], [4]) + self.assertEqual(getslice(tup, 1, 3), ([2], [3])) + tup = TupleSubclass(([1], [2], [3], [4])) + self.assertEqual(getslice(tup, 1, 3), ([2], [3])) + + # whole + tup = ([1], [2], [3]) + self.assertEqual(getslice(tup, 0, 3), tup) + self.assertEqual(getslice(tup, 0, 100), tup) + self.assertEqual(getslice(tup, -100, 100), tup) + tup = TupleSubclass(([1], [2], [3])) + self.assertEqual(getslice(tup, 0, 3), tup) + self.assertEqual(getslice(tup, 0, 100), tup) + self.assertEqual(getslice(tup, -100, 100), tup) + + self.assertRaises(SystemError, getslice, [[1], [2], [3]], 0, 0) + self.assertRaises(SystemError, getslice, 42, 0, 0) + + # CRASHES getslice(NULL, 0, 0) + + def test_tuple_setitem(self): + # Test PyTuple_SetItem() + setitem = _testlimitedcapi.tuple_setitem + checknull = _testcapi._check_tuple_item_is_NULL + + tup = ([1], [2]) + self.assertEqual(setitem(tup, 0, []), ([], [2])) + self.assertEqual(setitem(tup, 1, []), ([1], [])) + + tup2 = setitem(tup, 1, NULL) + self.assertTrue(checknull(tup2, 1)) + + tup2 = TupleSubclass(([1], [2])) + self.assertRaises(SystemError, setitem, tup2, 0, []) + + self.assertRaises(IndexError, setitem, tup, PY_SSIZE_T_MIN, []) + self.assertRaises(IndexError, setitem, tup, -1, []) + self.assertRaises(IndexError, setitem, tup, len(tup), []) + self.assertRaises(IndexError, setitem, tup, PY_SSIZE_T_MAX, []) + self.assertRaises(SystemError, setitem, [1], 0, []) + self.assertRaises(SystemError, setitem, 42, 0, []) + + # CRASHES setitem(NULL, 0, []) + + def test_tuple_set_item(self): + # Test PyTuple_SET_ITEM() + set_item = _testcapi.tuple_set_item + checknull = _testcapi._check_tuple_item_is_NULL + + tup = ([1], [2]) + self.assertEqual(set_item(tup, 0, []), ([], [2])) + self.assertEqual(set_item(tup, 1, []), ([1], [])) + + tup2 = set_item(tup, 1, NULL) + self.assertTrue(checknull(tup2, 1)) + + tup2 = TupleSubclass(([1], [2])) + self.assertIs(set_item(tup2, 0, []), tup2) + self.assertEqual(tup2, ([], [2])) + + # CRASHES set_item(tup, -1, []) + # CRASHES set_item(tup, len(tup), []) + # CRASHES set_item([1], 0, []) + # CRASHES set_item(NULL, 0, []) + + def test__tuple_resize(self): + # Test _PyTuple_Resize() + resize = _testcapi._tuple_resize + checknull = _testcapi._check_tuple_item_is_NULL + + a = () + b = resize(a, 0, False) + self.assertEqual(len(a), 
0) + self.assertEqual(len(b), 0) + b = resize(a, 2, False) + self.assertEqual(len(a), 0) + self.assertEqual(len(b), 2) + self.assertTrue(checknull(b, 0)) + self.assertTrue(checknull(b, 1)) + + a = ([1], [2], [3]) + b = resize(a, 3) + self.assertEqual(b, a) + b = resize(a, 2) + self.assertEqual(b, a[:2]) + b = resize(a, 5) + self.assertEqual(len(b), 5) + self.assertEqual(b[:3], a) + self.assertTrue(checknull(b, 3)) + self.assertTrue(checknull(b, 4)) + + a = () + self.assertRaises(MemoryError, resize, a, PY_SSIZE_T_MAX) + self.assertRaises(SystemError, resize, a, -1) + self.assertRaises(SystemError, resize, a, PY_SSIZE_T_MIN) + # refcount > 1 + a = (1, 2, 3) + self.assertRaises(SystemError, resize, a, 3, False) + self.assertRaises(SystemError, resize, a, 0, False) + # non-tuple + self.assertRaises(SystemError, resize, [1, 2, 3], 0, False) + self.assertRaises(SystemError, resize, NULL, 0, False) + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py index 655d53b8d5b..00c32254f70 100644 --- a/Lib/test/test_class.py +++ b/Lib/test/test_class.py @@ -1,6 +1,7 @@ "Test the functionality of Python classes implementing operators." import unittest +from test.support import cpython_only, import_helper, script_helper testmeths = [ @@ -882,6 +883,24 @@ class Foo: f.a = 3 self.assertEqual(f.a, 3) + def test_rematerialize_object_dict(self): + # gh-121860: rematerializing an object's managed dictionary after it + # had been deleted caused a crash. + class Foo: pass + f = Foo() + f.__dict__["attr"] = 1 + del f.__dict__ + + # Using a str subclass is a way to trigger the re-materialization + class StrSubclass(str): pass + self.assertFalse(hasattr(f, StrSubclass("attr"))) + + # Changing the __class__ also triggers the re-materialization + class Bar: pass + f.__class__ = Bar + self.assertIsInstance(f, Bar) + self.assertEqual(f.__dict__, {}) + def test_store_attr_type_cache(self): """Verifies that the type cache doesn't provide a value which is inconsistent from the dict.""" @@ -901,6 +920,36 @@ class C: C.a = X() C.a = X() + @cpython_only + def test_detach_materialized_dict_no_memory(self): + # Skip test if _testcapi is not available: + import_helper.import_module('_testcapi') + + code = """if 1: + import test.support + import _testcapi + + class A: + def __init__(self): + self.a = 1 + self.b = 2 + a = A() + d = a.__dict__ + with test.support.catch_unraisable_exception() as ex: + _testcapi.set_nomemory(0, 1) + del a + assert ex.unraisable.exc_type is MemoryError + try: + d["a"] + except KeyError: + pass + else: + assert False, "KeyError not raised" + """ + rc, out, err = script_helper.assert_python_ok("-c", code) + self.assertEqual(rc, 0) + self.assertFalse(out, msg=out.decode('utf-8')) + self.assertFalse(err, msg=err.decode('utf-8')) if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py index f3fd610414c..a7ba7f3d998 100644 --- a/Lib/test/test_clinic.py +++ b/Lib/test/test_clinic.py @@ -322,7 +322,7 @@ def __init__(self): """ self.expect_failure(block, err, lineno=8) - def test_multiple_star_in_args(self): + def test_star_after_vararg(self): err = "'my_test_func' uses '*' more than once." block = """ /*[clinic input] @@ -336,6 +336,20 @@ def test_multiple_star_in_args(self): """ self.expect_failure(block, err, lineno=6) + def test_vararg_after_star(self): + err = "'my_test_func' uses '*' more than once." 
+ block = """ + /*[clinic input] + my_test_func + + pos_arg: object + * + *args: object + kw_arg: object + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=6) + def test_module_already_got_one(self): err = "Already defined module 'm'!" block = """ @@ -1787,13 +1801,43 @@ def test_parameters_required_after_depr_star2(self): ) self.expect_failure(block, err, lineno=4) + def test_parameters_required_after_depr_star3(self): + block = """ + module foo + foo.bar + a: int + * [from 3.14] + *args: object + b: int + Docstring. + """ + err = ( + "Function 'bar' specifies '* [from ...]' without " + "following parameters." + ) + self.expect_failure(block, err, lineno=4) + def test_depr_star_must_come_before_star(self): block = """ module foo foo.bar - this: int + a: int * * [from 3.14] + b: int + Docstring. + """ + err = "Function 'bar': '* [from ...]' must precede '*'" + self.expect_failure(block, err, lineno=4) + + def test_depr_star_must_come_before_vararg(self): + block = """ + module foo + foo.bar + a: int + *args: object + * [from 3.14] + b: int Docstring. """ err = "Function 'bar': '* [from ...]' must precede '*'" @@ -1908,7 +1952,7 @@ def test_double_slash(self): err = "Function 'bar' uses '/' more than once." self.expect_failure(block, err) - def test_mix_star_and_slash(self): + def test_slash_after_star(self): block = """ module foo foo.bar @@ -1921,6 +1965,19 @@ def test_mix_star_and_slash(self): err = "Function 'bar': '/' must precede '*'" self.expect_failure(block, err) + def test_slash_after_vararg(self): + block = """ + module foo + foo.bar + x: int + y: int + *args: object + z: int + / + """ + err = "Function 'bar': '/' must precede '*'" + self.expect_failure(block, err) + def test_depr_star_must_come_after_slash(self): block = """ module foo @@ -1960,6 +2017,19 @@ def test_star_must_come_after_depr_slash(self): err = "Function 'bar': '/ [from ...]' must precede '*'" self.expect_failure(block, err, lineno=4) + def test_vararg_must_come_after_depr_slash(self): + block = """ + module foo + foo.bar + a: int + *args: object + / [from 3.14] + b: int + Docstring. + """ + err = "Function 'bar': '/ [from ...]' must precede '*'" + self.expect_failure(block, err, lineno=4) + def test_depr_slash_must_come_after_slash(self): block = """ module foo @@ -1987,7 +2057,7 @@ def test_parameters_not_permitted_after_slash_for_now(self): self.expect_failure(block, err) def test_parameters_no_more_than_one_vararg(self): - err = "Too many var args" + err = "Function 'bar' uses '*' more than once." 
block = """ module foo foo.bar @@ -3319,13 +3389,6 @@ def test_posonly_vararg(self): with self.assertRaises(TypeError): ac_tester.posonly_vararg(1, 2, 3, b=4) - def test_vararg_and_posonly(self): - with self.assertRaises(TypeError): - ac_tester.vararg_and_posonly() - with self.assertRaises(TypeError): - ac_tester.vararg_and_posonly(1, b=2) - self.assertEqual(ac_tester.vararg_and_posonly(1, 2, 3, 4), (1, (2, 3, 4))) - def test_vararg(self): with self.assertRaises(TypeError): ac_tester.vararg() @@ -3334,11 +3397,27 @@ def test_vararg(self): self.assertEqual(ac_tester.vararg(1, 2, 3, 4), (1, (2, 3, 4))) def test_vararg_with_default(self): - with self.assertRaises(TypeError): - ac_tester.vararg_with_default() - self.assertEqual(ac_tester.vararg_with_default(1, b=False), (1, (), False)) - self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4), (1, (2, 3, 4), False)) - self.assertEqual(ac_tester.vararg_with_default(1, 2, 3, 4, b=True), (1, (2, 3, 4), True)) + fn = ac_tester.vararg_with_default + self.assertRaises(TypeError, fn) + self.assertRaises(TypeError, fn, 1, a=2) + self.assertEqual(fn(1, b=2), (1, (), True)) + self.assertEqual(fn(1, 2, 3, 4), (1, (2, 3, 4), False)) + self.assertEqual(fn(1, 2, 3, 4, b=5), (1, (2, 3, 4), True)) + self.assertEqual(fn(a=1), (1, (), False)) + self.assertEqual(fn(a=1, b=2), (1, (), True)) + + def test_vararg_with_default2(self): + fn = ac_tester.vararg_with_default2 + self.assertRaises(TypeError, fn) + self.assertRaises(TypeError, fn, 1, a=2) + self.assertEqual(fn(1, b=2), (1, (), 2, None)) + self.assertEqual(fn(1, b=2, c=3), (1, (), 2, 3)) + self.assertEqual(fn(1, 2, 3), (1, (2, 3), None, None)) + self.assertEqual(fn(1, 2, 3, b=4), (1, (2, 3), 4, None)) + self.assertEqual(fn(1, 2, 3, b=4, c=5), (1, (2, 3), 4, 5)) + self.assertEqual(fn(a=1), (1, (), None, None)) + self.assertEqual(fn(a=1, b=2), (1, (), 2, None)) + self.assertEqual(fn(a=1, b=2, c=3), (1, (), 2, 3)) def test_vararg_with_only_defaults(self): self.assertEqual(ac_tester.vararg_with_only_defaults(), ((), None)) @@ -3347,6 +3426,17 @@ def test_vararg_with_only_defaults(self): self.assertEqual(ac_tester.vararg_with_only_defaults(1, 2, 3, 4), ((1, 2, 3, 4), None)) self.assertEqual(ac_tester.vararg_with_only_defaults(1, 2, 3, 4, b=5), ((1, 2, 3, 4), 5)) + def test_vararg_kwonly_req_opt(self): + fn = ac_tester.vararg_kwonly_req_opt + self.assertRaises(TypeError, fn) + self.assertEqual(fn(a=1), ((), 1, None, None)) + self.assertEqual(fn(a=1, b=2), ((), 1, 2, None)) + self.assertEqual(fn(a=1, b=2, c=3), ((), 1, 2, 3)) + self.assertRaises(TypeError, fn, 1) + self.assertEqual(fn(1, a=2), ((1,), 2, None, None)) + self.assertEqual(fn(1, a=2, b=3), ((1,), 2, 3, None)) + self.assertEqual(fn(1, a=2, b=3, c=4), ((1,), 2, 3, 4)) + def test_gh_32092_oob(self): ac_tester.gh_32092_oob(1, 2, 3, 4, kw1=5, kw2=6) diff --git a/Lib/test/test_cmath.py b/Lib/test/test_cmath.py index 57f80d5d8cd..a96a5780b31 100644 --- a/Lib/test/test_cmath.py +++ b/Lib/test/test_cmath.py @@ -1,4 +1,5 @@ from test.support import requires_IEEE_754, cpython_only, import_helper +from test.support.testcase import ComplexesAreIdenticalMixin from test.test_math import parse_testfile, test_file import test.test_math as test_math import unittest @@ -49,7 +50,7 @@ (INF, NAN) ]] -class CMathTests(unittest.TestCase): +class CMathTests(ComplexesAreIdenticalMixin, unittest.TestCase): # list of all functions in cmath test_functions = [getattr(cmath, fname) for fname in [ 'acos', 'acosh', 'asin', 'asinh', 'atan', 'atanh', @@ -65,39 +66,6 @@ def 
setUp(self): def tearDown(self): self.test_values.close() - def assertFloatIdentical(self, x, y): - """Fail unless floats x and y are identical, in the sense that: - (1) both x and y are nans, or - (2) both x and y are infinities, with the same sign, or - (3) both x and y are zeros, with the same sign, or - (4) x and y are both finite and nonzero, and x == y - - """ - msg = 'floats {!r} and {!r} are not identical' - - if math.isnan(x) or math.isnan(y): - if math.isnan(x) and math.isnan(y): - return - elif x == y: - if x != 0.0: - return - # both zero; check that signs match - elif math.copysign(1.0, x) == math.copysign(1.0, y): - return - else: - msg += ': zeros have different signs' - self.fail(msg.format(x, y)) - - def assertComplexIdentical(self, x, y): - """Fail unless complex numbers x and y have equal values and signs. - - In particular, if x and y both have real (or imaginary) part - zero, but the zeros have different signs, this test will fail. - - """ - self.assertFloatIdentical(x.real, y.real) - self.assertFloatIdentical(x.imag, y.imag) - def rAssertAlmostEqual(self, a, b, rel_err = 2e-15, abs_err = 5e-323, msg=None): """Fail if the two floating-point numbers are not almost equal. @@ -555,7 +523,7 @@ def test_isinf(self): @requires_IEEE_754 def testTanhSign(self): for z in complex_zeros: - self.assertComplexIdentical(cmath.tanh(z), z) + self.assertComplexesAreIdentical(cmath.tanh(z), z) # The algorithm used for atan and atanh makes use of the system # log1p function; If that system function doesn't respect the sign @@ -564,12 +532,12 @@ def testTanhSign(self): @requires_IEEE_754 def testAtanSign(self): for z in complex_zeros: - self.assertComplexIdentical(cmath.atan(z), z) + self.assertComplexesAreIdentical(cmath.atan(z), z) @requires_IEEE_754 def testAtanhSign(self): for z in complex_zeros: - self.assertComplexIdentical(cmath.atanh(z), z) + self.assertComplexesAreIdentical(cmath.atanh(z), z) class IsCloseTests(test_math.IsCloseTests): diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 9624d35d0c3..dc420f33c23 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -879,19 +879,29 @@ def test_pythondevmode_env(self): self.assertEqual(proc.stdout.rstrip(), 'True') self.assertEqual(proc.returncode, 0, proc) - @unittest.skipUnless(support.Py_GIL_DISABLED, - "PYTHON_GIL and -X gil only supported in Py_GIL_DISABLED builds") def test_python_gil(self): cases = [ # (env, opt, expected, msg) - (None, None, 'None', "no options set"), - ('0', None, '0', "PYTHON_GIL=0"), ('1', None, '1', "PYTHON_GIL=1"), - ('1', '0', '0', "-X gil=0 overrides PYTHON_GIL=1"), - (None, '0', '0', "-X gil=0"), (None, '1', '1', "-X gil=1"), ] + if support.Py_GIL_DISABLED: + cases.extend( + [ + (None, None, 'None', "no options set"), + ('0', None, '0', "PYTHON_GIL=0"), + ('1', '0', '0', "-X gil=0 overrides PYTHON_GIL=1"), + (None, '0', '0', "-X gil=0"), + ] + ) + else: + cases.extend( + [ + (None, None, '1', '-X gil=0 (unsupported by this build)'), + ('1', None, '1', 'PYTHON_GIL=0 (unsupported by this build)'), + ] + ) code = "import sys; print(sys.flags.gil)" environ = dict(os.environ) diff --git a/Lib/test/test_code_module.py b/Lib/test/test_code_module.py index 259778a5cad..37c7bc772ed 100644 --- a/Lib/test/test_code_module.py +++ b/Lib/test/test_code_module.py @@ -1,5 +1,6 @@ "Test InteractiveConsole and InteractiveInterpreter from code module" import sys +import traceback import unittest from textwrap import dedent from contextlib import ExitStack @@ -30,6 +31,7 @@ def 
mock_sys(self): class TestInteractiveConsole(unittest.TestCase, MockSys): + maxDiff = None def setUp(self): self.console = code.InteractiveConsole() @@ -61,21 +63,151 @@ def test_console_stderr(self): raise AssertionError("no console stdout") def test_syntax_error(self): - self.infunc.side_effect = ["undefined", EOFError('Finished')] + self.infunc.side_effect = ["def f():", + " x = ?", + "", + EOFError('Finished')] self.console.interact() - for call in self.stderr.method_calls: - if 'NameError' in ''.join(call[1]): - break - else: - raise AssertionError("No syntax error from console") + output = ''.join(''.join(call[1]) for call in self.stderr.method_calls) + output = output[output.index('(InteractiveConsole)'):] + output = output[:output.index('\nnow exiting')] + self.assertEqual(output.splitlines()[1:], [ + ' File "", line 2', + ' x = ?', + ' ^', + 'SyntaxError: invalid syntax']) + self.assertIs(self.sysmod.last_type, SyntaxError) + self.assertIs(type(self.sysmod.last_value), SyntaxError) + self.assertIsNone(self.sysmod.last_traceback) + self.assertIsNone(self.sysmod.last_value.__traceback__) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) + + def test_indentation_error(self): + self.infunc.side_effect = [" 1", EOFError('Finished')] + self.console.interact() + output = ''.join(''.join(call[1]) for call in self.stderr.method_calls) + output = output[output.index('(InteractiveConsole)'):] + output = output[:output.index('\nnow exiting')] + self.assertEqual(output.splitlines()[1:], [ + ' File "", line 1', + ' 1', + 'IndentationError: unexpected indent']) + self.assertIs(self.sysmod.last_type, IndentationError) + self.assertIs(type(self.sysmod.last_value), IndentationError) + self.assertIsNone(self.sysmod.last_traceback) + self.assertIsNone(self.sysmod.last_value.__traceback__) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) + + def test_unicode_error(self): + self.infunc.side_effect = ["'\ud800'", EOFError('Finished')] + self.console.interact() + output = ''.join(''.join(call[1]) for call in self.stderr.method_calls) + output = output[output.index('(InteractiveConsole)'):] + output = output[output.index('\n') + 1:] + self.assertTrue(output.startswith('UnicodeEncodeError: '), output) + self.assertIs(self.sysmod.last_type, UnicodeEncodeError) + self.assertIs(type(self.sysmod.last_value), UnicodeEncodeError) + self.assertIsNone(self.sysmod.last_traceback) + self.assertIsNone(self.sysmod.last_value.__traceback__) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) def test_sysexcepthook(self): - self.infunc.side_effect = ["raise ValueError('')", + self.infunc.side_effect = ["def f():", + " raise ValueError('BOOM!')", + "", + "f()", + EOFError('Finished')] + hook = mock.Mock() + self.sysmod.excepthook = hook + self.console.interact() + hook.assert_called() + hook.assert_called_with(self.sysmod.last_type, + self.sysmod.last_value, + self.sysmod.last_traceback) + self.assertIs(self.sysmod.last_type, ValueError) + self.assertIs(type(self.sysmod.last_value), ValueError) + self.assertIs(self.sysmod.last_traceback, self.sysmod.last_value.__traceback__) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) + self.assertEqual(traceback.format_exception(self.sysmod.last_exc), [ + 'Traceback (most recent call last):\n', + ' File "", line 1, in \n', + ' File "", line 2, in f\n', + 'ValueError: BOOM!\n']) + + def test_sysexcepthook_syntax_error(self): + self.infunc.side_effect = ["def f():", + " x = ?", + "", EOFError('Finished')] hook = mock.Mock() 
self.sysmod.excepthook = hook self.console.interact() - self.assertTrue(hook.called) + hook.assert_called() + hook.assert_called_with(self.sysmod.last_type, + self.sysmod.last_value, + self.sysmod.last_traceback) + self.assertIs(self.sysmod.last_type, SyntaxError) + self.assertIs(type(self.sysmod.last_value), SyntaxError) + self.assertIsNone(self.sysmod.last_traceback) + self.assertIsNone(self.sysmod.last_value.__traceback__) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) + self.assertEqual(traceback.format_exception(self.sysmod.last_exc), [ + ' File "", line 2\n', + ' x = ?\n', + ' ^\n', + 'SyntaxError: invalid syntax\n']) + + def test_sysexcepthook_indentation_error(self): + self.infunc.side_effect = [" 1", EOFError('Finished')] + hook = mock.Mock() + self.sysmod.excepthook = hook + self.console.interact() + hook.assert_called() + hook.assert_called_with(self.sysmod.last_type, + self.sysmod.last_value, + self.sysmod.last_traceback) + self.assertIs(self.sysmod.last_type, IndentationError) + self.assertIs(type(self.sysmod.last_value), IndentationError) + self.assertIsNone(self.sysmod.last_traceback) + self.assertIsNone(self.sysmod.last_value.__traceback__) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) + self.assertEqual(traceback.format_exception(self.sysmod.last_exc), [ + ' File "", line 1\n', + ' 1\n', + 'IndentationError: unexpected indent\n']) + + def test_sysexcepthook_crashing_doesnt_close_repl(self): + self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')] + self.sysmod.excepthook = 1 + self.console.interact() + self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0]) + error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write') + self.assertIn("Error in sys.excepthook:", error) + self.assertEqual(error.count("'int' object is not callable"), 1) + self.assertIn("Original exception was:", error) + self.assertIn("division by zero", error) + + def test_sysexcepthook_raising_BaseException(self): + self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')] + s = "not so fast" + def raise_base(*args, **kwargs): + raise BaseException(s) + self.sysmod.excepthook = raise_base + self.console.interact() + self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0]) + error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write') + self.assertIn("Error in sys.excepthook:", error) + self.assertEqual(error.count("not so fast"), 1) + self.assertIn("Original exception was:", error) + self.assertIn("division by zero", error) + + def test_sysexcepthook_raising_SystemExit_gets_through(self): + self.infunc.side_effect = ["1/0"] + def raise_base(*args, **kwargs): + raise SystemExit + self.sysmod.excepthook = raise_base + with self.assertRaises(SystemExit): + self.console.interact() def test_banner(self): # with banner @@ -134,6 +266,11 @@ def test_cause_tb(self): ValueError """) self.assertIn(expected, output) + self.assertIs(self.sysmod.last_type, ValueError) + self.assertIs(type(self.sysmod.last_value), ValueError) + self.assertIs(self.sysmod.last_traceback, self.sysmod.last_value.__traceback__) + self.assertIsNotNone(self.sysmod.last_traceback) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) def test_context_tb(self): self.infunc.side_effect = ["try: ham\nexcept: eggs\n", @@ -152,6 +289,11 @@ def test_context_tb(self): NameError: name 'eggs' is not defined """) self.assertIn(expected, output) + self.assertIs(self.sysmod.last_type, 
NameError) + self.assertIs(type(self.sysmod.last_value), NameError) + self.assertIs(self.sysmod.last_traceback, self.sysmod.last_value.__traceback__) + self.assertIsNotNone(self.sysmod.last_traceback) + self.assertIs(self.sysmod.last_exc, self.sysmod.last_value) class TestInteractiveConsoleLocalExit(unittest.TestCase, MockSys): diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index e29c95e8bf2..ed4e6265eac 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -1,6 +1,7 @@ import contextlib import dis import io +import itertools import math import opcode import os @@ -475,6 +476,19 @@ def test_dead_code_with_except_handler_compiles(self): x = 2 """), '', 'exec') + def test_try_except_in_while_with_chained_condition_compiles(self): + # see gh-124871 + compile(textwrap.dedent(""" + name_1, name_2, name_3 = 1, 2, 3 + while name_3 <= name_2 > name_1: + try: + raise + except: + pass + finally: + pass + """), '', 'exec') + def test_compile_invalid_namedexpr(self): # gh-109351 m = ast.Module( @@ -870,6 +884,32 @@ def unused_code_at_end(): 'RETURN_CONST', list(dis.get_instructions(unused_code_at_end))[-1].opname) + @support.cpython_only + def test_docstring(self): + src = textwrap.dedent(""" + def with_docstring(): + "docstring" + + def with_fstring(): + f"not docstring" + + def with_const_expression(): + "also" + " not docstring" + """) + + for opt in [0, 1, 2]: + with self.subTest(opt=opt): + code = compile(src, "", "exec", optimize=opt) + ns = {} + exec(code, ns) + + if opt < 2: + self.assertEqual(ns['with_docstring'].__doc__, "docstring") + else: + self.assertIsNone(ns['with_docstring'].__doc__) + self.assertIsNone(ns['with_fstring'].__doc__) + self.assertIsNone(ns['with_const_expression'].__doc__) + @support.cpython_only def test_docstring_omitted(self): # See gh-115347 @@ -1172,7 +1212,7 @@ def return_genexp(): x in y) - genexp_lines = [0, 2, 0] + genexp_lines = [0, 4, 2, 0, 4] genexp_code = return_genexp.__code__.co_consts[1] code_lines = self.get_code_lines(genexp_code) @@ -1627,7 +1667,7 @@ def test_multiline_generator_expression(self): self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', line=1, end_line=2, column=1, end_column=8, occurrence=1) self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_CONST', - line=1, end_line=6, column=0, end_column=32, occurrence=1) + line=4, end_line=4, column=7, end_column=14, occurrence=1) def test_multiline_async_generator_expression(self): snippet = textwrap.dedent("""\ @@ -2055,13 +2095,49 @@ def test_lambda_return_position(self): self.assertGreaterEqual(end_col, start_col) self.assertLessEqual(end_col, code_end) - -class TestExpectedAttributes(unittest.TestCase): + def test_return_in_with_positions(self): + # See gh-98442 + def f(): + with xyz: + 1 + 2 + 3 + 4 + return R + + # All instructions should have locations on a single line + for instr in dis.get_instructions(f): + start_line, end_line, _, _ = instr.positions + self.assertEqual(start_line, end_line) + + # Expect three load None instructions for the no-exception __exit__ call, + # and one RETURN_VALUE. + # They should all have the locations of the context manager ('xyz'). 
+ + load_none = [instr for instr in dis.get_instructions(f) if + instr.opname == 'LOAD_CONST' and instr.argval is None] + return_value = [instr for instr in dis.get_instructions(f) if + instr.opname == 'RETURN_VALUE'] + + self.assertEqual(len(load_none), 3) + self.assertEqual(len(return_value), 1) + for instr in load_none + return_value: + start_line, end_line, start_col, end_col = instr.positions + self.assertEqual(start_line, f.__code__.co_firstlineno + 1) + self.assertEqual(end_line, f.__code__.co_firstlineno + 1) + self.assertEqual(start_col, 17) + self.assertEqual(end_col, 20) + + +class TestStaticAttributes(unittest.TestCase): def test_basic(self): class C: def f(self): self.a = self.b = 42 + # read fields are not included + self.f() + self.arr[3] self.assertIsInstance(C.__static_attributes__, tuple) self.assertEqual(sorted(C.__static_attributes__), ['a', 'b']) @@ -2557,6 +2633,22 @@ def test_nested(self): self.compare_instructions(seq, [('LOAD_CONST', 1, 1, 0, 0, 0)]) self.compare_instructions(seq.get_nested()[0], [('LOAD_CONST', 2, 2, 0, 0, 0)]) + def test_static_attributes_are_sorted(self): + code = ( + 'class T:\n' + ' def __init__(self):\n' + ' self.{V1} = 10\n' + ' self.{V2} = 10\n' + ' def foo(self):\n' + ' self.{V3} = 10\n' + ) + attributes = ("a", "b", "c") + for perm in itertools.permutations(attributes): + var_names = {f'V{i + 1}': name for i, name in enumerate(perm)} + ns = run_code(code.format(**var_names)) + t = ns['T'] + self.assertEqual(t.__static_attributes__, attributes) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py index fa3017b24e1..d5e58e3c6bc 100644 --- a/Lib/test/test_complex.py +++ b/Lib/test/test_complex.py @@ -1,11 +1,12 @@ import unittest import sys from test import support +from test.support.testcase import ComplexesAreIdenticalMixin from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) from random import random -from math import atan2, isnan, copysign +from math import isnan, copysign import operator INF = float("inf") @@ -21,7 +22,28 @@ (1, 0+0j), ) -class ComplexTest(unittest.TestCase): +class WithIndex: + def __init__(self, value): + self.value = value + def __index__(self): + return self.value + +class WithFloat: + def __init__(self, value): + self.value = value + def __float__(self): + return self.value + +class ComplexSubclass(complex): + pass + +class WithComplex: + def __init__(self, value): + self.value = value + def __complex__(self): + return self.value + +class ComplexTest(ComplexesAreIdenticalMixin, unittest.TestCase): def assertAlmostEqual(self, a, b): if isinstance(a, complex): @@ -50,29 +72,6 @@ def assertCloseAbs(self, x, y, eps=1e-9): # check that relative difference < eps self.assertTrue(abs((x-y)/y) < eps) - def assertFloatsAreIdentical(self, x, y): - """assert that floats x and y are identical, in the sense that: - (1) both x and y are nans, or - (2) both x and y are infinities, with the same sign, or - (3) both x and y are zeros, with the same sign, or - (4) x and y are both finite and nonzero, and x == y - - """ - msg = 'floats {!r} and {!r} are not identical' - - if isnan(x) or isnan(y): - if isnan(x) and isnan(y): - return - elif x == y: - if x != 0.0: - return - # both zero; check that signs match - elif copysign(1.0, x) == copysign(1.0, y): - return - else: - msg += ': zeros have different signs' - self.fail(msg.format(x, y)) - def assertClose(self, x, y, eps=1e-9): """Return true iff complexes x and y "are close".""" 
self.assertCloseAbs(x.real, y.real, eps) @@ -340,137 +339,90 @@ def test_conjugate(self): self.assertClose(complex(5.3, 9.8).conjugate(), 5.3-9.8j) def test_constructor(self): - class NS: - def __init__(self, value): self.value = value - def __complex__(self): return self.value - self.assertEqual(complex(NS(1+10j)), 1+10j) - self.assertRaises(TypeError, complex, NS(None)) - self.assertRaises(TypeError, complex, {}) - self.assertRaises(TypeError, complex, NS(1.5)) - self.assertRaises(TypeError, complex, NS(1)) - self.assertRaises(TypeError, complex, object()) - self.assertRaises(TypeError, complex, NS(4.25+0.5j), object()) - - self.assertAlmostEqual(complex("1+10j"), 1+10j) - self.assertAlmostEqual(complex(10), 10+0j) - self.assertAlmostEqual(complex(10.0), 10+0j) - self.assertAlmostEqual(complex(10), 10+0j) - self.assertAlmostEqual(complex(10+0j), 10+0j) - self.assertAlmostEqual(complex(1,10), 1+10j) - self.assertAlmostEqual(complex(1,10), 1+10j) - self.assertAlmostEqual(complex(1,10.0), 1+10j) - self.assertAlmostEqual(complex(1,10), 1+10j) - self.assertAlmostEqual(complex(1,10), 1+10j) - self.assertAlmostEqual(complex(1,10.0), 1+10j) - self.assertAlmostEqual(complex(1.0,10), 1+10j) - self.assertAlmostEqual(complex(1.0,10), 1+10j) - self.assertAlmostEqual(complex(1.0,10.0), 1+10j) - self.assertAlmostEqual(complex(3.14+0j), 3.14+0j) - self.assertAlmostEqual(complex(3.14), 3.14+0j) - self.assertAlmostEqual(complex(314), 314.0+0j) - self.assertAlmostEqual(complex(314), 314.0+0j) - self.assertAlmostEqual(complex(3.14+0j, 0j), 3.14+0j) - self.assertAlmostEqual(complex(3.14, 0.0), 3.14+0j) - self.assertAlmostEqual(complex(314, 0), 314.0+0j) - self.assertAlmostEqual(complex(314, 0), 314.0+0j) - self.assertAlmostEqual(complex(0j, 3.14j), -3.14+0j) - self.assertAlmostEqual(complex(0.0, 3.14j), -3.14+0j) - self.assertAlmostEqual(complex(0j, 3.14), 3.14j) - self.assertAlmostEqual(complex(0.0, 3.14), 3.14j) - self.assertAlmostEqual(complex("1"), 1+0j) - self.assertAlmostEqual(complex("1j"), 1j) - self.assertAlmostEqual(complex(), 0) - self.assertAlmostEqual(complex("-1"), -1) - self.assertAlmostEqual(complex("+1"), +1) - self.assertAlmostEqual(complex("(1+2j)"), 1+2j) - self.assertAlmostEqual(complex("(1.3+2.2j)"), 1.3+2.2j) - self.assertAlmostEqual(complex("3.14+1J"), 3.14+1j) - self.assertAlmostEqual(complex(" ( +3.14-6J )"), 3.14-6j) - self.assertAlmostEqual(complex(" ( +3.14-J )"), 3.14-1j) - self.assertAlmostEqual(complex(" ( +3.14+j )"), 3.14+1j) - self.assertAlmostEqual(complex("J"), 1j) - self.assertAlmostEqual(complex("( j )"), 1j) - self.assertAlmostEqual(complex("+J"), 1j) - self.assertAlmostEqual(complex("( -j)"), -1j) - self.assertAlmostEqual(complex('1e-500'), 0.0 + 0.0j) - self.assertAlmostEqual(complex('-1e-500j'), 0.0 - 0.0j) - self.assertAlmostEqual(complex('-1e-500+1e-500j'), -0.0 + 0.0j) - self.assertEqual(complex('1-1j'), 1.0 - 1j) - self.assertEqual(complex('1J'), 1j) - - class complex2(complex): pass - self.assertAlmostEqual(complex(complex2(1+1j)), 1+1j) - self.assertAlmostEqual(complex(real=17, imag=23), 17+23j) - self.assertAlmostEqual(complex(real=17+23j), 17+23j) - self.assertAlmostEqual(complex(real=17+23j, imag=23), 17+46j) - self.assertAlmostEqual(complex(real=1+2j, imag=3+4j), -3+5j) + def check(z, x, y): + self.assertIs(type(z), complex) + self.assertFloatsAreIdentical(z.real, x) + self.assertFloatsAreIdentical(z.imag, y) + + check(complex(), 0.0, 0.0) + check(complex(10), 10.0, 0.0) + check(complex(4.25), 4.25, 0.0) + check(complex(4.25+0j), 4.25, 0.0) + 
check(complex(4.25+0.5j), 4.25, 0.5) + check(complex(ComplexSubclass(4.25+0.5j)), 4.25, 0.5) + check(complex(WithComplex(4.25+0.5j)), 4.25, 0.5) + + check(complex(1, 10), 1.0, 10.0) + check(complex(1, 10.0), 1.0, 10.0) + check(complex(1, 4.25), 1.0, 4.25) + check(complex(1.0, 10), 1.0, 10.0) + check(complex(4.25, 10), 4.25, 10.0) + check(complex(1.0, 10.0), 1.0, 10.0) + check(complex(4.25, 0.5), 4.25, 0.5) + + check(complex(4.25+0j, 0), 4.25, 0.0) + check(complex(ComplexSubclass(4.25+0j), 0), 4.25, 0.0) + check(complex(WithComplex(4.25+0j), 0), 4.25, 0.0) + check(complex(4.25j, 0), 0.0, 4.25) + check(complex(0j, 4.25), 0.0, 4.25) + check(complex(0, 4.25+0j), 0.0, 4.25) + check(complex(0, ComplexSubclass(4.25+0j)), 0.0, 4.25) + with self.assertRaisesRegex(TypeError, + "second argument must be a number, not 'WithComplex'"): + complex(0, WithComplex(4.25+0j)) + check(complex(0.0, 4.25j), -4.25, 0.0) + check(complex(4.25+0j, 0j), 4.25, 0.0) + check(complex(4.25j, 0j), 0.0, 4.25) + check(complex(0j, 4.25+0j), 0.0, 4.25) + check(complex(0j, 4.25j), -4.25, 0.0) + + check(complex(real=4.25), 4.25, 0.0) + check(complex(real=4.25+0j), 4.25, 0.0) + check(complex(real=4.25+1.5j), 4.25, 1.5) + check(complex(imag=1.5), 0.0, 1.5) + check(complex(real=4.25, imag=1.5), 4.25, 1.5) + check(complex(4.25, imag=1.5), 4.25, 1.5) # check that the sign of a zero in the real or imaginary part - # is preserved when constructing from two floats. (These checks - # are harmless on systems without support for signed zeros.) - def split_zeros(x): - """Function that produces different results for 0. and -0.""" - return atan2(x, -1.) - - self.assertEqual(split_zeros(complex(1., 0.).imag), split_zeros(0.)) - self.assertEqual(split_zeros(complex(1., -0.).imag), split_zeros(-0.)) - self.assertEqual(split_zeros(complex(0., 1.).real), split_zeros(0.)) - self.assertEqual(split_zeros(complex(-0., 1.).real), split_zeros(-0.)) - - c = 3.14 + 1j - self.assertTrue(complex(c) is c) - del c - - self.assertRaises(TypeError, complex, "1", "1") - self.assertRaises(TypeError, complex, 1, "1") - - # SF bug 543840: complex(string) accepts strings with \0 - # Fixed in 2.3. - self.assertRaises(ValueError, complex, '1+1j\0j') - - self.assertRaises(TypeError, int, 5+3j) - self.assertRaises(TypeError, int, 5+3j) - self.assertRaises(TypeError, float, 5+3j) - self.assertRaises(ValueError, complex, "") - self.assertRaises(TypeError, complex, None) - self.assertRaisesRegex(TypeError, "not 'NoneType'", complex, None) - self.assertRaises(ValueError, complex, "\0") - self.assertRaises(ValueError, complex, "3\09") - self.assertRaises(TypeError, complex, "1", "2") - self.assertRaises(TypeError, complex, "1", 42) - self.assertRaises(TypeError, complex, 1, "2") - self.assertRaises(ValueError, complex, "1+") - self.assertRaises(ValueError, complex, "1+1j+1j") - self.assertRaises(ValueError, complex, "--") - self.assertRaises(ValueError, complex, "(1+2j") - self.assertRaises(ValueError, complex, "1+2j)") - self.assertRaises(ValueError, complex, "1+(2j)") - self.assertRaises(ValueError, complex, "(1+2j)123") - self.assertRaises(ValueError, complex, "x") - self.assertRaises(ValueError, complex, "1j+2") - self.assertRaises(ValueError, complex, "1e1ej") - self.assertRaises(ValueError, complex, "1e++1ej") - self.assertRaises(ValueError, complex, ")1+2j(") - self.assertRaisesRegex( - TypeError, + # is preserved when constructing from two floats. 
+ for x in 1.0, -1.0: + for y in 0.0, -0.0: + check(complex(x, y), x, y) + check(complex(y, x), y, x) + + c = complex(4.25, 1.5) + self.assertIs(complex(c), c) + c2 = ComplexSubclass(c) + self.assertEqual(c2, c) + self.assertIs(type(c2), ComplexSubclass) + del c, c2 + + self.assertRaisesRegex(TypeError, "first argument must be a string or a number, not 'dict'", - complex, {1:2}, 1) - self.assertRaisesRegex( - TypeError, + complex, {}) + self.assertRaisesRegex(TypeError, + "first argument must be a string or a number, not 'NoneType'", + complex, None) + self.assertRaisesRegex(TypeError, + "first argument must be a string or a number, not 'dict'", + complex, {1:2}, 0) + self.assertRaisesRegex(TypeError, + "can't take second arg if first is a string", + complex, '1', 0) + self.assertRaisesRegex(TypeError, "second argument must be a number, not 'dict'", - complex, 1, {1:2}) - # the following three are accepted by Python 2.6 - self.assertRaises(ValueError, complex, "1..1j") - self.assertRaises(ValueError, complex, "1.11.1j") - self.assertRaises(ValueError, complex, "1e1.1j") - - # check that complex accepts long unicode strings - self.assertEqual(type(complex("1"*500)), complex) - # check whitespace processing - self.assertEqual(complex('\N{EM SPACE}(\N{EN SPACE}1+1j ) '), 1+1j) - # Invalid unicode string - # See bpo-34087 - self.assertRaises(ValueError, complex, '\u3053\u3093\u306b\u3061\u306f') + complex, 0, {1:2}) + self.assertRaisesRegex(TypeError, + "second arg can't be a string", + complex, 0, '1') + + self.assertRaises(TypeError, complex, WithComplex(1.5)) + self.assertRaises(TypeError, complex, WithComplex(1)) + self.assertRaises(TypeError, complex, WithComplex(None)) + self.assertRaises(TypeError, complex, WithComplex(4.25+0j), object()) + self.assertRaises(TypeError, complex, WithComplex(1.5), object()) + self.assertRaises(TypeError, complex, WithComplex(1), object()) + self.assertRaises(TypeError, complex, WithComplex(None), object()) class EvilExc(Exception): pass @@ -481,33 +433,33 @@ def __complex__(self): self.assertRaises(EvilExc, complex, evilcomplex()) - class float2: - def __init__(self, value): - self.value = value - def __float__(self): - return self.value - - self.assertAlmostEqual(complex(float2(42.)), 42) - self.assertAlmostEqual(complex(real=float2(17.), imag=float2(23.)), 17+23j) - self.assertRaises(TypeError, complex, float2(None)) - - class MyIndex: - def __init__(self, value): - self.value = value - def __index__(self): - return self.value - - self.assertAlmostEqual(complex(MyIndex(42)), 42.0+0.0j) - self.assertAlmostEqual(complex(123, MyIndex(42)), 123.0+42.0j) - self.assertRaises(OverflowError, complex, MyIndex(2**2000)) - self.assertRaises(OverflowError, complex, 123, MyIndex(2**2000)) + check(complex(WithFloat(4.25)), 4.25, 0.0) + check(complex(WithFloat(4.25), 1.5), 4.25, 1.5) + check(complex(1.5, WithFloat(4.25)), 1.5, 4.25) + self.assertRaises(TypeError, complex, WithFloat(42)) + self.assertRaises(TypeError, complex, WithFloat(42), 1.5) + self.assertRaises(TypeError, complex, 1.5, WithFloat(42)) + self.assertRaises(TypeError, complex, WithFloat(None)) + self.assertRaises(TypeError, complex, WithFloat(None), 1.5) + self.assertRaises(TypeError, complex, 1.5, WithFloat(None)) + + check(complex(WithIndex(42)), 42.0, 0.0) + check(complex(WithIndex(42), 1.5), 42.0, 1.5) + check(complex(1.5, WithIndex(42)), 1.5, 42.0) + self.assertRaises(OverflowError, complex, WithIndex(2**2000)) + self.assertRaises(OverflowError, complex, WithIndex(2**2000), 1.5) + 
self.assertRaises(OverflowError, complex, 1.5, WithIndex(2**2000)) + self.assertRaises(TypeError, complex, WithIndex(None)) + self.assertRaises(TypeError, complex, WithIndex(None), 1.5) + self.assertRaises(TypeError, complex, 1.5, WithIndex(None)) class MyInt: def __int__(self): return 42 self.assertRaises(TypeError, complex, MyInt()) - self.assertRaises(TypeError, complex, 123, MyInt()) + self.assertRaises(TypeError, complex, MyInt(), 1.5) + self.assertRaises(TypeError, complex, 1.5, MyInt()) class complex0(complex): """Test usage of __complex__() when inheriting from 'complex'""" @@ -527,9 +479,9 @@ class complex2(complex): def __complex__(self): return None - self.assertEqual(complex(complex0(1j)), 42j) + check(complex(complex0(1j)), 0.0, 42.0) with self.assertWarns(DeprecationWarning): - self.assertEqual(complex(complex1(1j)), 2j) + check(complex(complex1(1j)), 0.0, 2.0) self.assertRaises(TypeError, complex, complex2(1j)) def test___complex__(self): @@ -537,36 +489,93 @@ def test___complex__(self): self.assertEqual(z.__complex__(), z) self.assertEqual(type(z.__complex__()), complex) - class complex_subclass(complex): - pass - - z = complex_subclass(3 + 4j) + z = ComplexSubclass(3 + 4j) self.assertEqual(z.__complex__(), 3 + 4j) self.assertEqual(type(z.__complex__()), complex) @support.requires_IEEE_754 def test_constructor_special_numbers(self): - class complex2(complex): - pass for x in 0.0, -0.0, INF, -INF, NAN: for y in 0.0, -0.0, INF, -INF, NAN: with self.subTest(x=x, y=y): z = complex(x, y) self.assertFloatsAreIdentical(z.real, x) self.assertFloatsAreIdentical(z.imag, y) - z = complex2(x, y) - self.assertIs(type(z), complex2) + z = ComplexSubclass(x, y) + self.assertIs(type(z), ComplexSubclass) self.assertFloatsAreIdentical(z.real, x) self.assertFloatsAreIdentical(z.imag, y) - z = complex(complex2(x, y)) + z = complex(ComplexSubclass(x, y)) self.assertIs(type(z), complex) self.assertFloatsAreIdentical(z.real, x) self.assertFloatsAreIdentical(z.imag, y) - z = complex2(complex(x, y)) - self.assertIs(type(z), complex2) + z = ComplexSubclass(complex(x, y)) + self.assertIs(type(z), ComplexSubclass) self.assertFloatsAreIdentical(z.real, x) self.assertFloatsAreIdentical(z.imag, y) + def test_constructor_from_string(self): + def check(z, x, y): + self.assertIs(type(z), complex) + self.assertFloatsAreIdentical(z.real, x) + self.assertFloatsAreIdentical(z.imag, y) + + check(complex("1"), 1.0, 0.0) + check(complex("1j"), 0.0, 1.0) + check(complex("-1"), -1.0, 0.0) + check(complex("+1"), 1.0, 0.0) + check(complex("1+2j"), 1.0, 2.0) + check(complex("(1+2j)"), 1.0, 2.0) + check(complex("(1.5+4.25j)"), 1.5, 4.25) + check(complex("4.25+1J"), 4.25, 1.0) + check(complex(" ( +4.25-6J )"), 4.25, -6.0) + check(complex(" ( +4.25-J )"), 4.25, -1.0) + check(complex(" ( +4.25+j )"), 4.25, 1.0) + check(complex("J"), 0.0, 1.0) + check(complex("( j )"), 0.0, 1.0) + check(complex("+J"), 0.0, 1.0) + check(complex("( -j)"), 0.0, -1.0) + check(complex('1-1j'), 1.0, -1.0) + check(complex('1J'), 0.0, 1.0) + + check(complex('1e-500'), 0.0, 0.0) + check(complex('-1e-500j'), 0.0, -0.0) + check(complex('1e-500+1e-500j'), 0.0, 0.0) + check(complex('-1e-500+1e-500j'), -0.0, 0.0) + check(complex('1e-500-1e-500j'), 0.0, -0.0) + check(complex('-1e-500-1e-500j'), -0.0, -0.0) + + # SF bug 543840: complex(string) accepts strings with \0 + # Fixed in 2.3. 
+ self.assertRaises(ValueError, complex, '1+1j\0j') + self.assertRaises(ValueError, complex, "") + self.assertRaises(ValueError, complex, "\0") + self.assertRaises(ValueError, complex, "3\09") + self.assertRaises(ValueError, complex, "1+") + self.assertRaises(ValueError, complex, "1+1j+1j") + self.assertRaises(ValueError, complex, "--") + self.assertRaises(ValueError, complex, "(1+2j") + self.assertRaises(ValueError, complex, "1+2j)") + self.assertRaises(ValueError, complex, "1+(2j)") + self.assertRaises(ValueError, complex, "(1+2j)123") + self.assertRaises(ValueError, complex, "x") + self.assertRaises(ValueError, complex, "1j+2") + self.assertRaises(ValueError, complex, "1e1ej") + self.assertRaises(ValueError, complex, "1e++1ej") + self.assertRaises(ValueError, complex, ")1+2j(") + # the following three are accepted by Python 2.6 + self.assertRaises(ValueError, complex, "1..1j") + self.assertRaises(ValueError, complex, "1.11.1j") + self.assertRaises(ValueError, complex, "1e1.1j") + + # check that complex accepts long unicode strings + self.assertIs(type(complex("1"*500)), complex) + # check whitespace processing + self.assertEqual(complex('\N{EM SPACE}(\N{EN SPACE}1+1j ) '), 1+1j) + # Invalid unicode string + # See bpo-34087 + self.assertRaises(ValueError, complex, '\u3053\u3093\u306b\u3061\u306f') + def test_constructor_negative_nans_from_string(self): self.assertEqual(copysign(1., complex("-nan").real), -1.) self.assertEqual(copysign(1., complex("-nanj").imag), -1.) @@ -588,7 +597,7 @@ def test_underscores(self): def test_hash(self): for x in range(-30, 30): self.assertEqual(hash(x), hash(complex(x, 0))) - x /= 3.0 # now check against floating point + x /= 3.0 # now check against floating-point self.assertEqual(hash(x), hash(complex(x, 0.))) self.assertNotEqual(hash(2000005 - 1j), -1) @@ -645,9 +654,6 @@ def test(v, expected, test_fn=self.assertEqual): test(complex(-0., -0.), "(-0-0j)") def test_pos(self): - class ComplexSubclass(complex): - pass - self.assertEqual(+(1+6j), 1+6j) self.assertEqual(+ComplexSubclass(1, 6), 1+6j) self.assertIs(type(+ComplexSubclass(1, 6)), complex) @@ -667,8 +673,8 @@ def test_getnewargs(self): def test_plus_minus_0j(self): # test that -0j and 0j literals are not identified z1, z2 = 0j, -0j - self.assertEqual(atan2(z1.imag, -1.), atan2(0., -1.)) - self.assertEqual(atan2(z2.imag, -1.), atan2(-0., -1.)) + self.assertFloatsAreIdentical(z1.imag, 0.0) + self.assertFloatsAreIdentical(z2.imag, -0.0) @support.requires_IEEE_754 def test_negated_imaginary_literal(self): @@ -703,8 +709,7 @@ def test_repr_roundtrip(self): for y in vals: z = complex(x, y) roundtrip = complex(repr(z)) - self.assertFloatsAreIdentical(z.real, roundtrip.real) - self.assertFloatsAreIdentical(z.imag, roundtrip.imag) + self.assertComplexesAreIdentical(z, roundtrip) # if we predefine some constants, then eval(repr(z)) should # also work, except that it might change the sign of zeros diff --git a/Lib/test/test_concurrent_futures/test_init.py b/Lib/test/test_concurrent_futures/test_init.py index a36f592b79b..df640929309 100644 --- a/Lib/test/test_concurrent_futures/test_init.py +++ b/Lib/test/test_concurrent_futures/test_init.py @@ -139,6 +139,7 @@ def _test(self, test_class): def test_spawn(self): self._test(ProcessPoolSpawnFailingInitializerTest) + @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True) def test_forkserver(self): self._test(ProcessPoolForkserverFailingInitializerTest) diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py index 
36c3abca80f..cf651959803 100644 --- a/Lib/test/test_contextlib.py +++ b/Lib/test/test_contextlib.py @@ -444,12 +444,10 @@ class FileContextTestCase(unittest.TestCase): def testWithOpen(self): tfn = tempfile.mktemp() try: - f = None with open(tfn, "w", encoding="utf-8") as f: self.assertFalse(f.closed) f.write("Booh\n") self.assertTrue(f.closed) - f = None with self.assertRaises(ZeroDivisionError): with open(tfn, "r", encoding="utf-8") as f: self.assertFalse(f.closed) diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py index 89102373759..3dec64cc9a2 100644 --- a/Lib/test/test_copy.py +++ b/Lib/test/test_copy.py @@ -972,6 +972,10 @@ class C: copy.replace(c, x=1, error=2) +class MiscTestCase(unittest.TestCase): + def test__all__(self): + support.check__all__(self, copy, not_exported={"dispatch_table", "error"}) + def global_foo(x, y): return x+y diff --git a/Lib/test/test_cprofile.py b/Lib/test/test_cprofile.py index 27e8a767903..b2595eccc82 100644 --- a/Lib/test/test_cprofile.py +++ b/Lib/test/test_cprofile.py @@ -30,6 +30,43 @@ def test_bad_counter_during_dealloc(self): self.assertEqual(cm.unraisable.exc_type, TypeError) + def test_evil_external_timer(self): + # gh-120289 + # Disabling profiler in external timer should not crash + import _lsprof + class EvilTimer(): + def __init__(self, disable_count): + self.count = 0 + self.disable_count = disable_count + + def __call__(self): + self.count += 1 + if self.count == self.disable_count: + profiler_with_evil_timer.disable() + return self.count + + # this will trigger external timer to disable profiler at + # call event - in initContext in _lsprof.c + with support.catch_unraisable_exception() as cm: + profiler_with_evil_timer = _lsprof.Profiler(EvilTimer(1)) + profiler_with_evil_timer.enable() + # Make a call to trigger timer + (lambda: None)() + profiler_with_evil_timer.disable() + profiler_with_evil_timer.clear() + self.assertEqual(cm.unraisable.exc_type, RuntimeError) + + # this will trigger external timer to disable profiler at + # return event - in Stop in _lsprof.c + with support.catch_unraisable_exception() as cm: + profiler_with_evil_timer = _lsprof.Profiler(EvilTimer(2)) + profiler_with_evil_timer.enable() + # Make a call to trigger timer + (lambda: None)() + profiler_with_evil_timer.disable() + profiler_with_evil_timer.clear() + self.assertEqual(cm.unraisable.exc_type, RuntimeError) + def test_profile_enable_disable(self): prof = self.profilerclass() # Make sure we clean ourselves up if the test fails for some reason. 
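For readers unfamiliar with the external-timer hook that test_evil_external_timer exercises above, here is a minimal sketch using the public cProfile API rather than _lsprof directly; it only illustrates how a timer callable is supplied and is not part of the patch.

    import cProfile
    import time

    # cProfile.Profile accepts an external timer callable; the new test above
    # drives the same hook at the _lsprof level with a timer that disables the
    # profiler from inside the callback, which is now reported as an unraisable
    # RuntimeError instead of crashing the interpreter.
    prof = cProfile.Profile(time.perf_counter)
    prof.enable()
    sum(range(1000))
    prof.disable()
    prof.print_stats()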
diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index d74ab7e016f..ce5c03659f1 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -454,6 +454,10 @@ def test_read_quoting(self): quoting=csv.QUOTE_STRINGS) self._read_test(['1,@,3,@,5'], [['1', ',3,', '5']], quotechar='@') self._read_test(['1,\0,3,\0,5'], [['1', ',3,', '5']], quotechar='\0') + self._read_test(['1\\.5,\\.5,.5'], [[1.5, 0.5, 0.5]], + quoting=csv.QUOTE_NONNUMERIC, escapechar='\\') + self._read_test(['1\\.5,\\.5,"\\.5"'], [[1.5, 0.5, ".5"]], + quoting=csv.QUOTE_STRINGS, escapechar='\\') def test_read_skipinitialspace(self): self._read_test(['no space, space, spaces,\ttab'], @@ -1099,6 +1103,12 @@ class mydialect(csv.Dialect): mydialect.quoting = None self.assertRaises(csv.Error, mydialect) + mydialect.quoting = 42 + with self.assertRaises(csv.Error) as cm: + mydialect() + self.assertEqual(str(cm.exception), + 'bad "quoting" value') + mydialect.doublequote = True mydialect.quoting = csv.QUOTE_ALL mydialect.quotechar = '"' diff --git a/Lib/test/test_ctypes/test_arrays.py b/Lib/test/test_ctypes/test_arrays.py index 3568cf97f40..c80fdff5de6 100644 --- a/Lib/test/test_ctypes/test_arrays.py +++ b/Lib/test/test_ctypes/test_arrays.py @@ -1,8 +1,7 @@ import ctypes import sys import unittest -import warnings -from ctypes import (Structure, Array, sizeof, addressof, +from ctypes import (Structure, Array, ARRAY, sizeof, addressof, create_string_buffer, create_unicode_buffer, c_char, c_wchar, c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, c_long, c_ulonglong, c_float, c_double, c_longdouble) @@ -17,13 +16,6 @@ c_long, c_ulonglong, c_float, c_double, c_longdouble -def ARRAY(*args): - # ignore DeprecationWarning in tests - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - return ctypes.ARRAY(*args) - - class ArrayTestCase(unittest.TestCase): def test_inheritance_hierarchy(self): self.assertEqual(Array.mro(), [Array, _CData, object]) @@ -253,7 +245,7 @@ def test_empty_element_struct(self): class EmptyStruct(Structure): _fields_ = [] - obj = (EmptyStruct * 2)() # bpo37188: Floating point exception + obj = (EmptyStruct * 2)() # bpo37188: Floating-point exception self.assertEqual(sizeof(obj), 0) def test_empty_element_array(self): @@ -261,7 +253,7 @@ class EmptyArray(Array): _type_ = c_int _length_ = 0 - obj = (EmptyArray * 2)() # bpo37188: Floating point exception + obj = (EmptyArray * 2)() # bpo37188: Floating-point exception self.assertEqual(sizeof(obj), 0) def test_bpo36504_signed_int_overflow(self): @@ -275,10 +267,6 @@ def test_bpo36504_signed_int_overflow(self): def test_large_array(self, size): c_char * size - def test_deprecation(self): - with self.assertWarns(DeprecationWarning): - CharArray = ctypes.ARRAY(c_char, 3) - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_dataclasses/__init__.py b/Lib/test/test_dataclasses/__init__.py index ffb8bbe75c5..54e48113237 100644 --- a/Lib/test/test_dataclasses/__init__.py +++ b/Lib/test/test_dataclasses/__init__.py @@ -17,6 +17,7 @@ from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol, DefaultDict from typing import get_type_hints from collections import deque, OrderedDict, namedtuple, defaultdict +from copy import deepcopy from functools import total_ordering import typing # Needed for the string "typing.ClassVar[int]" to work as an annotation. 
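As a side note on the test_csv.py hunk above: the new escapechar cases rely on QUOTE_NONNUMERIC (and QUOTE_STRINGS) converting unquoted fields to float on read. A minimal sketch of that conversion, for illustration only and not part of the patch:

    import csv
    import io

    # With QUOTE_NONNUMERIC the reader turns every unquoted field into a float,
    # while quoted fields stay strings; the new tests check that escaped
    # characters (e.g. '\.' with escapechar='\\') still take part in that
    # conversion.
    rows = csv.reader(io.StringIO('1.5,"spam",2\n'), quoting=csv.QUOTE_NONNUMERIC)
    print(next(rows))  # [1.5, 'spam', 2.0]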
@@ -3175,6 +3176,48 @@ class C: with self.assertRaisesRegex(TypeError, 'unhashable type'): hash(C({})) + def test_frozen_deepcopy_without_slots(self): + # see: https://github.com/python/cpython/issues/89683 + @dataclass(frozen=True, slots=False) + class C: + s: str + + c = C('hello') + self.assertEqual(deepcopy(c), c) + + def test_frozen_deepcopy_with_slots(self): + # see: https://github.com/python/cpython/issues/89683 + with self.subTest('generated __slots__'): + @dataclass(frozen=True, slots=True) + class C: + s: str + + c = C('hello') + self.assertEqual(deepcopy(c), c) + + with self.subTest('user-defined __slots__ and no __{get,set}state__'): + @dataclass(frozen=True, slots=False) + class C: + __slots__ = ('s',) + s: str + + # with user-defined slots, __getstate__ and __setstate__ are not + # automatically added, hence the error + err = r"^cannot\ assign\ to\ field\ 's'$" + self.assertRaisesRegex(FrozenInstanceError, err, deepcopy, C('')) + + with self.subTest('user-defined __slots__ and __{get,set}state__'): + @dataclass(frozen=True, slots=False) + class C: + __slots__ = ('s',) + __getstate__ = dataclasses._dataclass_getstate + __setstate__ = dataclasses._dataclass_setstate + + s: str + + c = C('hello') + self.assertEqual(deepcopy(c), c) + class TestSlots(unittest.TestCase): def test_simple(self): @@ -3664,6 +3707,38 @@ class A(WithDictSlot): ... self.assertEqual(A().__dict__, {}) A() + @support.cpython_only + def test_slots_with_wrong_init_subclass(self): + # TODO: This test is for a kinda-buggy behavior. + # Ideally, it should be fixed and `__init_subclass__` + # should be fully supported in the future versions. + # See https://github.com/python/cpython/issues/91126 + class WrongSuper: + def __init_subclass__(cls, arg): + pass + + with self.assertRaisesRegex( + TypeError, + "missing 1 required positional argument: 'arg'", + ): + @dataclass(slots=True) + class WithWrongSuper(WrongSuper, arg=1): + pass + + class CorrectSuper: + args = [] + def __init_subclass__(cls, arg="default"): + cls.args.append(arg) + + @dataclass(slots=True) + class WithCorrectSuper(CorrectSuper): + pass + + # __init_subclass__ is called twice: once for `WithCorrectSuper` + # and once for `WithCorrectSuper__slots__` new class + # that we create internally. 
+ self.assertEqual(CorrectSuper.args, ["default", "default"]) + class TestDescriptors(unittest.TestCase): def test_set_name(self): diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index 46755107de0..c591fd54430 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -2071,7 +2071,9 @@ def test_tonum_methods(self): #to quantize, which is already extensively tested test_triples = [ ('123.456', -4, '0E+4'), + ('-123.456', -4, '-0E+4'), ('123.456', -3, '0E+3'), + ('-123.456', -3, '-0E+3'), ('123.456', -2, '1E+2'), ('123.456', -1, '1.2E+2'), ('123.456', 0, '123'), @@ -4379,7 +4381,8 @@ def test_module_attributes(self): self.assertEqual(C.__version__, P.__version__) - self.assertEqual(dir(C), dir(P)) + self.assertLessEqual(set(dir(C)), set(dir(P))) + self.assertEqual([n for n in dir(C) if n[:2] != '__'], sorted(P.__all__)) def test_context_attributes(self): diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index e5dba7cdc57..4030716efb5 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -1476,6 +1476,24 @@ def test_dict_items_result_gc_reversed(self): gc.collect() self.assertTrue(gc.is_tracked(next(it))) + def test_store_evilattr(self): + class EvilAttr: + def __init__(self, d): + self.d = d + + def __del__(self): + if 'attr' in self.d: + del self.d['attr'] + gc.collect() + + class Obj: + pass + + obj = Obj() + obj.__dict__ = {} + for _ in range(10): + obj.attr = EvilAttr(obj.__dict__) + def test_str_nonstr(self): # cpython uses a different lookup function if the dict only contains # `str` keys. Make sure the unoptimized path is used when a non-`str` diff --git a/Lib/test/test_dictcomps.py b/Lib/test/test_dictcomps.py index 472e3dfa0d8..26b56dac503 100644 --- a/Lib/test/test_dictcomps.py +++ b/Lib/test/test_dictcomps.py @@ -1,5 +1,8 @@ +import traceback import unittest +from test.support import BrokenIter + # For scope testing. 
g = "Global variable" @@ -127,6 +130,41 @@ def test_star_expression(self): self.assertEqual({i: i*i for i in [*range(4)]}, expected) self.assertEqual({i: i*i for i in (*range(4),)}, expected) + def test_exception_locations(self): + # The location of an exception raised from __init__ or + # __next__ should should be the iterator expression + def init_raises(): + try: + {x:x for x in BrokenIter(init_raises=True)} + except Exception as e: + return e + + def next_raises(): + try: + {x:x for x in BrokenIter(next_raises=True)} + except Exception as e: + return e + + def iter_raises(): + try: + {x:x for x in BrokenIter(iter_raises=True)} + except Exception as e: + return e + + for func, expected in [(init_raises, "BrokenIter(init_raises=True)"), + (next_raises, "BrokenIter(next_raises=True)"), + (iter_raises, "BrokenIter(iter_raises=True)"), + ]: + with self.subTest(func): + exc = func() + f = traceback.extract_tb(exc.__traceback__)[0] + indent = 16 + co = func.__code__ + self.assertEqual(f.lineno, co.co_firstlineno + 2) + self.assertEqual(f.end_lineno, co.co_firstlineno + 2) + self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], + expected) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py index bfff1051262..c75a842c335 100644 --- a/Lib/test/test_email/test_generator.py +++ b/Lib/test/test_email/test_generator.py @@ -6,6 +6,7 @@ from email.generator import Generator, BytesGenerator from email.headerregistry import Address from email import policy +import email.errors from test.test_email import TestEmailBase, parameterize @@ -249,6 +250,44 @@ def test_rfc2231_wrapping_switches_to_default_len_if_too_narrow(self): g.flatten(msg) self.assertEqual(s.getvalue(), self.typ(expected)) + def test_keep_encoded_newlines(self): + msg = self.msgmaker(self.typ(textwrap.dedent("""\ + To: nobody + Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com + + None + """))) + expected = textwrap.dedent("""\ + To: nobody + Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com + + None + """) + s = self.ioclass() + g = self.genclass(s, policy=self.policy.clone(max_line_length=80)) + g.flatten(msg) + self.assertEqual(s.getvalue(), self.typ(expected)) + + def test_keep_long_encoded_newlines(self): + msg = self.msgmaker(self.typ(textwrap.dedent("""\ + To: nobody + Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com + + None + """))) + expected = textwrap.dedent("""\ + To: nobody + Subject: Bad subject + =?utf-8?q?=0A?=Bcc: + injection@example.com + + None + """) + s = self.ioclass() + g = self.genclass(s, policy=self.policy.clone(max_line_length=30)) + g.flatten(msg) + self.assertEqual(s.getvalue(), self.typ(expected)) + class TestGenerator(TestGeneratorBase, TestEmailBase): @@ -273,6 +312,29 @@ def test_flatten_unicode_linesep(self): g.flatten(msg) self.assertEqual(s.getvalue(), self.typ(expected)) + def test_verify_generated_headers(self): + """gh-121650: by default the generator prevents header injection""" + class LiteralHeader(str): + name = 'Header' + def fold(self, **kwargs): + return self + + for text in ( + 'Value\r\nBad Injection\r\n', + 'NoNewLine' + ): + with self.subTest(text=text): + message = message_from_string( + "Header: Value\r\n\r\nBody", + policy=self.policy, + ) + + del message['Header'] + message['Header'] = LiteralHeader(text) + + with self.assertRaises(email.errors.HeaderWriteError): + message.as_string() + class TestBytesGenerator(TestGeneratorBase, TestEmailBase): @@ 
-294,6 +356,19 @@ def test_defaults_handle_spaces_between_encoded_words_when_folded(self): g.flatten(msg) self.assertEqual(s.getvalue(), expected) + def test_defaults_handle_spaces_when_encoded_words_is_folded_in_middle(self): + source = ('A very long long long long long long long long long long long long ' + 'long long long long long long long long long long long súmmäry') + expected = ('Subject: A very long long long long long long long long long long long long\n' + ' long long long long long long long long long long long =?utf-8?q?s=C3=BAmm?=\n' + ' =?utf-8?q?=C3=A4ry?=\n\n').encode('ascii') + msg = EmailMessage() + msg['Subject'] = source + s = io.BytesIO() + g = BytesGenerator(s) + g.flatten(msg) + self.assertEqual(s.getvalue(), expected) + def test_defaults_handle_spaces_at_start_of_subject(self): source = " Уведомление" expected = b"Subject: =?utf-8?b?0KPQstC10LTQvtC80LvQtdC90LjQtQ==?=\n\n" diff --git a/Lib/test/test_email/test_policy.py b/Lib/test/test_email/test_policy.py index c6b9c80efe1..baa35fd68e4 100644 --- a/Lib/test/test_email/test_policy.py +++ b/Lib/test/test_email/test_policy.py @@ -26,6 +26,7 @@ class PolicyAPITests(unittest.TestCase): 'raise_on_defect': False, 'mangle_from_': True, 'message_factory': None, + 'verify_generated_headers': True, } # These default values are the ones set on email.policy.default. # If any of these defaults change, the docs must be updated. @@ -294,6 +295,31 @@ def test_short_maxlen_error(self): with self.assertRaises(email.errors.HeaderParseError): policy.fold("Subject", subject) + def test_verify_generated_headers(self): + """Turning protection off allows header injection""" + policy = email.policy.default.clone(verify_generated_headers=False) + for text in ( + 'Header: Value\r\nBad: Injection\r\n', + 'Header: NoNewLine' + ): + with self.subTest(text=text): + message = email.message_from_string( + "Header: Value\r\n\r\nBody", + policy=policy, + ) + class LiteralHeader(str): + name = 'Header' + def fold(self, **kwargs): + return self + + del message['Header'] + message['Header'] = LiteralHeader(text) + + self.assertEqual( + message.as_string(), + f"{text}\nBody", + ) + # XXX: Need subclassing tests. # For adding subclassed objects, make sure the usual rules apply (subclass # wins), but that the order still works (right overrides left). diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 634513ec7a5..6026e5fb714 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1,10 +1,11 @@ # Run the tests in Programs/_testembed.c (tests for the CPython embedding APIs) from test import support -from test.support import import_helper, os_helper, MS_WINDOWS +from test.support import import_helper, os_helper, threading_helper, MS_WINDOWS import unittest from collections import namedtuple import contextlib +import io import json import os import os.path @@ -48,6 +49,8 @@ INIT_LOOPS = 4 MAX_HASH_SEED = 4294967295 +ABI_THREAD = 't' if sysconfig.get_config_var('Py_GIL_DISABLED') else '' + # If we are running from a build dir, but the stdlib has been installed, # some tests need to expect different results. 
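The ABI_THREAD constant introduced above feeds into the expected stdlib search paths: free-threaded builds append a 't' to the versioned directory (e.g. lib/python3.13t). A small sketch of how that suffix is derived, mirroring the added test_embed code and shown only for illustration:

    import sys
    import sysconfig

    # Mirrors the ABI_THREAD logic added to test_embed: on free-threaded
    # (Py_GIL_DISABLED) builds the versioned stdlib directory gets a trailing 't'.
    abi_thread = 't' if sysconfig.get_config_var('Py_GIL_DISABLED') else ''
    ver = sys.version_info
    print(f"python{ver.major}.{ver.minor}{abi_thread}")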
@@ -413,6 +416,73 @@ def test_datetime_reset_strptime(self): out, err = self.run_embedded_interpreter("test_repeated_init_exec", code) self.assertEqual(out, '20000101\n' * INIT_LOOPS) + def test_static_types_inherited_slots(self): + script = textwrap.dedent(""" + import test.support + + results = {} + def add(cls, slot, own): + value = getattr(cls, slot) + try: + subresults = results[cls.__name__] + except KeyError: + subresults = results[cls.__name__] = {} + subresults[slot] = [repr(value), own] + + for cls in test.support.iter_builtin_types(): + for slot, own in test.support.iter_slot_wrappers(cls): + add(cls, slot, own) + """) + + ns = {} + exec(script, ns, ns) + all_expected = ns['results'] + del ns + + script += textwrap.dedent(""" + import json + import sys + text = json.dumps(results) + print(text, file=sys.stderr) + """) + out, err = self.run_embedded_interpreter( + "test_repeated_init_exec", script, script) + results = err.split('--- Loop #')[1:] + results = [res.rpartition(' ---\n')[-1] for res in results] + + self.maxDiff = None + for i, text in enumerate(results, start=1): + result = json.loads(text) + for classname, expected in all_expected.items(): + with self.subTest(loop=i, cls=classname): + slots = result.pop(classname) + self.assertEqual(slots, expected) + self.assertEqual(result, {}) + self.assertEqual(out, '') + + def test_getargs_reset_static_parser(self): + # Test _PyArg_Parser initializations via _PyArg_UnpackKeywords() + # https://github.com/python/cpython/issues/122334 + code = textwrap.dedent(""" + try: + import _ssl + except ModuleNotFoundError: + _ssl = None + if _ssl is not None: + _ssl.txt2obj(txt='1.3') + print('1') + + import _queue + _queue.SimpleQueue().put_nowait(item=None) + print('2') + + import _zoneinfo + _zoneinfo.ZoneInfo.clear_cache(only_keys=['Foo/Bar']) + print('3') + """) + out, err = self.run_embedded_interpreter("test_repeated_init_exec", code) + self.assertEqual(out, '1\n2\n3\n' * INIT_LOOPS) + @unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi") class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): @@ -1285,11 +1355,11 @@ def module_search_paths(self, prefix=None, exec_prefix=None): ver = sys.version_info return [ os.path.join(prefix, sys.platlibdir, - f'python{ver.major}{ver.minor}.zip'), + f'python{ver.major}{ver.minor}{ABI_THREAD}.zip'), os.path.join(prefix, sys.platlibdir, - f'python{ver.major}.{ver.minor}'), + f'python{ver.major}.{ver.minor}{ABI_THREAD}'), os.path.join(exec_prefix, sys.platlibdir, - f'python{ver.major}.{ver.minor}', 'lib-dynload'), + f'python{ver.major}.{ver.minor}{ABI_THREAD}', 'lib-dynload'), ] @contextlib.contextmanager @@ -1343,7 +1413,7 @@ def test_init_setpythonhome(self): expected_paths = [paths[0], os.path.join(home, 'DLLs'), stdlib] else: version = f'{sys.version_info.major}.{sys.version_info.minor}' - stdlib = os.path.join(home, sys.platlibdir, f'python{version}') + stdlib = os.path.join(home, sys.platlibdir, f'python{version}{ABI_THREAD}') expected_paths = self.module_search_paths(prefix=home, exec_prefix=home) config = { @@ -1384,7 +1454,7 @@ def test_init_is_python_build_with_home(self): expected_paths = [paths[0], os.path.join(home, 'DLLs'), stdlib] else: version = f'{sys.version_info.major}.{sys.version_info.minor}' - stdlib = os.path.join(home, sys.platlibdir, f'python{version}') + stdlib = os.path.join(home, sys.platlibdir, f'python{version}{ABI_THREAD}') expected_paths = self.module_search_paths(prefix=home, exec_prefix=home) config = { @@ -1515,7 +1585,7 @@ def 
test_init_pyvenv_cfg(self): if not MS_WINDOWS: lib_dynload = os.path.join(pyvenv_home, sys.platlibdir, - f'python{ver.major}.{ver.minor}', + f'python{ver.major}.{ver.minor}{ABI_THREAD}', 'lib-dynload') os.makedirs(lib_dynload) else: @@ -1722,6 +1792,13 @@ def test_init_main_interpreter_settings(self): self.assertEqual(out, expected) + @threading_helper.requires_working_threading() + def test_init_in_background_thread(self): + # gh-123022: Check that running Py_Initialize() in a background + # thread doesn't crash. + out, err = self.run_embedded_interpreter("test_init_in_background_thread") + self.assertEqual(err, "") + class SetConfigTests(unittest.TestCase): def test_set_config(self): diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py index 61ec8fe3151..60815be96e1 100644 --- a/Lib/test/test_faulthandler.py +++ b/Lib/test/test_faulthandler.py @@ -236,7 +236,7 @@ def test_sigfpe(self): faulthandler._sigfpe() """, 3, - 'Floating point exception') + 'Floating-point exception') @unittest.skipIf(_testcapi is None, 'need _testcapi') @unittest.skipUnless(hasattr(signal, 'SIGBUS'), 'need signal.SIGBUS') diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py index b5df7167826..2c83667b22f 100644 --- a/Lib/test/test_filecmp.py +++ b/Lib/test/test_filecmp.py @@ -1,5 +1,6 @@ import filecmp import os +import re import shutil import tempfile import unittest @@ -155,6 +156,39 @@ def test_cmpfiles(self): (['file'], ['file2'], []), "Comparing mismatched directories fails") + def test_cmpfiles_invalid_names(self): + # See https://github.com/python/cpython/issues/122400. + for file, desc in [ + ('\x00', 'NUL bytes filename'), + (__file__ + '\x00', 'filename with embedded NUL bytes'), + ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), + ('a' * 1_000_000, 'very long filename'), + ]: + for other_dir in [self.dir, self.dir_same, self.dir_diff]: + with self.subTest(f'cmpfiles: {desc}', other_dir=other_dir): + res = filecmp.cmpfiles(self.dir, other_dir, [file]) + self.assertTupleEqual(res, ([], [], [file])) + + def test_dircmp_invalid_names(self): + for bad_dir, desc in [ + ('\x00', 'NUL bytes dirname'), + (f'Top{os.sep}Mid\x00', 'dirname with embedded NUL bytes'), + ("\uD834\uDD1E", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), + ('a' * 1_000_000, 'very long dirname'), + ]: + d1 = filecmp.dircmp(self.dir, bad_dir) + d2 = filecmp.dircmp(bad_dir, self.dir) + for target in [ + # attributes where os.listdir() raises OSError or ValueError + 'left_list', 'right_list', + 'left_only', 'right_only', 'common', + ]: + with self.subTest(f'dircmp(ok, bad): {desc}', target=target): + with self.assertRaises((OSError, ValueError)): + getattr(d1, target) + with self.subTest(f'dircmp(bad, ok): {desc}', target=target): + with self.assertRaises((OSError, ValueError)): + getattr(d2, target) def _assert_lists(self, actual, expected): """Assert that two lists are equal, up to ordering.""" @@ -277,6 +311,17 @@ def test_dircmp_shallow_same_file(self): ] self._assert_report(d.report, expected_report) + def test_dircmp_shallow_is_keyword_only(self): + with self.assertRaisesRegex( + TypeError, + re.escape("dircmp.__init__() takes from 3 to 5 positional arguments but 6 were given"), + ): + filecmp.dircmp(self.dir, self.dir_same, None, None, True) + self.assertIsInstance( + filecmp.dircmp(self.dir, self.dir_same, None, None, shallow=True), + filecmp.dircmp, + ) + def test_dircmp_subdirs_type(self): """Check that dircmp.subdirs respects subclassing.""" class MyDirCmp(filecmp.dircmp): diff --git 
a/Lib/test/test_float.py b/Lib/test/test_float.py index 5bd640617d6..65b9cb03e61 100644 --- a/Lib/test/test_float.py +++ b/Lib/test/test_float.py @@ -8,6 +8,7 @@ import unittest from test import support +from test.support.testcase import FloatsAreIdenticalMixin from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) from math import isinf, isnan, copysign, ldexp @@ -829,7 +830,7 @@ def test_short_repr(self): self.assertEqual(repr(float(negs)), str(float(negs))) @support.requires_IEEE_754 -class RoundTestCase(unittest.TestCase): +class RoundTestCase(unittest.TestCase, FloatsAreIdenticalMixin): def test_inf_nan(self): self.assertRaises(OverflowError, round, INF) @@ -859,10 +860,10 @@ def test_large_n(self): def test_small_n(self): for n in [-308, -309, -400, 1-2**31, -2**31, -2**31-1, -2**100]: - self.assertEqual(round(123.456, n), 0.0) - self.assertEqual(round(-123.456, n), -0.0) - self.assertEqual(round(1e300, n), 0.0) - self.assertEqual(round(1e-320, n), 0.0) + self.assertFloatsAreIdentical(round(123.456, n), 0.0) + self.assertFloatsAreIdentical(round(-123.456, n), -0.0) + self.assertFloatsAreIdentical(round(1e300, n), 0.0) + self.assertFloatsAreIdentical(round(1e-320, n), 0.0) def test_overflow(self): self.assertRaises(OverflowError, round, 1.6e308, -308) @@ -1053,21 +1054,14 @@ def test_nan_signs(self): fromHex = float.fromhex toHex = float.hex -class HexFloatTestCase(unittest.TestCase): +class HexFloatTestCase(FloatsAreIdenticalMixin, unittest.TestCase): MAX = fromHex('0x.fffffffffffff8p+1024') # max normal MIN = fromHex('0x1p-1022') # min normal TINY = fromHex('0x0.0000000000001p-1022') # min subnormal EPS = fromHex('0x0.0000000000001p0') # diff between 1.0 and next float up def identical(self, x, y): - # check that floats x and y are identical, or that both - # are NaNs - if isnan(x) or isnan(y): - if isnan(x) == isnan(y): - return - elif x == y and (x != 0.0 or copysign(1.0, x) == copysign(1.0, y)): - return - self.fail('%r not identical to %r' % (x, y)) + self.assertFloatsAreIdentical(x, y) def test_ends(self): self.identical(self.MIN, ldexp(1.0, -1022)) diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py index 8cef621bd71..8d83880a8c0 100644 --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -36,7 +36,7 @@ def testformat(formatstr, args, output=None, limit=None, overflowok=False): # when 'limit' is specified, it determines how many characters # must match exactly; lengths must always match. 
# ex: limit=5, '12345678' matches '12345___' - # (mainly for floating point format tests for which an exact match + # (mainly for floating-point format tests for which an exact match # can't be guaranteed due to rounding and representation errors) elif output and limit is not None and ( len(result)!=len(output) or result[:limit]!=output[:limit]): diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py index 3a714c64278..7e20c5e8e2b 100644 --- a/Lib/test/test_fractions.py +++ b/Lib/test/test_fractions.py @@ -922,21 +922,21 @@ def testMixedPower(self): self.assertTypedEquals(Root(4) ** F(2, 1), Root(4, F(1))) self.assertTypedEquals(Root(4) ** F(-2, 1), Root(4, -F(1))) self.assertTypedEquals(Root(4) ** F(-2, 3), Root(4, -3.0)) - self.assertEqual(F(3, 2) ** SymbolicReal('X'), SymbolicReal('1.5 ** X')) + self.assertEqual(F(3, 2) ** SymbolicReal('X'), SymbolicReal('3/2 ** X')) self.assertEqual(SymbolicReal('X') ** F(3, 2), SymbolicReal('X ** 1.5')) - self.assertTypedEquals(F(3, 2) ** Rect(2, 0), Polar(2.25, 0.0)) - self.assertTypedEquals(F(1, 1) ** Rect(2, 3), Polar(1.0, 0.0)) + self.assertTypedEquals(F(3, 2) ** Rect(2, 0), Polar(F(9,4), 0.0)) + self.assertTypedEquals(F(1, 1) ** Rect(2, 3), Polar(F(1), 0.0)) self.assertTypedEquals(F(3, 2) ** RectComplex(2, 0), Polar(2.25, 0.0)) self.assertTypedEquals(F(1, 1) ** RectComplex(2, 3), Polar(1.0, 0.0)) self.assertTypedEquals(Polar(4, 2) ** F(3, 2), Polar(8.0, 3.0)) self.assertTypedEquals(Polar(4, 2) ** F(3, 1), Polar(64, 6)) self.assertTypedEquals(Polar(4, 2) ** F(-3, 1), Polar(0.015625, -6)) self.assertTypedEquals(Polar(4, 2) ** F(-3, 2), Polar(0.125, -3.0)) - self.assertEqual(F(3, 2) ** SymbolicComplex('X'), SymbolicComplex('1.5 ** X')) + self.assertEqual(F(3, 2) ** SymbolicComplex('X'), SymbolicComplex('3/2 ** X')) self.assertEqual(SymbolicComplex('X') ** F(3, 2), SymbolicComplex('X ** 1.5')) - self.assertEqual(F(3, 2) ** Symbolic('X'), Symbolic('1.5 ** X')) + self.assertEqual(F(3, 2) ** Symbolic('X'), Symbolic('3/2 ** X')) self.assertEqual(Symbolic('X') ** F(3, 2), Symbolic('X ** 1.5')) def testMixingWithDecimal(self): diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index aee8d374b22..eedd66a4f12 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -13,9 +13,11 @@ except ImportError: _testcapi = None +from collections.abc import Mapping from test import support from test.support import import_helper, threading_helper, Py_GIL_DISABLED from test.support.script_helper import assert_python_ok +from test import mapping_tests class ClearTest(unittest.TestCase): @@ -420,6 +422,164 @@ def test_unsupport(self): with self.assertRaises(TypeError): copy.deepcopy(d) + def test_is_mapping(self): + x = 1 + d = sys._getframe().f_locals + self.assertIsInstance(d, Mapping) + match d: + case {"x": value}: + self.assertEqual(value, 1) + kind = "mapping" + case _: + kind = "other" + self.assertEqual(kind, "mapping") + + def _x_stringlikes(self): + class StringSubclass(str): + pass + + class ImpostorX: + def __hash__(self): + return hash('x') + + def __eq__(self, other): + return other == 'x' + + return StringSubclass('x'), ImpostorX(), 'x' + + def test_proxy_key_stringlikes_overwrite(self): + def f(obj): + x = 1 + proxy = sys._getframe().f_locals + proxy[obj] = 2 + return ( + list(proxy.keys()), + dict(proxy), + proxy + ) + + for obj in self._x_stringlikes(): + with self.subTest(cls=type(obj).__name__): + + keys_snapshot, proxy_snapshot, proxy = f(obj) + expected_keys = ['obj', 'x', 'proxy'] + expected_dict = {'obj': 'x', 'x': 
2, 'proxy': proxy} + self.assertEqual(proxy.keys(), expected_keys) + self.assertEqual(proxy, expected_dict) + self.assertEqual(keys_snapshot, expected_keys) + self.assertEqual(proxy_snapshot, expected_dict) + + def test_proxy_key_stringlikes_ftrst_write(self): + def f(obj): + proxy = sys._getframe().f_locals + proxy[obj] = 2 + self.assertEqual(x, 2) + x = 1 + + for obj in self._x_stringlikes(): + with self.subTest(cls=type(obj).__name__): + f(obj) + + def test_proxy_key_unhashables(self): + class StringSubclass(str): + __hash__ = None + + class ObjectSubclass: + __hash__ = None + + proxy = sys._getframe().f_locals + + for obj in StringSubclass('x'), ObjectSubclass(): + with self.subTest(cls=type(obj).__name__): + with self.assertRaises(TypeError): + proxy[obj] + with self.assertRaises(TypeError): + proxy[obj] = 0 + + def test_constructor(self): + FrameLocalsProxy = type([sys._getframe().f_locals + for x in range(1)][0]) + self.assertEqual(FrameLocalsProxy.__name__, 'FrameLocalsProxy') + + def make_frame(): + x = 1 + y = 2 + return sys._getframe() + + proxy = FrameLocalsProxy(make_frame()) + self.assertEqual(proxy, {'x': 1, 'y': 2}) + + # constructor expects 1 frame argument + with self.assertRaises(TypeError): + FrameLocalsProxy() # no arguments + with self.assertRaises(TypeError): + FrameLocalsProxy(123) # wrong type + with self.assertRaises(TypeError): + FrameLocalsProxy(frame=sys._getframe()) # no keyword arguments + + +class FrameLocalsProxyMappingTests(mapping_tests.TestHashMappingProtocol): + """Test that FrameLocalsProxy behaves like a Mapping (with exceptions)""" + + def _f(*args, **kwargs): + def _f(): + return sys._getframe().f_locals + return _f() + type2test = _f + + @unittest.skipIf(True, 'Locals proxies for different frames never compare as equal') + def test_constructor(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: del proxy[key] fails') + def test_write(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.popitem') + def test_popitem(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.pop') + def test_pop(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.clear') + def test_clear(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.fromkeys') + def test_fromkeys(self): + pass + + # no del + def test_getitem(self): + mapping_tests.BasicTestMappingProtocol.test_getitem(self) + d = self._full_mapping({'a': 1, 'b': 2}) + self.assertEqual(d['a'], 1) + self.assertEqual(d['b'], 2) + d['c'] = 3 + d['a'] = 4 + self.assertEqual(d['c'], 3) + self.assertEqual(d['a'], 4) + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.update') + def test_update(self): + pass + + # proxy.copy returns a regular dict + def test_copy(self): + d = self._full_mapping({1:1, 2:2, 3:3}) + self.assertEqual(d.copy(), {1:1, 2:2, 3:3}) + d = self._empty_mapping() + self.assertEqual(d.copy(), d) + self.assertRaises(TypeError, d.copy, None) + + self.assertIsInstance(d.copy(), dict) + + @unittest.skipIf(True, 'Locals proxies for different frames never compare as equal') + def test_eq(self): + pass + class TestFrameCApi(unittest.TestCase): def test_basic(self): diff --git a/Lib/test/test_free_threading/test_list.py b/Lib/test/test_free_threading/test_list.py index 6ad806d67a8..a705161369e 100644 --- a/Lib/test/test_free_threading/test_list.py +++ b/Lib/test/test_free_threading/test_list.py @@ -6,6 +6,10 @@ from test.support import threading_helper +NTHREAD = 10 +OBJECT_COUNT = 5_000 + + class C: def __init__(self, v): self.v = v @@ 
-14,9 +18,7 @@ def __init__(self, v): @threading_helper.requires_working_threading() class TestList(TestCase): def test_racing_iter_append(self): - l = [] - OBJECT_COUNT = 10000 def writer_func(): for i in range(OBJECT_COUNT): @@ -32,7 +34,7 @@ def reader_func(): writer = Thread(target=writer_func) readers = [] - for x in range(30): + for x in range(NTHREAD): reader = Thread(target=reader_func) readers.append(reader) reader.start() @@ -43,37 +45,31 @@ def reader_func(): reader.join() def test_racing_iter_extend(self): - iters = [ - lambda x: [x], - ] - for iter_case in iters: - with self.subTest(iter=iter_case): - l = [] - OBJECT_COUNT = 10000 - - def writer_func(): - for i in range(OBJECT_COUNT): - l.extend(iter_case(C(i + OBJECT_COUNT))) - - def reader_func(): - while True: - count = len(l) - for i, x in enumerate(l): - self.assertEqual(x.v, i + OBJECT_COUNT) - if count == OBJECT_COUNT: - break - - writer = Thread(target=writer_func) - readers = [] - for x in range(30): - reader = Thread(target=reader_func) - readers.append(reader) - reader.start() - - writer.start() - writer.join() - for reader in readers: - reader.join() + l = [] + + def writer_func(): + for i in range(OBJECT_COUNT): + l.extend([C(i + OBJECT_COUNT)]) + + def reader_func(): + while True: + count = len(l) + for i, x in enumerate(l): + self.assertEqual(x.v, i + OBJECT_COUNT) + if count == OBJECT_COUNT: + break + + writer = Thread(target=writer_func) + readers = [] + for x in range(NTHREAD): + reader = Thread(target=reader_func) + readers.append(reader) + reader.start() + + writer.start() + writer.join() + for reader in readers: + reader.join() if __name__ == "__main__": diff --git a/Lib/test/test_free_threading/test_monitoring.py b/Lib/test/test_free_threading/test_monitoring.py index 3a3f1ba3b60..8ccb1d51102 100644 --- a/Lib/test/test_free_threading/test_monitoring.py +++ b/Lib/test/test_free_threading/test_monitoring.py @@ -14,7 +14,7 @@ class InstrumentationMultiThreadedMixin: thread_count = 10 - func_count = 200 + func_count = 50 fib = 12 def after_threads(self): @@ -36,7 +36,7 @@ def work(self, n, funcs): def start_work(self, n, funcs): # With the GIL builds we need to make sure that the hooks have # a chance to run as it's possible to run w/o releasing the GIL. 
- time.sleep(1) + time.sleep(0.1) self.work(n, funcs) def after_test(self): @@ -223,23 +223,26 @@ def trace(frame, event, arg): frame.f_trace_opcodes = True return trace + loops = 1_000 + sys.settrace(trace) try: l = _PyRLock() def f(): - for i in range(3000): + for i in range(loops): with l: pass t = Thread(target=f) t.start() - for i in range(3000): + for i in range(loops): with l: pass t.join() finally: sys.settrace(None) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_free_threading/test_tokenize.py b/Lib/test/test_free_threading/test_tokenize.py new file mode 100644 index 00000000000..860cfec4d71 --- /dev/null +++ b/Lib/test/test_free_threading/test_tokenize.py @@ -0,0 +1,57 @@ +import io +import time +import unittest +import tokenize +from functools import partial +from threading import Thread + +from test.support import threading_helper + + +@threading_helper.requires_working_threading() +class TestTokenize(unittest.TestCase): + def test_tokenizer_iter(self): + source = io.StringIO("for _ in a:\n pass") + it = tokenize._tokenize.TokenizerIter(source.readline, extra_tokens=False) + + tokens = [] + def next_token(it): + while True: + try: + r = next(it) + tokens.append(tokenize.TokenInfo._make(r)) + time.sleep(0.03) + except StopIteration: + return + + threads = [] + for _ in range(5): + threads.append(Thread(target=partial(next_token, it))) + + for thread in threads: + thread.start() + + for thread in threads: + thread.join() + + expected_tokens = [ + tokenize.TokenInfo(type=1, string='for', start=(1, 0), end=(1, 3), line='for _ in a:\n'), + tokenize.TokenInfo(type=1, string='_', start=(1, 4), end=(1, 5), line='for _ in a:\n'), + tokenize.TokenInfo(type=1, string='in', start=(1, 6), end=(1, 8), line='for _ in a:\n'), + tokenize.TokenInfo(type=1, string='a', start=(1, 9), end=(1, 10), line='for _ in a:\n'), + tokenize.TokenInfo(type=11, string=':', start=(1, 10), end=(1, 11), line='for _ in a:\n'), + tokenize.TokenInfo(type=4, string='', start=(1, 11), end=(1, 11), line='for _ in a:\n'), + tokenize.TokenInfo(type=5, string='', start=(2, -1), end=(2, -1), line=' pass'), + tokenize.TokenInfo(type=1, string='pass', start=(2, 2), end=(2, 6), line=' pass'), + tokenize.TokenInfo(type=4, string='', start=(2, 6), end=(2, 6), line=' pass'), + tokenize.TokenInfo(type=6, string='', start=(2, -1), end=(2, -1), line=' pass'), + tokenize.TokenInfo(type=0, string='', start=(2, -1), end=(2, -1), line=' pass'), + ] + + tokens.sort() + expected_tokens.sort() + self.assertListEqual(tokens, expected_tokens) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_free_threading/test_type.py b/Lib/test/test_free_threading/test_type.py index 1e84b2db2d4..3e565cb7ea0 100644 --- a/Lib/test/test_free_threading/test_type.py +++ b/Lib/test/test_free_threading/test_type.py @@ -5,7 +5,8 @@ from threading import Thread from unittest import TestCase -from test.support import threading_helper, import_helper +from test import support +from test.support import threading_helper @@ -97,6 +98,8 @@ def reader_func(): self.run_one(writer_func, reader_func) def test___class___modification(self): + loops = 200 + class Foo: pass @@ -106,7 +109,7 @@ class Bar: thing = Foo() def work(): foo = thing - for _ in range(10000): + for _ in range(loops): foo.__class__ = Bar type(foo) foo.__class__ = Foo diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index 27c7f70cef3..cf60c5084d6 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -8,6 +8,7 @@ # Unicode 
identifiers in tests is allowed by PEP 3131. import ast +import datetime import dis import os import re @@ -896,6 +897,7 @@ def test_missing_expression(self): "f'{:2}'", "f'''{\t\f\r\n:a}'''", "f'{:'", + "F'{[F'{:'}[F'{:'}]]]", ]) self.assertAllRaise(SyntaxError, @@ -1602,6 +1604,12 @@ def f(a): self.assertEqual(f'{f(a=4)}', '3=') self.assertEqual(x, 4) + # Check debug expressions in format spec + y = 20 + self.assertEqual(f"{2:{y=}}", "yyyyyyyyyyyyyyyyyyy2") + self.assertEqual(f"{datetime.datetime.now():h1{y=}h2{y=}h3{y=}}", + 'h1y=20h2y=20h3y=20') + # Make sure __format__ is being called. class C: def __format__(self, s): @@ -1615,9 +1623,11 @@ def __repr__(self): self.assertEqual(f'{C()=: }', 'C()=FORMAT- ') self.assertEqual(f'{C()=:x}', 'C()=FORMAT-x') self.assertEqual(f'{C()=!r:*^20}', 'C()=********REPR********') + self.assertEqual(f"{C():{20=}}", 'FORMAT-20=20') self.assertRaises(SyntaxError, eval, "f'{C=]'") + # Make sure leading and following text works. x = 'foo' self.assertEqual(f'X{x=}Y', 'Xx='+repr(x)+'Y') diff --git a/Lib/test/test_funcattrs.py b/Lib/test/test_funcattrs.py index b3fc5ad42e7..d919d62613e 100644 --- a/Lib/test/test_funcattrs.py +++ b/Lib/test/test_funcattrs.py @@ -98,7 +98,12 @@ def test___globals__(self): (AttributeError, TypeError)) def test___builtins__(self): - self.assertIs(self.b.__builtins__, __builtins__) + if __name__ == "__main__": + builtins_dict = __builtins__.__dict__ + else: + builtins_dict = __builtins__ + + self.assertIs(self.b.__builtins__, builtins_dict) self.cannot_set_attr(self.b, '__builtins__', 2, (AttributeError, TypeError)) @@ -108,7 +113,7 @@ def func(s): return len(s) ns = {} func2 = type(func)(func.__code__, ns) self.assertIs(func2.__globals__, ns) - self.assertIs(func2.__builtins__, __builtins__) + self.assertIs(func2.__builtins__, builtins_dict) # Make sure that the function actually works. self.assertEqual(func2("abc"), 3) diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index 906f9884d67..c285fbddb1b 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -392,11 +392,19 @@ def test_collect_generations(self): # each call to collect(N) x = [] gc.collect(0) - # x is now in the old gen + # x is now in gen 1 a, b, c = gc.get_count() - # We don't check a since its exact values depends on + gc.collect(1) + # x is now in gen 2 + d, e, f = gc.get_count() + gc.collect(2) + # x is now in gen 3 + g, h, i = gc.get_count() + # We don't check a, d, g since their exact values depends on # internal implementation details of the interpreter. 
self.assertEqual((b, c), (1, 0)) + self.assertEqual((e, f), (0, 1)) + self.assertEqual((h, i), (0, 0)) def test_trashcan(self): class Ouch: @@ -835,10 +843,42 @@ def test_get_objects_generations(self): self.assertTrue( any(l is element for element in gc.get_objects(generation=0)) ) - gc.collect() + self.assertFalse( + any(l is element for element in gc.get_objects(generation=1)) + ) + self.assertFalse( + any(l is element for element in gc.get_objects(generation=2)) + ) + gc.collect(generation=0) + self.assertFalse( + any(l is element for element in gc.get_objects(generation=0)) + ) + self.assertTrue( + any(l is element for element in gc.get_objects(generation=1)) + ) + self.assertFalse( + any(l is element for element in gc.get_objects(generation=2)) + ) + gc.collect(generation=1) + self.assertFalse( + any(l is element for element in gc.get_objects(generation=0)) + ) + self.assertFalse( + any(l is element for element in gc.get_objects(generation=1)) + ) + self.assertTrue( + any(l is element for element in gc.get_objects(generation=2)) + ) + gc.collect(generation=2) self.assertFalse( any(l is element for element in gc.get_objects(generation=0)) ) + self.assertFalse( + any(l is element for element in gc.get_objects(generation=1)) + ) + self.assertTrue( + any(l is element for element in gc.get_objects(generation=2)) + ) del l gc.collect() @@ -1048,70 +1088,22 @@ class Z: callback.assert_not_called() gc.enable() - -class IncrementalGCTests(unittest.TestCase): - - def setUp(self): - # Reenable GC as it is disabled module-wide - gc.enable() - - def tearDown(self): - gc.disable() - - @requires_gil_enabled("Free threading does not support incremental GC") - # Use small increments to emulate longer running process in a shorter time - @gc_threshold(200, 10) - def test_incremental_gc_handles_fast_cycle_creation(self): - - class LinkedList: - - #Use slots to reduce number of implicit objects - __slots__ = "next", "prev", "surprise" - - def __init__(self, next=None, prev=None): - self.next = next - if next is not None: - next.prev = self - self.prev = prev - if prev is not None: - prev.next = self - - def make_ll(depth): - head = LinkedList() - for i in range(depth): - head = LinkedList(head, head.prev) - return head - - head = make_ll(1000) - count = 1000 - - # There will be some objects we aren't counting, - # e.g. the gc stats dicts. This test checks - # that the counts don't grow, so we try to - # correct for the uncounted objects - # This is just an estimate. - CORRECTION = 20 - - enabled = gc.isenabled() - gc.enable() - olds = [] - for i in range(20_000): - newhead = make_ll(20) - count += 20 - newhead.surprise = head - olds.append(newhead) - if len(olds) == 20: - stats = gc.get_stats() - young = stats[0] - incremental = stats[1] - old = stats[2] - collected = young['collected'] + incremental['collected'] + old['collected'] - count += CORRECTION - live = count - collected - self.assertLess(live, 25000) - del olds[:] - if not enabled: - gc.disable() + @cpython_only + def test_get_referents_on_capsule(self): + # gh-124538: Calling gc.get_referents() on an untracked capsule must not crash. + import _datetime + import _socket + untracked_capsule = _datetime.datetime_CAPI + tracked_capsule = _socket.CAPI + + # For whoever sees this in the future: if this is failing + # after making datetime's capsule tracked, that's fine -- this isn't something + # users are relying on. Just find a different capsule that is untracked. 
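A small reproduction of the now-safe call from gh-124538, for readers unfamiliar with the bug: gc.get_referents() used to assume every argument participated in GC tracking. Which particular capsule happens to be untracked is, as the comment above says, an implementation detail.

```python
import gc
import _datetime   # C accelerator module; its C-API capsule is currently untracked

capsule = _datetime.datetime_CAPI
print(type(capsule).__name__)      # 'PyCapsule'
print(gc.is_tracked(capsule))      # False: this capsule is not GC-tracked
print(gc.get_referents(capsule))   # [] -- with the fix this no longer crashes
```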
+ self.assertFalse(gc.is_tracked(untracked_capsule)) + self.assertTrue(gc.is_tracked(tracked_capsule)) + + self.assertEqual(len(gc.get_referents(untracked_capsule)), 0) + gc.get_referents(tracked_capsule) class GCCallbackTests(unittest.TestCase): diff --git a/Lib/test/test_gdb/__init__.py b/Lib/test/test_gdb/__init__.py index 99557739af6..0dd72178023 100644 --- a/Lib/test/test_gdb/__init__.py +++ b/Lib/test/test_gdb/__init__.py @@ -24,6 +24,9 @@ if support.check_cflags_pgo(): raise unittest.SkipTest("test_gdb is not reliable on PGO builds") +if support.check_bolt_optimized(): + raise unittest.SkipTest("test_gdb is not reliable on BOLT optimized builds") + def load_tests(*args): return support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index 7b9dd36f854..0f34922727e 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -813,6 +813,56 @@ def test_deopt_and_exit(self): with self.assertRaises(Exception): self.run_cases_test(input, output) + def test_pop_on_error_peeks(self): + + input = """ + op(FIRST, (x, y -- a, b)) { + a = x; + b = y; + } + + op(SECOND, (a, b -- a, b)) { + } + + op(THIRD, (j, k --)) { + ERROR_IF(cond, error); + } + + macro(TEST) = FIRST + SECOND + THIRD; + """ + output = """ + TARGET(TEST) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(TEST); + PyObject *y; + PyObject *x; + PyObject *a; + PyObject *b; + PyObject *k; + PyObject *j; + // FIRST + y = stack_pointer[-1]; + x = stack_pointer[-2]; + { + a = x; + b = y; + } + // SECOND + { + } + // THIRD + k = b; + j = a; + { + if (cond) goto pop_2_error; + } + stack_pointer += -2; + DISPATCH(); + } + """ + self.run_cases_test(input, output) + class TestGeneratedAbstractCases(unittest.TestCase): def setUp(self) -> None: super().setUp() diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index a485a9b94c1..6203db8939a 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -2247,6 +2247,16 @@ def printsolution(self, x): ... SyntaxError: 'yield' outside function +>>> f=lambda: (yield from (1,2)), (yield from (3,4)) +Traceback (most recent call last): + ... +SyntaxError: 'yield from' outside function + +>>> yield from [1,2] +Traceback (most recent call last): + ... +SyntaxError: 'yield from' outside function + >>> def f(): x = yield = y Traceback (most recent call last): ... 
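Circling back to the test_fstring additions a few hunks up: the new assertions exercise debug expressions inside a format spec. A short sketch of why the expected output looks the way it does; the values mirror the new test cases and rely on the behaviour those tests pin down.

```python
import datetime

y = 20
# {y=} inside the format spec expands to the literal text "y=20", which is then
# parsed as the spec itself: fill character "y", "=" alignment, width 20.
print(f"{2:{y=}}")                                   # yyyyyyyyyyyyyyyyyyy2

# Objects that accept free-form specs (datetime, via strftime) simply receive
# the expanded text verbatim.
print(f"{datetime.datetime(2024, 1, 1):h1{y=}h2}")   # h1y=20h2
```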
diff --git a/Lib/test/test_getpath.py b/Lib/test/test_getpath.py index 2f7aa69efc1..6c86c3d1c8c 100644 --- a/Lib/test/test_getpath.py +++ b/Lib/test/test_getpath.py @@ -844,6 +844,7 @@ def test_explicitly_set_stdlib_dir(self): PYDEBUGEXT="", VERSION_MAJOR=9, # fixed version number for ease VERSION_MINOR=8, # of testing + ABI_THREAD="", PYWINVER=None, EXE_SUFFIX=None, diff --git a/Lib/test/test_http_cookies.py b/Lib/test/test_http_cookies.py index 925c8697f60..8879902a6e2 100644 --- a/Lib/test/test_http_cookies.py +++ b/Lib/test/test_http_cookies.py @@ -5,6 +5,7 @@ import doctest from http import cookies import pickle +from test import support class CookieTests(unittest.TestCase): @@ -58,6 +59,43 @@ def test_basic(self): for k, v in sorted(case['dict'].items()): self.assertEqual(C[k].value, v) + def test_unquote(self): + cases = [ + (r'a="b=\""', 'b="'), + (r'a="b=\\"', 'b=\\'), + (r'a="b=\="', 'b=='), + (r'a="b=\n"', 'b=n'), + (r'a="b=\042"', 'b="'), + (r'a="b=\134"', 'b=\\'), + (r'a="b=\377"', 'b=\xff'), + (r'a="b=\400"', 'b=400'), + (r'a="b=\42"', 'b=42'), + (r'a="b=\\042"', 'b=\\042'), + (r'a="b=\\134"', 'b=\\134'), + (r'a="b=\\\""', 'b=\\"'), + (r'a="b=\\\042"', 'b=\\"'), + (r'a="b=\134\""', 'b=\\"'), + (r'a="b=\134\042"', 'b=\\"'), + ] + for encoded, decoded in cases: + with self.subTest(encoded): + C = cookies.SimpleCookie() + C.load(encoded) + self.assertEqual(C['a'].value, decoded) + + @support.requires_resource('cpu') + def test_unquote_large(self): + n = 10**6 + for encoded in r'\\', r'\134': + with self.subTest(encoded): + data = 'a="b=' + encoded*n + ';"' + C = cookies.SimpleCookie() + C.load(data) + value = C['a'].value + self.assertEqual(value[:3], 'b=\\') + self.assertEqual(value[-2:], '\\;') + self.assertEqual(len(value), n + 3) + def test_load(self): C = cookies.SimpleCookie() C.load('Customer="WILE_E_COYOTE"; Version=1; Path=/acme') diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index f9e8558d1a7..3048894350e 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -111,6 +111,24 @@ def require_frozen(module, *, skip=True): def require_pure_python(module, *, skip=False): _require_loader(module, SourceFileLoader, skip) +def create_extension_loader(modname, filename): + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. + if is_apple_mobile: + return AppleFrameworkLoader(modname, filename) + else: + return ExtensionFileLoader(modname, filename) + +def import_extension_from_file(modname, filename, *, put_in_sys_modules=True): + loader = create_extension_loader(modname, filename) + spec = importlib.util.spec_from_loader(modname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + if put_in_sys_modules: + sys.modules[modname] = module + return module + + def remove_files(name): for f in (name + ".py", name + ".pyc", @@ -1913,6 +1931,37 @@ def test_absolute_circular_submodule(self): str(cm.exception), ) + @requires_singlephase_init + @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module") + def test_singlephase_circular(self): + """Regression test for gh-123950 + + Import a single-phase-init module that imports itself + from the PyInit_* function (before it's added to sys.modules). + Manages its own cache (which is `static`, and so incompatible + with multiple interpreters or interpreter reset). 
+ """ + name = '_testsinglephase_circular' + helper_name = 'test.test_import.data.circular_imports.singlephase' + with uncache(name, helper_name): + filename = _testsinglephase.__file__ + # We don't put the module in sys.modules: that the *inner* + # import should do that. + mod = import_extension_from_file(name, filename, + put_in_sys_modules=False) + + self.assertEqual(mod.helper_mod_name, helper_name) + self.assertIn(name, sys.modules) + self.assertIn(helper_name, sys.modules) + + self.assertIn(name, sys.modules) + self.assertIn(helper_name, sys.modules) + self.assertNotIn(name, sys.modules) + self.assertNotIn(helper_name, sys.modules) + self.assertIs(mod.clear_static_var(), mod) + _testinternalcapi.clear_extension('_testsinglephase_circular', + mod.__spec__.origin) + def test_unwritable_module(self): self.addCleanup(unload, "test.test_import.data.unwritable") self.addCleanup(unload, "test.test_import.data.unwritable.x") @@ -1952,14 +2001,6 @@ def pipe(self): os.set_blocking(r, False) return (r, w) - def create_extension_loader(self, modname, filename): - # Apple extensions must be distributed as frameworks. This requires - # a specialist loader. - if is_apple_mobile: - return AppleFrameworkLoader(modname, filename) - else: - return ExtensionFileLoader(modname, filename) - def import_script(self, name, fd, filename=None, check_override=None): override_text = '' if check_override is not None: @@ -2176,11 +2217,7 @@ def test_multi_init_extension_compat(self): def test_multi_init_extension_non_isolated_compat(self): modname = '_test_non_isolated' filename = _testmultiphase.__file__ - loader = self.create_extension_loader(modname, filename) - spec = importlib.util.spec_from_loader(modname, loader) - module = importlib.util.module_from_spec(spec) - loader.exec_module(module) - sys.modules[modname] = module + module = import_extension_from_file(modname, filename) require_extension(module) with self.subTest(f'{modname}: isolated'): @@ -2195,11 +2232,7 @@ def test_multi_init_extension_non_isolated_compat(self): def test_multi_init_extension_per_interpreter_gil_compat(self): modname = '_test_shared_gil_only' filename = _testmultiphase.__file__ - loader = self.create_extension_loader(modname, filename) - spec = importlib.util.spec_from_loader(modname, loader) - module = importlib.util.module_from_spec(spec) - loader.exec_module(module) - sys.modules[modname] = module + module = import_extension_from_file(modname, filename) require_extension(module) with self.subTest(f'{modname}: isolated, strict'): diff --git a/Lib/test/test_import/data/circular_imports/singlephase.py b/Lib/test/test_import/data/circular_imports/singlephase.py new file mode 100644 index 00000000000..05618bc72f9 --- /dev/null +++ b/Lib/test/test_import/data/circular_imports/singlephase.py @@ -0,0 +1,13 @@ +"""Circular import involving a single-phase-init extension. + +This module is imported from the _testsinglephase_circular module from +_testsinglephase, and imports that module again. 
+""" + +import importlib +import _testsinglephase +from test.test_import import import_extension_from_file + +name = '_testsinglephase_circular' +filename = _testsinglephase.__file__ +mod = import_extension_from_file(name, filename) diff --git a/Lib/test/test_importlib/resources/data01/__init__.py b/Lib/test/test_importlib/resources/data01/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data01/binary.file b/Lib/test/test_importlib/resources/data01/binary.file deleted file mode 100644 index eaf36c1dacc..00000000000 Binary files a/Lib/test/test_importlib/resources/data01/binary.file and /dev/null differ diff --git a/Lib/test/test_importlib/resources/data01/subdirectory/__init__.py b/Lib/test/test_importlib/resources/data01/subdirectory/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data01/subdirectory/binary.file b/Lib/test/test_importlib/resources/data01/subdirectory/binary.file deleted file mode 100644 index 5bd8bb897b1..00000000000 --- a/Lib/test/test_importlib/resources/data01/subdirectory/binary.file +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/Lib/test/test_importlib/resources/data01/utf-16.file b/Lib/test/test_importlib/resources/data01/utf-16.file deleted file mode 100644 index 2cb772295ef..00000000000 Binary files a/Lib/test/test_importlib/resources/data01/utf-16.file and /dev/null differ diff --git a/Lib/test/test_importlib/resources/data01/utf-8.file b/Lib/test/test_importlib/resources/data01/utf-8.file deleted file mode 100644 index 1c0132ad90a..00000000000 --- a/Lib/test/test_importlib/resources/data01/utf-8.file +++ /dev/null @@ -1 +0,0 @@ -Hello, UTF-8 world! diff --git a/Lib/test/test_importlib/resources/data02/__init__.py b/Lib/test/test_importlib/resources/data02/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data02/one/__init__.py b/Lib/test/test_importlib/resources/data02/one/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data02/one/resource1.txt b/Lib/test/test_importlib/resources/data02/one/resource1.txt deleted file mode 100644 index 61a813e4017..00000000000 --- a/Lib/test/test_importlib/resources/data02/one/resource1.txt +++ /dev/null @@ -1 +0,0 @@ -one resource diff --git a/Lib/test/test_importlib/resources/data02/subdirectory/subsubdir/resource.txt b/Lib/test/test_importlib/resources/data02/subdirectory/subsubdir/resource.txt deleted file mode 100644 index 48f587a2d0a..00000000000 --- a/Lib/test/test_importlib/resources/data02/subdirectory/subsubdir/resource.txt +++ /dev/null @@ -1 +0,0 @@ -a resource \ No newline at end of file diff --git a/Lib/test/test_importlib/resources/data02/two/__init__.py b/Lib/test/test_importlib/resources/data02/two/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data02/two/resource2.txt b/Lib/test/test_importlib/resources/data02/two/resource2.txt deleted file mode 100644 index a80ce46ea36..00000000000 --- a/Lib/test/test_importlib/resources/data02/two/resource2.txt +++ /dev/null @@ -1 +0,0 @@ -two resource diff --git a/Lib/test/test_importlib/resources/data03/__init__.py b/Lib/test/test_importlib/resources/data03/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data03/namespace/portion1/__init__.py 
b/Lib/test/test_importlib/resources/data03/namespace/portion1/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data03/namespace/portion2/__init__.py b/Lib/test/test_importlib/resources/data03/namespace/portion2/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/data03/namespace/resource1.txt b/Lib/test/test_importlib/resources/data03/namespace/resource1.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/namespacedata01/binary.file b/Lib/test/test_importlib/resources/namespacedata01/binary.file deleted file mode 100644 index eaf36c1dacc..00000000000 Binary files a/Lib/test/test_importlib/resources/namespacedata01/binary.file and /dev/null differ diff --git a/Lib/test/test_importlib/resources/namespacedata01/subdirectory/binary.file b/Lib/test/test_importlib/resources/namespacedata01/subdirectory/binary.file deleted file mode 100644 index 100f50643d8..00000000000 --- a/Lib/test/test_importlib/resources/namespacedata01/subdirectory/binary.file +++ /dev/null @@ -1 +0,0 @@ -  \ No newline at end of file diff --git a/Lib/test/test_importlib/resources/namespacedata01/utf-16.file b/Lib/test/test_importlib/resources/namespacedata01/utf-16.file deleted file mode 100644 index 2cb772295ef..00000000000 Binary files a/Lib/test/test_importlib/resources/namespacedata01/utf-16.file and /dev/null differ diff --git a/Lib/test/test_importlib/resources/namespacedata01/utf-8.file b/Lib/test/test_importlib/resources/namespacedata01/utf-8.file deleted file mode 100644 index 1c0132ad90a..00000000000 --- a/Lib/test/test_importlib/resources/namespacedata01/utf-8.file +++ /dev/null @@ -1 +0,0 @@ -Hello, UTF-8 world! diff --git a/Lib/test/test_importlib/resources/test_contents.py b/Lib/test/test_importlib/resources/test_contents.py index beab67ccc21..4e4e0e9c337 100644 --- a/Lib/test/test_importlib/resources/test_contents.py +++ b/Lib/test/test_importlib/resources/test_contents.py @@ -1,7 +1,6 @@ import unittest from importlib import resources -from . import data01 from . import util @@ -19,16 +18,17 @@ def test_contents(self): assert self.expected <= contents -class ContentsDiskTests(ContentsTests, unittest.TestCase): - def setUp(self): - self.data = data01 +class ContentsDiskTests(ContentsTests, util.DiskSetup, unittest.TestCase): + pass class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase): pass -class ContentsNamespaceTests(ContentsTests, unittest.TestCase): +class ContentsNamespaceTests(ContentsTests, util.DiskSetup, unittest.TestCase): + MODULE = 'namespacedata01' + expected = { # no __init__ because of namespace design 'binary.file', @@ -36,8 +36,3 @@ class ContentsNamespaceTests(ContentsTests, unittest.TestCase): 'utf-16.file', 'utf-8.file', } - - def setUp(self): - from . import namespacedata01 - - self.data = namespacedata01 diff --git a/Lib/test/test_importlib/resources/test_files.py b/Lib/test/test_importlib/resources/test_files.py index 7df6d03ead7..08b840834df 100644 --- a/Lib/test/test_importlib/resources/test_files.py +++ b/Lib/test/test_importlib/resources/test_files.py @@ -6,11 +6,7 @@ from importlib import resources from importlib.resources.abc import Traversable -from . import data01 from . import util -from . 
import _path -from test.support import os_helper -from test.support import import_helper @contextlib.contextmanager @@ -48,70 +44,96 @@ def test_old_parameter(self): resources.files(package=self.data) -class OpenDiskTests(FilesTests, unittest.TestCase): - def setUp(self): - self.data = data01 +class OpenDiskTests(FilesTests, util.DiskSetup, unittest.TestCase): + pass class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): pass -class OpenNamespaceTests(FilesTests, unittest.TestCase): - def setUp(self): - from . import namespacedata01 - - self.data = namespacedata01 +class OpenNamespaceTests(FilesTests, util.DiskSetup, unittest.TestCase): + MODULE = 'namespacedata01' class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase): ZIP_MODULE = 'namespacedata01' -class SiteDir: - def setUp(self): - self.fixtures = contextlib.ExitStack() - self.addCleanup(self.fixtures.close) - self.site_dir = self.fixtures.enter_context(os_helper.temp_dir()) - self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir)) - self.fixtures.enter_context(import_helper.isolated_modules()) +class DirectSpec: + """ + Override behavior of ModuleSetup to write a full spec directly. + """ + + MODULE = 'unused' + + def load_fixture(self, name): + self.tree_on_path(self.spec) -class ModulesFilesTests(SiteDir, unittest.TestCase): +class ModulesFiles: + spec = { + 'mod.py': '', + 'res.txt': 'resources are the best', + } + def test_module_resources(self): """ A module can have resources found adjacent to the module. """ - spec = { - 'mod.py': '', - 'res.txt': 'resources are the best', - } - _path.build(spec, self.site_dir) import mod actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8') - assert actual == spec['res.txt'] + assert actual == self.spec['res.txt'] + + +class ModuleFilesDiskTests(DirectSpec, util.DiskSetup, ModulesFiles, unittest.TestCase): + pass + + +class ModuleFilesZipTests(DirectSpec, util.ZipSetup, ModulesFiles, unittest.TestCase): + pass + +class ImplicitContextFiles: + set_val = textwrap.dedent( + """ + import importlib.resources as res + val = res.files().joinpath('res.txt').read_text(encoding='utf-8') + """ + ) + spec = { + 'somepkg': { + '__init__.py': set_val, + 'submod.py': set_val, + 'res.txt': 'resources are the best', + }, + } -class ImplicitContextFilesTests(SiteDir, unittest.TestCase): - def test_implicit_files(self): + def test_implicit_files_package(self): """ Without any parameter, files() will infer the location as the caller. """ - spec = { - 'somepkg': { - '__init__.py': textwrap.dedent( - """ - import importlib.resources as res - val = res.files().joinpath('res.txt').read_text(encoding='utf-8') - """ - ), - 'res.txt': 'resources are the best', - }, - } - _path.build(spec, self.site_dir) assert importlib.import_module('somepkg').val == 'resources are the best' + def test_implicit_files_submodule(self): + """ + Without any parameter, files() will infer the location as the caller. 
+ """ + assert importlib.import_module('somepkg.submod').val == 'resources are the best' + + +class ImplicitContextFilesDiskTests( + DirectSpec, util.DiskSetup, ImplicitContextFiles, unittest.TestCase +): + pass + + +class ImplicitContextFilesZipTests( + DirectSpec, util.ZipSetup, ImplicitContextFiles, unittest.TestCase +): + pass + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/resources/test_functional.py b/Lib/test/test_importlib/resources/test_functional.py index d60a2bedd89..4317abf3162 100644 --- a/Lib/test/test_importlib/resources/test_functional.py +++ b/Lib/test/test_importlib/resources/test_functional.py @@ -1,9 +1,12 @@ import unittest import os +import importlib -from test.support.warnings_helper import ignore_warnings, check_warnings +from test.support import warnings_helper -import importlib.resources as resources +from importlib import resources + +from . import util # Since the functional API forwards to Traversable, we only test # filesystem resources here -- not zip files, namespace packages etc. @@ -11,19 +14,27 @@ class StringAnchorMixin: - anchor01 = 'test.test_importlib.resources.data01' - anchor02 = 'test.test_importlib.resources.data02' + anchor01 = 'data01' + anchor02 = 'data02' class ModuleAnchorMixin: - from . import data01 as anchor01 - from . import data02 as anchor02 + @property + def anchor01(self): + return importlib.import_module('data01') + + @property + def anchor02(self): + return importlib.import_module('data02') + +class FunctionalAPIBase(util.DiskSetup): + def setUp(self): + super().setUp() + self.load_fixture('data02') -class FunctionalAPIBase: def _gen_resourcetxt_path_parts(self): - """Yield various names of a text file in anchor02, each in a subTest - """ + """Yield various names of a text file in anchor02, each in a subTest""" for path_parts in ( ('subdirectory', 'subsubdir', 'resource.txt'), ('subdirectory/subsubdir/resource.txt',), @@ -36,7 +47,7 @@ def assertEndsWith(self, string, suffix): """Assert that `string` ends with `suffix`. 
Used to ignore an architecture-specific UTF-16 byte-order mark.""" - self.assertEqual(string[-len(suffix):], suffix) + self.assertEqual(string[-len(suffix) :], suffix) def test_read_text(self): self.assertEqual( @@ -45,7 +56,10 @@ def test_read_text(self): ) self.assertEqual( resources.read_text( - self.anchor02, 'subdirectory', 'subsubdir', 'resource.txt', + self.anchor02, + 'subdirectory', + 'subsubdir', + 'resource.txt', encoding='utf-8', ), 'a resource', @@ -53,7 +67,9 @@ def test_read_text(self): for path_parts in self._gen_resourcetxt_path_parts(): self.assertEqual( resources.read_text( - self.anchor02, *path_parts, encoding='utf-8', + self.anchor02, + *path_parts, + encoding='utf-8', ), 'a resource', ) @@ -67,13 +83,16 @@ def test_read_text(self): resources.read_text(self.anchor01, 'utf-16.file') self.assertEqual( resources.read_text( - self.anchor01, 'binary.file', encoding='latin1', + self.anchor01, + 'binary.file', + encoding='latin1', ), '\x00\x01\x02\x03', ) self.assertEndsWith( # ignore the BOM resources.read_text( - self.anchor01, 'utf-16.file', + self.anchor01, + 'utf-16.file', errors='backslashreplace', ), 'Hello, UTF-16 world!\n'.encode('utf-16-le').decode( @@ -97,7 +116,8 @@ def test_open_text(self): self.assertEqual(f.read(), 'Hello, UTF-8 world!\n') for path_parts in self._gen_resourcetxt_path_parts(): with resources.open_text( - self.anchor02, *path_parts, + self.anchor02, + *path_parts, encoding='utf-8', ) as f: self.assertEqual(f.read(), 'a resource') @@ -111,11 +131,14 @@ def test_open_text(self): with self.assertRaises(UnicodeDecodeError): f.read() with resources.open_text( - self.anchor01, 'binary.file', encoding='latin1', + self.anchor01, + 'binary.file', + encoding='latin1', ) as f: self.assertEqual(f.read(), '\x00\x01\x02\x03') with resources.open_text( - self.anchor01, 'utf-16.file', + self.anchor01, + 'utf-16.file', errors='backslashreplace', ) as f: self.assertEndsWith( # ignore the BOM @@ -130,16 +153,17 @@ def test_open_binary(self): self.assertEqual(f.read(), b'Hello, UTF-8 world!\n') for path_parts in self._gen_resourcetxt_path_parts(): with resources.open_binary( - self.anchor02, *path_parts, + self.anchor02, + *path_parts, ) as f: self.assertEqual(f.read(), b'a resource') def test_path(self): with resources.path(self.anchor01, 'utf-8.file') as path: - with open(str(path)) as f: + with open(str(path), encoding='utf-8') as f: self.assertEqual(f.read(), 'Hello, UTF-8 world!\n') with resources.path(self.anchor01) as path: - with open(os.path.join(path, 'utf-8.file')) as f: + with open(os.path.join(path, 'utf-8.file'), encoding='utf-8') as f: self.assertEqual(f.read(), 'Hello, UTF-8 world!\n') def test_is_resource(self): @@ -152,32 +176,32 @@ def test_is_resource(self): self.assertTrue(is_resource(self.anchor02, *path_parts)) def test_contents(self): - is_resource = resources.is_resource - with check_warnings((".*contents.*", DeprecationWarning)): + with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)): c = resources.contents(self.anchor01) self.assertGreaterEqual( set(c), {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'}, ) - with ( - self.assertRaises(OSError), - check_warnings((".*contents.*", DeprecationWarning)), - ): + with self.assertRaises(OSError), warnings_helper.check_warnings(( + ".*contents.*", + DeprecationWarning, + )): list(resources.contents(self.anchor01, 'utf-8.file')) + for path_parts in self._gen_resourcetxt_path_parts(): - with ( - self.assertRaises(OSError), - check_warnings((".*contents.*", 
DeprecationWarning)), - ): + with self.assertRaises(OSError), warnings_helper.check_warnings(( + ".*contents.*", + DeprecationWarning, + )): list(resources.contents(self.anchor01, *path_parts)) - with check_warnings((".*contents.*", DeprecationWarning)): + with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)): c = resources.contents(self.anchor01, 'subdirectory') self.assertGreaterEqual( set(c), {'binary.file'}, ) - @ignore_warnings(category=DeprecationWarning) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test_common_errors(self): for func in ( resources.read_text, @@ -208,18 +232,24 @@ def test_text_errors(self): # Multiple path arguments need explicit encoding argument. with self.assertRaises(TypeError): func( - self.anchor02, 'subdirectory', - 'subsubdir', 'resource.txt', + self.anchor02, + 'subdirectory', + 'subsubdir', + 'resource.txt', ) class FunctionalAPITest_StringAnchor( - unittest.TestCase, FunctionalAPIBase, StringAnchorMixin, + StringAnchorMixin, + FunctionalAPIBase, + unittest.TestCase, ): pass class FunctionalAPITest_ModuleAnchor( - unittest.TestCase, FunctionalAPIBase, ModuleAnchorMixin, + ModuleAnchorMixin, + FunctionalAPIBase, + unittest.TestCase, ): pass diff --git a/Lib/test/test_importlib/resources/test_open.py b/Lib/test/test_importlib/resources/test_open.py index 3b6b2142ef4..8c00378ad3c 100644 --- a/Lib/test/test_importlib/resources/test_open.py +++ b/Lib/test/test_importlib/resources/test_open.py @@ -1,7 +1,6 @@ import unittest from importlib import resources -from . import data01 from . import util @@ -65,16 +64,12 @@ def test_open_text_FileNotFoundError(self): target.open(encoding='utf-8') -class OpenDiskTests(OpenTests, unittest.TestCase): - def setUp(self): - self.data = data01 - +class OpenDiskTests(OpenTests, util.DiskSetup, unittest.TestCase): + pass -class OpenDiskNamespaceTests(OpenTests, unittest.TestCase): - def setUp(self): - from . import namespacedata01 - self.data = namespacedata01 +class OpenDiskNamespaceTests(OpenTests, util.DiskSetup, unittest.TestCase): + MODULE = 'namespacedata01' class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): @@ -82,7 +77,7 @@ class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase): - ZIP_MODULE = 'namespacedata01' + MODULE = 'namespacedata01' if __name__ == '__main__': diff --git a/Lib/test/test_importlib/resources/test_path.py b/Lib/test/test_importlib/resources/test_path.py index 90b22905ab8..378dc7a2bae 100644 --- a/Lib/test/test_importlib/resources/test_path.py +++ b/Lib/test/test_importlib/resources/test_path.py @@ -3,7 +3,6 @@ import unittest from importlib import resources -from . import data01 from . import util @@ -25,9 +24,7 @@ def test_reading(self): self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8')) -class PathDiskTests(PathTests, unittest.TestCase): - data = data01 - +class PathDiskTests(PathTests, util.DiskSetup, unittest.TestCase): def test_natural_path(self): # Guarantee the internal implementation detail that # file-system-backed resources do not get the tempdir diff --git a/Lib/test/test_importlib/resources/test_read.py b/Lib/test/test_importlib/resources/test_read.py index 984feecbb9e..59c237d9641 100644 --- a/Lib/test/test_importlib/resources/test_read.py +++ b/Lib/test/test_importlib/resources/test_read.py @@ -1,7 +1,7 @@ import unittest from importlib import import_module, resources -from . import data01 + from . 
import util @@ -51,8 +51,8 @@ def test_read_text_with_errors(self): ) -class ReadDiskTests(ReadTests, unittest.TestCase): - data = data01 +class ReadDiskTests(ReadTests, util.DiskSetup, unittest.TestCase): + pass class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): @@ -68,15 +68,12 @@ def test_read_submodule_resource_by_name(self): self.assertEqual(result, bytes(range(4, 8))) -class ReadNamespaceTests(ReadTests, unittest.TestCase): - def setUp(self): - from . import namespacedata01 - - self.data = namespacedata01 +class ReadNamespaceTests(ReadTests, util.DiskSetup, unittest.TestCase): + MODULE = 'namespacedata01' class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase): - ZIP_MODULE = 'namespacedata01' + MODULE = 'namespacedata01' def test_read_submodule_resource(self): submodule = import_module('namespacedata01.subdirectory') diff --git a/Lib/test/test_importlib/resources/test_reader.py b/Lib/test/test_importlib/resources/test_reader.py index dac9c2a892f..ed5693ab416 100644 --- a/Lib/test/test_importlib/resources/test_reader.py +++ b/Lib/test/test_importlib/resources/test_reader.py @@ -1,16 +1,21 @@ import os.path -import sys import pathlib import unittest from importlib import import_module from importlib.readers import MultiplexedPath, NamespaceReader +from . import util -class MultiplexedPathTest(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.folder = pathlib.Path(__file__).parent / 'namespacedata01' + +class MultiplexedPathTest(util.DiskSetup, unittest.TestCase): + MODULE = 'namespacedata01' + + def setUp(self): + super().setUp() + self.folder = pathlib.Path(self.data.__path__[0]) + self.data01 = pathlib.Path(self.load_fixture('data01').__file__).parent + self.data02 = pathlib.Path(self.load_fixture('data02').__file__).parent def test_init_no_paths(self): with self.assertRaises(FileNotFoundError): @@ -31,9 +36,8 @@ def test_iterdir(self): ) def test_iterdir_duplicate(self): - data01 = pathlib.Path(__file__).parent.joinpath('data01') contents = { - path.name for path in MultiplexedPath(self.folder, data01).iterdir() + path.name for path in MultiplexedPath(self.folder, self.data01).iterdir() } for remove in ('__pycache__', '__init__.pyc'): try: @@ -61,9 +65,8 @@ def test_open_file(self): path.open() def test_join_path(self): - data01 = pathlib.Path(__file__).parent.joinpath('data01') - prefix = str(data01.parent) - path = MultiplexedPath(self.folder, data01) + prefix = str(self.folder.parent) + path = MultiplexedPath(self.folder, self.data01) self.assertEqual( str(path.joinpath('binary.file'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'binary.file'), @@ -83,10 +86,8 @@ def test_join_path_compound(self): assert not path.joinpath('imaginary/foo.py').exists() def test_join_path_common_subdir(self): - data01 = pathlib.Path(__file__).parent.joinpath('data01') - data02 = pathlib.Path(__file__).parent.joinpath('data02') - prefix = str(data01.parent) - path = MultiplexedPath(data01, data02) + prefix = str(self.data02.parent) + path = MultiplexedPath(self.data01, self.data02) self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath) self.assertEqual( str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :], @@ -106,16 +107,8 @@ def test_name(self): ) -class NamespaceReaderTest(unittest.TestCase): - site_dir = str(pathlib.Path(__file__).parent) - - @classmethod - def setUpClass(cls): - sys.path.append(cls.site_dir) - - @classmethod - def tearDownClass(cls): - sys.path.remove(cls.site_dir) +class 
NamespaceReaderTest(util.DiskSetup, unittest.TestCase): + MODULE = 'namespacedata01' def test_init_error(self): with self.assertRaises(ValueError): @@ -125,7 +118,7 @@ def test_resource_path(self): namespacedata01 = import_module('namespacedata01') reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) - root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + root = self.data.__path__[0] self.assertEqual( reader.resource_path('binary.file'), os.path.join(root, 'binary.file') ) @@ -134,9 +127,8 @@ def test_resource_path(self): ) def test_files(self): - namespacedata01 = import_module('namespacedata01') - reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) - root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + reader = NamespaceReader(self.data.__spec__.submodule_search_locations) + root = self.data.__path__[0] self.assertIsInstance(reader.files(), MultiplexedPath) self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')") diff --git a/Lib/test/test_importlib/resources/test_resource.py b/Lib/test/test_importlib/resources/test_resource.py index d1d45d9b461..fcede14b891 100644 --- a/Lib/test/test_importlib/resources/test_resource.py +++ b/Lib/test/test_importlib/resources/test_resource.py @@ -1,8 +1,5 @@ -import sys import unittest -import pathlib -from . import data01 from . import util from importlib import resources, import_module @@ -24,9 +21,8 @@ def test_is_dir(self): self.assertTrue(target.is_dir()) -class ResourceDiskTests(ResourceTests, unittest.TestCase): - def setUp(self): - self.data = data01 +class ResourceDiskTests(ResourceTests, util.DiskSetup, unittest.TestCase): + pass class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): @@ -37,33 +33,39 @@ def names(traversable): return {item.name for item in traversable.iterdir()} -class ResourceLoaderTests(unittest.TestCase): +class ResourceLoaderTests(util.DiskSetup, unittest.TestCase): def test_resource_contents(self): package = util.create_package( - file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + file=self.data, path=self.data.__file__, contents=['A', 'B', 'C'] ) self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'}) def test_is_file(self): package = util.create_package( - file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + file=self.data, + path=self.data.__file__, + contents=['A', 'B', 'C', 'D/E', 'D/F'], ) self.assertTrue(resources.files(package).joinpath('B').is_file()) def test_is_dir(self): package = util.create_package( - file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + file=self.data, + path=self.data.__file__, + contents=['A', 'B', 'C', 'D/E', 'D/F'], ) self.assertTrue(resources.files(package).joinpath('D').is_dir()) def test_resource_missing(self): package = util.create_package( - file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + file=self.data, + path=self.data.__file__, + contents=['A', 'B', 'C', 'D/E', 'D/F'], ) self.assertFalse(resources.files(package).joinpath('Z').is_file()) -class ResourceCornerCaseTests(unittest.TestCase): +class ResourceCornerCaseTests(util.DiskSetup, unittest.TestCase): def test_package_has_no_reader_fallback(self): """ Test odd ball packages which: @@ -72,7 +74,7 @@ def test_package_has_no_reader_fallback(self): # 3. 
Are not in a zip file """ module = util.create_package( - file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + file=self.data, path=self.data.__file__, contents=['A', 'B', 'C'] ) # Give the module a dummy loader. module.__loader__ = object() @@ -83,9 +85,7 @@ def test_package_has_no_reader_fallback(self): self.assertFalse(resources.files(module).joinpath('A').is_file()) -class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): - ZIP_MODULE = 'data01' - +class ResourceFromZipsTest01(util.ZipSetup, unittest.TestCase): def test_is_submodule_resource(self): submodule = import_module('data01.subdirectory') self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file()) @@ -116,8 +116,8 @@ def test_as_file_directory(self): assert not data.parent.exists() -class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): - ZIP_MODULE = 'data02' +class ResourceFromZipsTest02(util.ZipSetup, unittest.TestCase): + MODULE = 'data02' def test_unrelated_contents(self): """ @@ -134,7 +134,7 @@ def test_unrelated_contents(self): ) -class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase): +class DeletingZipsTest(util.ZipSetup, unittest.TestCase): """Having accessed resources in a zip file should not keep an open reference to the zip. """ @@ -216,24 +216,20 @@ def test_submodule_sub_contents_by_name(self): self.assertEqual(contents, {'binary.file'}) -class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase): - site_dir = str(pathlib.Path(__file__).parent) - - @classmethod - def setUpClass(cls): - sys.path.append(cls.site_dir) - - @classmethod - def tearDownClass(cls): - sys.path.remove(cls.site_dir) +class ResourceFromNamespaceDiskTests( + util.DiskSetup, + ResourceFromNamespaceTests, + unittest.TestCase, +): + MODULE = 'namespacedata01' class ResourceFromNamespaceZipTests( - util.ZipSetupBase, + util.ZipSetup, ResourceFromNamespaceTests, unittest.TestCase, ): - ZIP_MODULE = 'namespacedata01' + MODULE = 'namespacedata01' if __name__ == '__main__': diff --git a/Lib/test/test_importlib/resources/update-zips.py b/Lib/test/test_importlib/resources/update-zips.py deleted file mode 100755 index 231334aa7e3..00000000000 --- a/Lib/test/test_importlib/resources/update-zips.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Generate the zip test data files. - -Run to build the tests/zipdataNN/ziptestdata.zip files from -files in tests/dataNN. - -Replaces the file with the working copy, but does commit anything -to the source repo. -""" - -import contextlib -import os -import pathlib -import zipfile - - -def main(): - """ - >>> from unittest import mock - >>> monkeypatch = getfixture('monkeypatch') - >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock()) - >>> print(); main() # print workaround for bpo-32509 - - ...data01... -> ziptestdata/... - ... - ...data02... -> ziptestdata/... - ... 
- """ - suffixes = '01', '02' - tuple(map(generate, suffixes)) - - -def generate(suffix): - root = pathlib.Path(__file__).parent.relative_to(os.getcwd()) - zfpath = root / f'zipdata{suffix}/ziptestdata.zip' - with zipfile.ZipFile(zfpath, 'w') as zf: - for src, rel in walk(root / f'data{suffix}'): - dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix()) - print(src, '->', dst) - zf.write(src, dst) - - -def walk(datapath): - for dirpath, dirnames, filenames in os.walk(datapath): - with contextlib.suppress(ValueError): - dirnames.remove('__pycache__') - for filename in filenames: - res = pathlib.Path(dirpath) / filename - rel = res.relative_to(datapath) - yield res, rel - - -__name__ == '__main__' and main() diff --git a/Lib/test/test_importlib/resources/util.py b/Lib/test/test_importlib/resources/util.py index d4bf3e6cc5d..e2d995f5963 100644 --- a/Lib/test/test_importlib/resources/util.py +++ b/Lib/test/test_importlib/resources/util.py @@ -6,10 +6,10 @@ import pathlib import contextlib -from . import data01 from importlib.resources.abc import ResourceReader from test.support import import_helper, os_helper from . import zip as zip_ +from . import _path from importlib.machinery import ModuleSpec @@ -68,7 +68,7 @@ def create_package(file=None, path=None, is_package=True, contents=()): ) -class CommonTests(metaclass=abc.ABCMeta): +class CommonTestsBase(metaclass=abc.ABCMeta): """ Tests shared by test_open, test_path, and test_read. """ @@ -84,34 +84,34 @@ def test_package_name(self): """ Passing in the package name should succeed. """ - self.execute(data01.__name__, 'utf-8.file') + self.execute(self.data.__name__, 'utf-8.file') def test_package_object(self): """ Passing in the package itself should succeed. """ - self.execute(data01, 'utf-8.file') + self.execute(self.data, 'utf-8.file') def test_string_path(self): """ Passing in a string for the path should succeed. """ path = 'utf-8.file' - self.execute(data01, path) + self.execute(self.data, path) def test_pathlib_path(self): """ Passing in a pathlib.PurePath object for the path should succeed. """ path = pathlib.PurePath('utf-8.file') - self.execute(data01, path) + self.execute(self.data, path) def test_importing_module_as_side_effect(self): """ The anchor package can already be imported. 
""" - del sys.modules[data01.__name__] - self.execute(data01.__name__, 'utf-8.file') + del sys.modules[self.data.__name__] + self.execute(self.data.__name__, 'utf-8.file') def test_missing_path(self): """ @@ -141,24 +141,66 @@ def test_useless_loader(self): self.execute(package, 'utf-8.file') -class ZipSetupBase: - ZIP_MODULE = 'data01' - +fixtures = dict( + data01={ + '__init__.py': '', + 'binary.file': bytes(range(4)), + 'utf-16.file': '\ufeffHello, UTF-16 world!\n'.encode('utf-16-le'), + 'utf-8.file': 'Hello, UTF-8 world!\n'.encode('utf-8'), + 'subdirectory': { + '__init__.py': '', + 'binary.file': bytes(range(4, 8)), + }, + }, + data02={ + '__init__.py': '', + 'one': {'__init__.py': '', 'resource1.txt': 'one resource'}, + 'two': {'__init__.py': '', 'resource2.txt': 'two resource'}, + 'subdirectory': {'subsubdir': {'resource.txt': 'a resource'}}, + }, + namespacedata01={ + 'binary.file': bytes(range(4)), + 'utf-16.file': '\ufeffHello, UTF-16 world!\n'.encode('utf-16-le'), + 'utf-8.file': 'Hello, UTF-8 world!\n'.encode('utf-8'), + 'subdirectory': { + 'binary.file': bytes(range(12, 16)), + }, + }, +) + + +class ModuleSetup: def setUp(self): self.fixtures = contextlib.ExitStack() self.addCleanup(self.fixtures.close) self.fixtures.enter_context(import_helper.isolated_modules()) + self.data = self.load_fixture(self.MODULE) + + def load_fixture(self, module): + self.tree_on_path({module: fixtures[module]}) + return importlib.import_module(module) + + +class ZipSetup(ModuleSetup): + MODULE = 'data01' + def tree_on_path(self, spec): temp_dir = self.fixtures.enter_context(os_helper.temp_dir()) modules = pathlib.Path(temp_dir) / 'zipped modules.zip' - src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE) self.fixtures.enter_context( - import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules))) + import_helper.DirsOnSysPath(str(zip_.make_zip_file(spec, modules))) ) - self.data = importlib.import_module(self.ZIP_MODULE) + +class DiskSetup(ModuleSetup): + MODULE = 'data01' + + def tree_on_path(self, spec): + temp_dir = self.fixtures.enter_context(os_helper.temp_dir()) + _path.build(spec, pathlib.Path(temp_dir)) + self.fixtures.enter_context(import_helper.DirsOnSysPath(temp_dir)) -class ZipSetup(ZipSetupBase): +class CommonTests(DiskSetup, CommonTestsBase): pass diff --git a/Lib/test/test_importlib/resources/zip.py b/Lib/test/test_importlib/resources/zip.py index 4dcf6facc77..fc453f02060 100755 --- a/Lib/test/test_importlib/resources/zip.py +++ b/Lib/test/test_importlib/resources/zip.py @@ -2,29 +2,23 @@ Generate zip test data files. """ -import contextlib -import os -import pathlib import zipfile -def make_zip_file(src, dst): +def make_zip_file(tree, dst): """ - Zip the files in src into a new zipfile at dst. + Zip the files in tree into a new zipfile at dst. 
""" with zipfile.ZipFile(dst, 'w') as zf: - for src_path, rel in walk(src): - dst_name = src.name / pathlib.PurePosixPath(rel.as_posix()) - zf.write(src_path, dst_name) + for name, contents in walk(tree): + zf.writestr(name, contents) zipfile._path.CompleteDirs.inject(zf) return dst -def walk(datapath): - for dirpath, dirnames, filenames in os.walk(datapath): - with contextlib.suppress(ValueError): - dirnames.remove('__pycache__') - for filename in filenames: - res = pathlib.Path(dirpath) / filename - rel = res.relative_to(datapath) - yield res, rel +def walk(tree, prefix=''): + for name, contents in tree.items(): + if isinstance(contents, dict): + yield from walk(contents, prefix=f'{prefix}{name}/') + else: + yield f'{prefix}{name}', contents diff --git a/Lib/test/test_importlib/resources/zipdata01/__init__.py b/Lib/test/test_importlib/resources/zipdata01/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/zipdata01/ziptestdata.zip b/Lib/test/test_importlib/resources/zipdata01/ziptestdata.zip deleted file mode 100644 index 9a3bb0739f8..00000000000 Binary files a/Lib/test/test_importlib/resources/zipdata01/ziptestdata.zip and /dev/null differ diff --git a/Lib/test/test_importlib/resources/zipdata02/__init__.py b/Lib/test/test_importlib/resources/zipdata02/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/Lib/test/test_importlib/resources/zipdata02/ziptestdata.zip b/Lib/test/test_importlib/resources/zipdata02/ziptestdata.zip deleted file mode 100644 index d63ff512d28..00000000000 Binary files a/Lib/test/test_importlib/resources/zipdata02/ziptestdata.zip and /dev/null differ diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py index 603125f6d92..1a777732551 100644 --- a/Lib/test/test_importlib/test_abc.py +++ b/Lib/test/test_importlib/test_abc.py @@ -913,5 +913,30 @@ def test_universal_newlines(self): SourceOnlyLoaderMock=SPLIT_SOL) +class DeprecatedAttrsTests: + + """Test the deprecated attributes can be accessed.""" + + def test_deprecated_attr_ResourceReader(self): + with self.assertWarns(DeprecationWarning): + self.abc.ResourceReader + del self.abc.ResourceReader + + def test_deprecated_attr_Traversable(self): + with self.assertWarns(DeprecationWarning): + self.abc.Traversable + del self.abc.Traversable + + def test_deprecated_attr_TraversableResources(self): + with self.assertWarns(DeprecationWarning): + self.abc.TraversableResources + del self.abc.TraversableResources + + +(Frozen_DeprecatedAttrsTests, + Source_DeprecatedAttrsTests + ) = test_util.test_both(DeprecatedAttrsTests, abc=abc) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index 668042782bd..d001b72d81f 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -634,7 +634,7 @@ def test_magic_number(self): # stakeholders such as OS package maintainers must be notified # in advance. Such exceptional releases will then require an # adjustment to this test case. 
- EXPECTED_MAGIC_NUMBER = 3495 + EXPECTED_MAGIC_NUMBER = 3571 actual = int.from_bytes(importlib.util.MAGIC_NUMBER[:2], 'little') msg = ( diff --git a/Lib/test/test_inspect/inspect_fodder2.py b/Lib/test/test_inspect/inspect_fodder2.py index bb9d3e88cfb..43fda662253 100644 --- a/Lib/test/test_inspect/inspect_fodder2.py +++ b/Lib/test/test_inspect/inspect_fodder2.py @@ -315,3 +315,57 @@ def g(): class ClassWithCodeObject: import sys code = sys._getframe(0).f_code + +import enum + +# line 321 +class enum322(enum.Enum): + A = 'a' + +# line 325 +class enum326(enum.IntEnum): + A = 1 + +# line 329 +class flag330(enum.Flag): + A = 1 + +# line 333 +class flag334(enum.IntFlag): + A = 1 + +# line 337 +simple_enum338 = enum.Enum('simple_enum338', 'A') +simple_enum339 = enum.IntEnum('simple_enum339', 'A') +simple_flag340 = enum.Flag('simple_flag340', 'A') +simple_flag341 = enum.IntFlag('simple_flag341', 'A') + +import typing + +# line 345 +class nt346(typing.NamedTuple): + x: int + y: int + +# line 350 +nt351 = typing.NamedTuple('nt351', (('x', int), ('y', int))) + +# line 353 +class td354(typing.TypedDict): + x: int + y: int + +# line 358 +td359 = typing.TypedDict('td359', (('x', int), ('y', int))) + +import dataclasses + +# line 363 +@dataclasses.dataclass +class dc364: + x: int + y: int + +# line 369 +dc370 = dataclasses.make_dataclass('dc370', (('x', int), ('y', int))) +dc371 = dataclasses.make_dataclass('dc370', (('x', int), ('y', int)), module=__name__) diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py index 5d0f32884d1..e2e4f1230ab 100644 --- a/Lib/test/test_inspect/test_inspect.py +++ b/Lib/test/test_inspect/test_inspect.py @@ -405,6 +405,8 @@ def test_isroutine(self): self.assertFalse(inspect.isroutine(type)) self.assertFalse(inspect.isroutine(int)) self.assertFalse(inspect.isroutine(type('some_class', (), {}))) + # partial + self.assertFalse(inspect.isroutine(functools.partial(mod.spam))) def test_isclass(self): self.istest(inspect.isclass, 'mod.StupidGit') @@ -822,7 +824,7 @@ def test_getsource_on_code_object(self): self.assertSourceEqual(mod.eggs.__code__, 12, 18) def test_getsource_on_generated_class(self): - A = type('A', (), {}) + A = type('A', (unittest.TestCase,), {}) self.assertEqual(inspect.getsourcefile(A), __file__) self.assertEqual(inspect.getfile(A), __file__) self.assertIs(inspect.getmodule(A), sys.modules[__name__]) @@ -836,6 +838,47 @@ class C: nonlocal __firstlineno__ self.assertRaises(OSError, inspect.getsource, C) +class TestGetsourceStdlib(unittest.TestCase): + # Test Python implementations of the stdlib modules + + def test_getsource_stdlib_collections_abc(self): + import collections.abc + lines, lineno = inspect.getsourcelines(collections.abc.Sequence) + self.assertEqual(lines[0], 'class Sequence(Reversible, Collection):\n') + src = inspect.getsource(collections.abc.Sequence) + self.assertEqual(src.splitlines(True), lines) + + def test_getsource_stdlib_tomllib(self): + import tomllib + self.assertRaises(OSError, inspect.getsource, tomllib.TOMLDecodeError) + self.assertRaises(OSError, inspect.getsourcelines, tomllib.TOMLDecodeError) + + def test_getsource_stdlib_abc(self): + # Pure Python implementation + abc = import_helper.import_fresh_module('abc', blocked=['_abc']) + with support.swap_item(sys.modules, 'abc', abc): + self.assertRaises(OSError, inspect.getsource, abc.ABCMeta) + self.assertRaises(OSError, inspect.getsourcelines, abc.ABCMeta) + # With C acceleration + import abc + try: + src = inspect.getsource(abc.ABCMeta) + 
lines, lineno = inspect.getsourcelines(abc.ABCMeta) + except OSError: + pass + else: + self.assertEqual(lines[0], ' class ABCMeta(type):\n') + self.assertEqual(src.splitlines(True), lines) + + def test_getsource_stdlib_decimal(self): + # Pure Python implementation + decimal = import_helper.import_fresh_module('decimal', blocked=['_decimal']) + with support.swap_item(sys.modules, 'decimal', decimal): + src = inspect.getsource(decimal.Decimal) + lines, lineno = inspect.getsourcelines(decimal.Decimal) + self.assertEqual(lines[0], 'class Decimal(object):\n') + self.assertEqual(src.splitlines(True), lines) + class TestGetsourceInteractive(unittest.TestCase): def test_getclasses_interactive(self): # bpo-44648: simulate a REPL session; @@ -930,6 +973,29 @@ def test_anonymous(self): # as argument to another function. self.assertSourceEqual(mod2.anonymous, 55, 55) + def test_enum(self): + self.assertSourceEqual(mod2.enum322, 322, 323) + self.assertSourceEqual(mod2.enum326, 326, 327) + self.assertSourceEqual(mod2.flag330, 330, 331) + self.assertSourceEqual(mod2.flag334, 334, 335) + self.assertRaises(OSError, inspect.getsource, mod2.simple_enum338) + self.assertRaises(OSError, inspect.getsource, mod2.simple_enum339) + self.assertRaises(OSError, inspect.getsource, mod2.simple_flag340) + self.assertRaises(OSError, inspect.getsource, mod2.simple_flag341) + + def test_namedtuple(self): + self.assertSourceEqual(mod2.nt346, 346, 348) + self.assertRaises(OSError, inspect.getsource, mod2.nt351) + + def test_typeddict(self): + self.assertSourceEqual(mod2.td354, 354, 356) + self.assertRaises(OSError, inspect.getsource, mod2.td359) + + def test_dataclass(self): + self.assertSourceEqual(mod2.dc364, 364, 367) + self.assertRaises(OSError, inspect.getsource, mod2.dc370) + self.assertRaises(OSError, inspect.getsource, mod2.dc371) + class TestBlockComments(GetSourceBase): fodderModule = mod @@ -993,7 +1059,7 @@ def test_findsource_without_filename(self): self.assertRaises(IOError, inspect.findsource, co) self.assertRaises(IOError, inspect.getsource, co) - def test_findsource_with_out_of_bounds_lineno(self): + def test_findsource_on_func_with_out_of_bounds_lineno(self): mod_len = len(inspect.getsource(mod)) src = '\n' * 2* mod_len + "def f(): pass" co = compile(src, mod.__file__, "exec") @@ -1001,9 +1067,20 @@ def test_findsource_with_out_of_bounds_lineno(self): eval(co, g, l) func = l['f'] self.assertEqual(func.__code__.co_firstlineno, 1+2*mod_len) - with self.assertRaisesRegex(IOError, "lineno is out of bounds"): + with self.assertRaisesRegex(OSError, "lineno is out of bounds"): inspect.findsource(func) + def test_findsource_on_class_with_out_of_bounds_lineno(self): + mod_len = len(inspect.getsource(mod)) + src = '\n' * 2* mod_len + "class A: pass" + co = compile(src, mod.__file__, "exec") + g, l = {'__name__': mod.__name__}, {} + eval(co, g, l) + cls = l['A'] + self.assertEqual(cls.__firstlineno__, 1+2*mod_len) + with self.assertRaisesRegex(OSError, "lineno is out of bounds"): + inspect.findsource(cls) + def test_getsource_on_method(self): self.assertSourceEqual(mod2.ClassWithMethod.method, 118, 119) @@ -1551,6 +1628,56 @@ def f(self): self.assertIn(('f', b.f), inspect.getmembers(b)) self.assertIn(('f', b.f), inspect.getmembers(b, inspect.ismethod)) + def test_getmembers_custom_dir(self): + class CorrectDir: + def __init__(self, attr): + self.attr = attr + def method(self): + return self.attr + 1 + def __dir__(self): + return ['attr', 'method'] + + cd = CorrectDir(5) + self.assertEqual(inspect.getmembers(cd), [ + 
('attr', 5), + ('method', cd.method), + ]) + self.assertEqual(inspect.getmembers(cd, inspect.ismethod), [ + ('method', cd.method), + ]) + + def test_getmembers_custom_broken_dir(self): + # inspect.getmembers calls `dir()` on the passed object inside. + # if `__dir__` mentions some non-existent attribute, + # we still need to return others correctly. + class BrokenDir: + existing = 1 + def method(self): + return self.existing + 1 + def __dir__(self): + return ['method', 'missing', 'existing'] + + bd = BrokenDir() + self.assertEqual(inspect.getmembers(bd), [ + ('existing', 1), + ('method', bd.method), + ]) + self.assertEqual(inspect.getmembers(bd, inspect.ismethod), [ + ('method', bd.method), + ]) + + def test_getmembers_custom_duplicated_dir(self): + # Duplicates in `__dir__` must not fail and return just one result. + class DuplicatedDir: + attr = 1 + def __dir__(self): + return ['attr', 'attr'] + + dd = DuplicatedDir() + self.assertEqual(inspect.getmembers(dd), [ + ('attr', 1), + ]) + def test_getmembers_VirtualAttribute(self): class M(type): def __getattr__(cls, name): @@ -1906,6 +2033,7 @@ def function(): self.assertFalse(inspect.ismethoddescriptor(Owner.static_method)) self.assertFalse(inspect.ismethoddescriptor(function)) self.assertFalse(inspect.ismethoddescriptor(a_lambda)) + self.assertFalse(inspect.ismethoddescriptor(functools.partial(function))) def test_descriptor_being_a_class(self): class MethodDescriptorMeta(type): diff --git a/Lib/test/test_interpreters/test_channels.py b/Lib/test/test_interpreters/test_channels.py index 68cc45d1a5e..eada18f99d0 100644 --- a/Lib/test/test_interpreters/test_channels.py +++ b/Lib/test/test_interpreters/test_channels.py @@ -48,6 +48,7 @@ def test_list_all(self): self.assertEqual(after, created) def test_shareable(self): + interp = interpreters.create() rch, sch = channels.create() self.assertTrue( @@ -60,8 +61,25 @@ def test_shareable(self): rch2 = rch.recv() sch2 = rch.recv() + interp.prepare_main(rch=rch, sch=sch) + sch.send_nowait(rch) + sch.send_nowait(sch) + interp.exec(dedent(""" + rch2 = rch.recv() + sch2 = rch.recv() + assert rch2 == rch + assert sch2 == sch + + sch.send_nowait(rch2) + sch.send_nowait(sch2) + """)) + rch3 = rch.recv() + sch3 = rch.recv() + self.assertEqual(rch2, rch) self.assertEqual(sch2, sch) + self.assertEqual(rch3, rch) + self.assertEqual(sch3, sch) def test_is_closed(self): rch, sch = channels.create() @@ -354,6 +372,228 @@ def test_send_buffer_nowait(self): obj[4:8] = b'ham.' 
self.assertEqual(obj, buf) + def test_send_cleared_with_subinterpreter(self): + def common(rch, sch, unbound=None, presize=0): + if not unbound: + extraargs = '' + elif unbound is channels.UNBOUND: + extraargs = ', unbound=channels.UNBOUND' + elif unbound is channels.UNBOUND_ERROR: + extraargs = ', unbound=channels.UNBOUND_ERROR' + elif unbound is channels.UNBOUND_REMOVE: + extraargs = ', unbound=channels.UNBOUND_REMOVE' + else: + raise NotImplementedError(repr(unbound)) + interp = interpreters.create() + + _run_output(interp, dedent(f""" + from test.support.interpreters import channels + sch = channels.SendChannel({sch.id}) + obj1 = b'spam' + obj2 = b'eggs' + sch.send_nowait(obj1{extraargs}) + sch.send_nowait(obj2{extraargs}) + """)) + self.assertEqual( + _channels.get_count(rch.id), + presize + 2, + ) + + if presize == 0: + obj1 = rch.recv() + self.assertEqual(obj1, b'spam') + self.assertEqual( + _channels.get_count(rch.id), + presize + 1, + ) + + return interp + + with self.subTest('default'): # UNBOUND + rch, sch = channels.create() + interp = common(rch, sch) + del interp + self.assertEqual(_channels.get_count(rch.id), 1) + obj1 = rch.recv() + self.assertEqual(_channels.get_count(rch.id), 0) + self.assertIs(obj1, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 0) + with self.assertRaises(channels.ChannelEmptyError): + rch.recv_nowait() + + with self.subTest('UNBOUND'): + rch, sch = channels.create() + interp = common(rch, sch, channels.UNBOUND) + del interp + self.assertEqual(_channels.get_count(rch.id), 1) + obj1 = rch.recv() + self.assertIs(obj1, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 0) + with self.assertRaises(channels.ChannelEmptyError): + rch.recv_nowait() + + with self.subTest('UNBOUND_ERROR'): + rch, sch = channels.create() + interp = common(rch, sch, channels.UNBOUND_ERROR) + + del interp + self.assertEqual(_channels.get_count(rch.id), 1) + with self.assertRaises(channels.ItemInterpreterDestroyed): + rch.recv() + + self.assertEqual(_channels.get_count(rch.id), 0) + with self.assertRaises(channels.ChannelEmptyError): + rch.recv_nowait() + + with self.subTest('UNBOUND_REMOVE'): + rch, sch = channels.create() + + interp = common(rch, sch, channels.UNBOUND_REMOVE) + del interp + self.assertEqual(_channels.get_count(rch.id), 0) + with self.assertRaises(channels.ChannelEmptyError): + rch.recv_nowait() + + sch.send_nowait(b'ham', unbound=channels.UNBOUND_REMOVE) + self.assertEqual(_channels.get_count(rch.id), 1) + interp = common(rch, sch, channels.UNBOUND_REMOVE, 1) + self.assertEqual(_channels.get_count(rch.id), 3) + sch.send_nowait(42, unbound=channels.UNBOUND_REMOVE) + self.assertEqual(_channels.get_count(rch.id), 4) + del interp + self.assertEqual(_channels.get_count(rch.id), 2) + obj1 = rch.recv() + obj2 = rch.recv() + self.assertEqual(obj1, b'ham') + self.assertEqual(obj2, 42) + self.assertEqual(_channels.get_count(rch.id), 0) + with self.assertRaises(channels.ChannelEmptyError): + rch.recv_nowait() + + def test_send_cleared_with_subinterpreter_mixed(self): + rch, sch = channels.create() + interp = interpreters.create() + + # If we don't associate the main interpreter with the channel + # then the channel will be automatically closed when interp + # is destroyed. 
+ sch.send_nowait(None) + rch.recv() + self.assertEqual(_channels.get_count(rch.id), 0) + + _run_output(interp, dedent(f""" + from test.support.interpreters import channels + sch = channels.SendChannel({sch.id}) + sch.send_nowait(1, unbound=channels.UNBOUND) + sch.send_nowait(2, unbound=channels.UNBOUND_ERROR) + sch.send_nowait(3) + sch.send_nowait(4, unbound=channels.UNBOUND_REMOVE) + sch.send_nowait(5, unbound=channels.UNBOUND) + """)) + self.assertEqual(_channels.get_count(rch.id), 5) + + del interp + self.assertEqual(_channels.get_count(rch.id), 4) + + obj1 = rch.recv() + self.assertIs(obj1, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 3) + + with self.assertRaises(channels.ItemInterpreterDestroyed): + rch.recv() + self.assertEqual(_channels.get_count(rch.id), 2) + + obj2 = rch.recv() + self.assertIs(obj2, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 1) + + obj3 = rch.recv() + self.assertIs(obj3, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 0) + + def test_send_cleared_with_subinterpreter_multiple(self): + rch, sch = channels.create() + interp1 = interpreters.create() + interp2 = interpreters.create() + + sch.send_nowait(1) + _run_output(interp1, dedent(f""" + from test.support.interpreters import channels + rch = channels.RecvChannel({rch.id}) + sch = channels.SendChannel({sch.id}) + obj1 = rch.recv() + sch.send_nowait(2, unbound=channels.UNBOUND) + sch.send_nowait(obj1, unbound=channels.UNBOUND_REMOVE) + """)) + _run_output(interp2, dedent(f""" + from test.support.interpreters import channels + rch = channels.RecvChannel({rch.id}) + sch = channels.SendChannel({sch.id}) + obj2 = rch.recv() + obj1 = rch.recv() + """)) + self.assertEqual(_channels.get_count(rch.id), 0) + sch.send_nowait(3) + _run_output(interp1, dedent(""" + sch.send_nowait(4, unbound=channels.UNBOUND) + # interp closed here + sch.send_nowait(5, unbound=channels.UNBOUND_REMOVE) + sch.send_nowait(6, unbound=channels.UNBOUND) + """)) + _run_output(interp2, dedent(""" + sch.send_nowait(7, unbound=channels.UNBOUND_ERROR) + # interp closed here + sch.send_nowait(obj1, unbound=channels.UNBOUND_ERROR) + sch.send_nowait(obj2, unbound=channels.UNBOUND_REMOVE) + sch.send_nowait(8, unbound=channels.UNBOUND) + """)) + _run_output(interp1, dedent(""" + sch.send_nowait(9, unbound=channels.UNBOUND_REMOVE) + sch.send_nowait(10, unbound=channels.UNBOUND) + """)) + self.assertEqual(_channels.get_count(rch.id), 10) + + obj3 = rch.recv() + self.assertEqual(obj3, 3) + self.assertEqual(_channels.get_count(rch.id), 9) + + obj4 = rch.recv() + self.assertEqual(obj4, 4) + self.assertEqual(_channels.get_count(rch.id), 8) + + del interp1 + self.assertEqual(_channels.get_count(rch.id), 6) + + # obj5 was removed + + obj6 = rch.recv() + self.assertIs(obj6, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 5) + + obj7 = rch.recv() + self.assertEqual(obj7, 7) + self.assertEqual(_channels.get_count(rch.id), 4) + + del interp2 + self.assertEqual(_channels.get_count(rch.id), 3) + + # obj1 + with self.assertRaises(channels.ItemInterpreterDestroyed): + rch.recv() + self.assertEqual(_channels.get_count(rch.id), 2) + + # obj2 was removed + + obj8 = rch.recv() + self.assertIs(obj8, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 1) + + # obj9 was removed + + obj10 = rch.recv() + self.assertIs(obj10, channels.UNBOUND) + self.assertEqual(_channels.get_count(rch.id), 0) + if __name__ == '__main__': # Test needs to be a package, so we can do relative imports. 
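
The channel tests above all revolve around what happens to queued items when the interpreter that sent them is destroyed. test.support.interpreters is a private test-support wrapper, not a public API, so the following is only a sketch of the three policies the tests exercise, reusing the calls shown in the patch: UNBOUND replaces the item with a marker, UNBOUND_ERROR makes recv() raise ItemInterpreterDestroyed, and UNBOUND_REMOVE drops the item entirely.

from textwrap import dedent
from test.support import interpreters               # private test helpers, as used above
from test.support.interpreters import channels

rch, sch = channels.create()
interp = interpreters.create()

# Associate the main interpreter with the channel first, otherwise the
# channel is closed automatically when the subinterpreter is destroyed
# (see the comment in test_send_cleared_with_subinterpreter_mixed above).
sch.send_nowait(None)
rch.recv()

interp.exec(dedent(f"""
    from test.support.interpreters import channels
    sch = channels.SendChannel({sch.id})
    sch.send_nowait(b'spam', unbound=channels.UNBOUND)
    sch.send_nowait(b'eggs', unbound=channels.UNBOUND_REMOVE)
    """))

del interp                                # destroy the sending interpreter
print(rch.recv() is channels.UNBOUND)     # True: the first item became the UNBOUND marker
# The UNBOUND_REMOVE item was dropped when the interpreter went away,
# so the channel is empty again at this point.
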
diff --git a/Lib/test/test_interpreters/test_queues.py b/Lib/test/test_interpreters/test_queues.py index a3d44c402e0..cbe0d8caffd 100644 --- a/Lib/test/test_interpreters/test_queues.py +++ b/Lib/test/test_interpreters/test_queues.py @@ -9,17 +9,20 @@ # Raise SkipTest if subinterpreters not supported. _queues = import_helper.import_module('_interpqueues') from test.support import interpreters -from test.support.interpreters import queues +from test.support.interpreters import queues, _crossinterp from .utils import _run_output, TestBase as _TestBase +REPLACE = _crossinterp._UNBOUND_CONSTANT_TO_FLAG[_crossinterp.UNBOUND] + + def get_num_queues(): return len(_queues.list_all()) class TestBase(_TestBase): def tearDown(self): - for qid, _ in _queues.list_all(): + for qid, _, _ in _queues.list_all(): try: _queues.destroy(qid) except Exception: @@ -40,7 +43,7 @@ def test_highlevel_reloaded(self): importlib.reload(queues) def test_create_destroy(self): - qid = _queues.create(2, 0) + qid = _queues.create(2, 0, REPLACE) _queues.destroy(qid) self.assertEqual(get_num_queues(), 0) with self.assertRaises(queues.QueueNotFoundError): @@ -54,7 +57,7 @@ def test_not_destroyed(self): '-c', dedent(f""" import {_queues.__name__} as _queues - _queues.create(2, 0) + _queues.create(2, 0, {REPLACE}) """), ) self.assertEqual(stdout, '') @@ -65,13 +68,13 @@ def test_not_destroyed(self): def test_bind_release(self): with self.subTest('typical'): - qid = _queues.create(2, 0) + qid = _queues.create(2, 0, REPLACE) _queues.bind(qid) _queues.release(qid) self.assertEqual(get_num_queues(), 0) with self.subTest('bind too much'): - qid = _queues.create(2, 0) + qid = _queues.create(2, 0, REPLACE) _queues.bind(qid) _queues.bind(qid) _queues.release(qid) @@ -79,7 +82,7 @@ def test_bind_release(self): self.assertEqual(get_num_queues(), 0) with self.subTest('nested'): - qid = _queues.create(2, 0) + qid = _queues.create(2, 0, REPLACE) _queues.bind(qid) _queues.bind(qid) _queues.release(qid) @@ -87,7 +90,7 @@ def test_bind_release(self): self.assertEqual(get_num_queues(), 0) with self.subTest('release without binding'): - qid = _queues.create(2, 0) + qid = _queues.create(2, 0, REPLACE) with self.assertRaises(queues.QueueError): _queues.release(qid) @@ -427,26 +430,206 @@ def test_put_get_different_interpreters(self): self.assertNotEqual(id(obj2), int(out)) def test_put_cleared_with_subinterpreter(self): - interp = interpreters.create() - queue = queues.create() - - out = _run_output( - interp, - dedent(f""" + def common(queue, unbound=None, presize=0): + if not unbound: + extraargs = '' + elif unbound is queues.UNBOUND: + extraargs = ', unbound=queues.UNBOUND' + elif unbound is queues.UNBOUND_ERROR: + extraargs = ', unbound=queues.UNBOUND_ERROR' + elif unbound is queues.UNBOUND_REMOVE: + extraargs = ', unbound=queues.UNBOUND_REMOVE' + else: + raise NotImplementedError(repr(unbound)) + interp = interpreters.create() + + _run_output(interp, dedent(f""" from test.support.interpreters import queues queue = queues.Queue({queue.id}) obj1 = b'spam' obj2 = b'eggs' - queue.put(obj1, syncobj=True) - queue.put(obj2, syncobj=True) + queue.put(obj1, syncobj=True{extraargs}) + queue.put(obj2, syncobj=True{extraargs}) """)) - self.assertEqual(queue.qsize(), 2) + self.assertEqual(queue.qsize(), presize + 2) + + if presize == 0: + obj1 = queue.get() + self.assertEqual(obj1, b'spam') + self.assertEqual(queue.qsize(), presize + 1) + + return interp + + with self.subTest('default'): # UNBOUND + queue = queues.create() + interp = common(queue) + del 
interp + obj1 = queue.get() + self.assertIs(obj1, queues.UNBOUND) + self.assertEqual(queue.qsize(), 0) + with self.assertRaises(queues.QueueEmpty): + queue.get_nowait() + + with self.subTest('UNBOUND'): + queue = queues.create() + interp = common(queue, queues.UNBOUND) + del interp + obj1 = queue.get() + self.assertIs(obj1, queues.UNBOUND) + self.assertEqual(queue.qsize(), 0) + with self.assertRaises(queues.QueueEmpty): + queue.get_nowait() + + with self.subTest('UNBOUND_ERROR'): + queue = queues.create() + interp = common(queue, queues.UNBOUND_ERROR) + + del interp + self.assertEqual(queue.qsize(), 1) + with self.assertRaises(queues.ItemInterpreterDestroyed): + queue.get() + + self.assertEqual(queue.qsize(), 0) + with self.assertRaises(queues.QueueEmpty): + queue.get_nowait() + + with self.subTest('UNBOUND_REMOVE'): + queue = queues.create() + + interp = common(queue, queues.UNBOUND_REMOVE) + del interp + self.assertEqual(queue.qsize(), 0) + with self.assertRaises(queues.QueueEmpty): + queue.get_nowait() + + queue.put(b'ham', unbound=queues.UNBOUND_REMOVE) + self.assertEqual(queue.qsize(), 1) + interp = common(queue, queues.UNBOUND_REMOVE, 1) + self.assertEqual(queue.qsize(), 3) + queue.put(42, unbound=queues.UNBOUND_REMOVE) + self.assertEqual(queue.qsize(), 4) + del interp + self.assertEqual(queue.qsize(), 2) + obj1 = queue.get() + obj2 = queue.get() + self.assertEqual(obj1, b'ham') + self.assertEqual(obj2, 42) + self.assertEqual(queue.qsize(), 0) + with self.assertRaises(queues.QueueEmpty): + queue.get_nowait() + + def test_put_cleared_with_subinterpreter_mixed(self): + queue = queues.create() + interp = interpreters.create() + _run_output(interp, dedent(f""" + from test.support.interpreters import queues + queue = queues.Queue({queue.id}) + queue.put(1, syncobj=True, unbound=queues.UNBOUND) + queue.put(2, syncobj=True, unbound=queues.UNBOUND_ERROR) + queue.put(3, syncobj=True) + queue.put(4, syncobj=True, unbound=queues.UNBOUND_REMOVE) + queue.put(5, syncobj=True, unbound=queues.UNBOUND) + """)) + self.assertEqual(queue.qsize(), 5) + + del interp + self.assertEqual(queue.qsize(), 4) obj1 = queue.get() - self.assertEqual(obj1, b'spam') + self.assertIs(obj1, queues.UNBOUND) + self.assertEqual(queue.qsize(), 3) + + with self.assertRaises(queues.ItemInterpreterDestroyed): + queue.get() + self.assertEqual(queue.qsize(), 2) + + obj2 = queue.get() + self.assertIs(obj2, queues.UNBOUND) self.assertEqual(queue.qsize(), 1) - del interp + obj3 = queue.get() + self.assertIs(obj3, queues.UNBOUND) + self.assertEqual(queue.qsize(), 0) + + def test_put_cleared_with_subinterpreter_multiple(self): + queue = queues.create() + interp1 = interpreters.create() + interp2 = interpreters.create() + + queue.put(1, syncobj=True) + _run_output(interp1, dedent(f""" + from test.support.interpreters import queues + queue = queues.Queue({queue.id}) + obj1 = queue.get() + queue.put(2, syncobj=True, unbound=queues.UNBOUND) + queue.put(obj1, syncobj=True, unbound=queues.UNBOUND_REMOVE) + """)) + _run_output(interp2, dedent(f""" + from test.support.interpreters import queues + queue = queues.Queue({queue.id}) + obj2 = queue.get() + obj1 = queue.get() + """)) + self.assertEqual(queue.qsize(), 0) + queue.put(3) + _run_output(interp1, dedent(""" + queue.put(4, syncobj=True, unbound=queues.UNBOUND) + # interp closed here + queue.put(5, syncobj=True, unbound=queues.UNBOUND_REMOVE) + queue.put(6, syncobj=True, unbound=queues.UNBOUND) + """)) + _run_output(interp2, dedent(""" + queue.put(7, syncobj=True, 
unbound=queues.UNBOUND_ERROR) + # interp closed here + queue.put(obj1, syncobj=True, unbound=queues.UNBOUND_ERROR) + queue.put(obj2, syncobj=True, unbound=queues.UNBOUND_REMOVE) + queue.put(8, syncobj=True, unbound=queues.UNBOUND) + """)) + _run_output(interp1, dedent(""" + queue.put(9, syncobj=True, unbound=queues.UNBOUND_REMOVE) + queue.put(10, syncobj=True, unbound=queues.UNBOUND) + """)) + self.assertEqual(queue.qsize(), 10) + + obj3 = queue.get() + self.assertEqual(obj3, 3) + self.assertEqual(queue.qsize(), 9) + + obj4 = queue.get() + self.assertEqual(obj4, 4) + self.assertEqual(queue.qsize(), 8) + + del interp1 + self.assertEqual(queue.qsize(), 6) + + # obj5 was removed + + obj6 = queue.get() + self.assertIs(obj6, queues.UNBOUND) + self.assertEqual(queue.qsize(), 5) + + obj7 = queue.get() + self.assertEqual(obj7, 7) + self.assertEqual(queue.qsize(), 4) + + del interp2 + self.assertEqual(queue.qsize(), 3) + + # obj1 + with self.assertRaises(queues.ItemInterpreterDestroyed): + queue.get() + self.assertEqual(queue.qsize(), 2) + + # obj2 was removed + + obj8 = queue.get() + self.assertIs(obj8, queues.UNBOUND) + self.assertEqual(queue.qsize(), 1) + + # obj9 was removed + + obj10 = queue.get() + self.assertIs(obj10, queues.UNBOUND) self.assertEqual(queue.qsize(), 0) def test_put_get_different_threads(self): diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 1ca3edac8c8..aa1b8268592 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -639,11 +639,9 @@ def test_large_file_ops(self): def test_with_open(self): for bufsize in (0, 100): - f = None with self.open(os_helper.TESTFN, "wb", bufsize) as f: f.write(b"xxx") self.assertEqual(f.closed, True) - f = None try: with self.open(os_helper.TESTFN, "wb", bufsize) as f: 1/0 diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index c3ecf2a7429..bd8d5d220e9 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -2585,12 +2585,42 @@ def testExplodeShortHandIpStr(self): self.assertEqual('192.168.178.1', addr4.exploded) def testReversePointer(self): - addr1 = ipaddress.IPv4Address('127.0.0.1') - addr2 = ipaddress.IPv6Address('2001:db8::1') - self.assertEqual('1.0.0.127.in-addr.arpa', addr1.reverse_pointer) - self.assertEqual('1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.' + - 'b.d.0.1.0.0.2.ip6.arpa', - addr2.reverse_pointer) + for addr_v4, expected in [ + ('127.0.0.1', '1.0.0.127.in-addr.arpa'), + # test vector: https://www.rfc-editor.org/rfc/rfc1035, §3.5 + ('10.2.0.52', '52.0.2.10.in-addr.arpa'), + ]: + with self.subTest('ipv4_reverse_pointer', addr=addr_v4): + addr = ipaddress.IPv4Address(addr_v4) + self.assertEqual(addr.reverse_pointer, expected) + + for addr_v6, expected in [ + ( + '2001:db8::1', ( + '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.' + '0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.' + 'ip6.arpa' + ) + ), + ( + '::FFFF:192.168.1.35', ( + '3.2.1.0.8.a.0.c.f.f.f.f.0.0.0.0.' + '0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.' + 'ip6.arpa' + ) + ), + # test vector: https://www.rfc-editor.org/rfc/rfc3596, §2.5 + ( + '4321:0:1:2:3:4:567:89ab', ( + 'b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.' + '2.0.0.0.1.0.0.0.0.0.0.0.1.2.3.4.' 
+ 'ip6.arpa' + ) + ) + ]: + with self.subTest('ipv6_reverse_pointer', addr=addr_v6): + addr = ipaddress.IPv6Address(addr_v6) + self.assertEqual(addr.reverse_pointer, expected) def testIntRepresentation(self): self.assertEqual(16909060, int(self.ipv4_address)) diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py index ec2b68acb90..1b9f3cf7624 100644 --- a/Lib/test/test_iter.py +++ b/Lib/test/test_iter.py @@ -5,6 +5,7 @@ from test.support import cpython_only from test.support.os_helper import TESTFN, unlink from test.support import check_free_after_iterating, ALWAYS_EQ, NEVER_EQ +from test.support import BrokenIter import pickle import collections.abc import functools @@ -1148,35 +1149,30 @@ def test_exception_locations(self): # The location of an exception raised from __init__ or # __next__ should should be the iterator expression - class Iter: - def __init__(self, init_raises=False, next_raises=False): - if init_raises: - 1/0 - self.next_raises = next_raises - - def __next__(self): - if self.next_raises: - 1/0 - - def __iter__(self): - return self - def init_raises(): try: - for x in Iter(init_raises=True): + for x in BrokenIter(init_raises=True): pass except Exception as e: return e def next_raises(): try: - for x in Iter(next_raises=True): + for x in BrokenIter(next_raises=True): + pass + except Exception as e: + return e + + def iter_raises(): + try: + for x in BrokenIter(iter_raises=True): pass except Exception as e: return e - for func, expected in [(init_raises, "Iter(init_raises=True)"), - (next_raises, "Iter(next_raises=True)"), + for func, expected in [(init_raises, "BrokenIter(init_raises=True)"), + (next_raises, "BrokenIter(next_raises=True)"), + (iter_raises, "BrokenIter(iter_raises=True)"), ]: with self.subTest(func): exc = func() diff --git a/Lib/test/test_largefile.py b/Lib/test/test_largefile.py index 849b6cb3e50..41f7b70e5cf 100644 --- a/Lib/test/test_largefile.py +++ b/Lib/test/test_largefile.py @@ -141,6 +141,9 @@ def test_truncate(self): f.truncate(1) self.assertEqual(f.tell(), 0) # else pointer moved f.seek(0) + # Verify readall on a truncated file is well behaved. read() + # without a size can be unbounded, this should get just the byte + # that remains. self.assertEqual(len(f.read()), 1) # else wasn't truncated def test_seekable(self): @@ -151,6 +154,22 @@ def test_seekable(self): f.seek(pos) self.assertTrue(f.seekable()) + @bigmemtest(size=size, memuse=2, dry_run=False) + def test_seek_readall(self, _size): + # Seek which doesn't change position should readall successfully. + with self.open(TESTFN, 'rb') as f: + self.assertEqual(f.seek(0, os.SEEK_CUR), 0) + self.assertEqual(len(f.read()), size + 1) + + # Seek which changes (or might change) position should readall + # successfully. 
+ with self.open(TESTFN, 'rb') as f: + self.assertEqual(f.seek(20, os.SEEK_SET), 20) + self.assertEqual(len(f.read()), size - 19) + + with self.open(TESTFN, 'rb') as f: + self.assertEqual(f.seek(-3, os.SEEK_END), size - 2) + self.assertEqual(len(f.read()), 3) def skip_no_disk_space(path, required): def decorator(fun): diff --git a/Lib/test/test_linecache.py b/Lib/test/test_linecache.py index 8ac521d72ef..6f595579140 100644 --- a/Lib/test/test_linecache.py +++ b/Lib/test/test_linecache.py @@ -280,6 +280,37 @@ def test_loader(self): self.assertEqual(linecache.getlines(filename, module_globals), ['source for x.y.z\n']) + def test_invalid_names(self): + for name, desc in [ + ('\x00', 'NUL bytes filename'), + (__file__ + '\x00', 'filename with embedded NUL bytes'), + # A filename with surrogate codes. A UnicodeEncodeError is raised + # by os.stat() upon querying, which is a subclass of ValueError. + ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), + # For POSIX platforms, an OSError will be raised but for Windows + # platforms, a ValueError is raised due to the path_t converter. + # See: https://github.com/python/cpython/issues/122170 + ('a' * 1_000_000, 'very long filename'), + ]: + with self.subTest(f'updatecache: {desc}'): + linecache.clearcache() + lines = linecache.updatecache(name) + self.assertListEqual(lines, []) + self.assertNotIn(name, linecache.cache) + + # hack into the cache (it shouldn't be allowed + # but we never know what people do...) + for key, fullname in [(name, 'ok'), ('key', name), (name, name)]: + with self.subTest(f'checkcache: {desc}', + key=key, fullname=fullname): + linecache.clearcache() + linecache.cache[key] = (0, 1234, [], fullname) + linecache.checkcache(key) + self.assertNotIn(key, linecache.cache) + + # just to be sure that we did not mess with cache + linecache.clearcache() + class LineCacheInvalidationTests(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py index 4d2d54705fc..ad7accf2099 100644 --- a/Lib/test/test_list.py +++ b/Lib/test/test_list.py @@ -299,6 +299,15 @@ def __eq__(self, other): lst = [X(), X()] X() in lst + def test_tier2_invalidates_iterator(self): + # GH-121012 + for _ in range(100): + a = [1, 2, 3] + it = iter(a) + for _ in it: + pass + a.append(4) + self.assertEqual(list(it), []) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_listcomps.py b/Lib/test/test_listcomps.py index 58b076e9ea5..45644d6c092 100644 --- a/Lib/test/test_listcomps.py +++ b/Lib/test/test_listcomps.py @@ -1,8 +1,11 @@ import doctest import textwrap +import traceback import types import unittest +from test.support import BrokenIter + doctests = """ ########### Tests borrowed from or inspired by test_genexps.py ############ @@ -711,6 +714,42 @@ def test_multiple_comprehension_name_reuse(self): self._check_in_scopes(code, {"x": 2, "y": [3]}, ns={"x": 3}, scopes=["class"]) self._check_in_scopes(code, {"x": 2, "y": [2]}, ns={"x": 3}, scopes=["function", "module"]) + def test_exception_locations(self): + # The location of an exception raised from __init__ or + # __next__ should should be the iterator expression + + def init_raises(): + try: + [x for x in BrokenIter(init_raises=True)] + except Exception as e: + return e + + def next_raises(): + try: + [x for x in BrokenIter(next_raises=True)] + except Exception as e: + return e + + def iter_raises(): + try: + [x for x in BrokenIter(iter_raises=True)] + except Exception as e: + return e + + for func, expected in [(init_raises, 
"BrokenIter(init_raises=True)"), + (next_raises, "BrokenIter(next_raises=True)"), + (iter_raises, "BrokenIter(iter_raises=True)"), + ]: + with self.subTest(func): + exc = func() + f = traceback.extract_tb(exc.__traceback__)[0] + indent = 16 + co = func.__code__ + self.assertEqual(f.lineno, co.co_firstlineno + 2) + self.assertEqual(f.end_lineno, co.co_firstlineno + 2) + self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], + expected) + __test__ = {'doctests' : doctests} def load_tests(loader, tests, pattern): diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py index da4bd79746a..00e93d8e784 100644 --- a/Lib/test/test_locale.py +++ b/Lib/test/test_locale.py @@ -355,6 +355,8 @@ def setUp(self): is_emscripten or is_wasi, "musl libc issue on Emscripten/WASI, bpo-46390" ) + @unittest.skipIf(sys.platform.startswith("netbsd"), + "gh-124108: NetBSD doesn't support UTF-8 for LC_COLLATE") def test_strcoll_with_diacritic(self): self.assertLess(locale.strcoll('à', 'b'), 0) @@ -364,6 +366,8 @@ def test_strcoll_with_diacritic(self): is_emscripten or is_wasi, "musl libc issue on Emscripten/WASI, bpo-46390" ) + @unittest.skipIf(sys.platform.startswith("netbsd"), + "gh-124108: NetBSD doesn't support UTF-8 for LC_COLLATE") def test_strxfrm_with_diacritic(self): self.assertLess(locale.strxfrm('à'), locale.strxfrm('b')) diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 4223d10e791..72252e0ae2b 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -2367,6 +2367,26 @@ class CustomListener(logging.handlers.QueueListener): class CustomQueue(queue.Queue): pass +class CustomQueueProtocol: + def __init__(self, maxsize=0): + self.queue = queue.Queue(maxsize) + + def __getattr__(self, attribute): + queue = object.__getattribute__(self, 'queue') + return getattr(queue, attribute) + +class CustomQueueFakeProtocol(CustomQueueProtocol): + # An object implementing the Queue API (incorrect signatures). + # The object will be considered a valid queue class since we + # do not check the signatures (only callability of methods) + # but will NOT be usable in production since a TypeError will + # be raised due to a missing argument. 
+ def empty(self, x): + pass + +class CustomQueueWrongProtocol(CustomQueueProtocol): + empty = None + def queueMaker(): return queue.Queue() @@ -3900,18 +3920,16 @@ def do_queuehandler_configuration(self, qspec, lspec): @threading_helper.requires_working_threading() @support.requires_subprocess() def test_config_queue_handler(self): - q = CustomQueue() - dq = { - '()': __name__ + '.CustomQueue', - 'maxsize': 10 - } + qs = [CustomQueue(), CustomQueueProtocol()] + dqs = [{'()': f'{__name__}.{cls}', 'maxsize': 10} + for cls in ['CustomQueue', 'CustomQueueProtocol']] dl = { '()': __name__ + '.listenerMaker', 'arg1': None, 'arg2': None, 'respect_handler_level': True } - qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', dq, q) + qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', *dqs, *qs) lvalues = (None, __name__ + '.CustomListener', dl, CustomListener) for qspec, lspec in itertools.product(qvalues, lvalues): self.do_queuehandler_configuration(qspec, lspec) @@ -3931,15 +3949,21 @@ def test_config_queue_handler(self): @support.requires_subprocess() @patch("multiprocessing.Manager") def test_config_queue_handler_does_not_create_multiprocessing_manager(self, manager): - # gh-120868 + # gh-120868, gh-121723 from multiprocessing import Queue as MQ q1 = {"()": "queue.Queue", "maxsize": -1} q2 = MQ() q3 = queue.Queue() - - for qspec in (q1, q2, q3): + # CustomQueueFakeProtocol passes the checks but will not be usable + # since the signatures are incompatible. Checking the Queue API + # without testing the type of the actual queue is a trade-off + # between usability and the work we need to do in order to safely + # check that the queue object correctly implements the API. + q4 = CustomQueueFakeProtocol() + + for qspec in (q1, q2, q3, q4): self.apply_config( { "version": 1, @@ -3955,22 +3979,64 @@ def test_config_queue_handler_does_not_create_multiprocessing_manager(self, mana @patch("multiprocessing.Manager") def test_config_queue_handler_invalid_config_does_not_create_multiprocessing_manager(self, manager): - # gh-120868 + # gh-120868, gh-121723 - with self.assertRaises(ValueError): - self.apply_config( - { - "version": 1, - "handlers": { - "queue_listener": { - "class": "logging.handlers.QueueHandler", - "queue": object(), + for qspec in [object(), CustomQueueWrongProtocol()]: + with self.assertRaises(ValueError): + self.apply_config( + { + "version": 1, + "handlers": { + "queue_listener": { + "class": "logging.handlers.QueueHandler", + "queue": qspec, + }, }, - }, + } + ) + manager.assert_not_called() + + @skip_if_tsan_fork + @support.requires_subprocess() + @unittest.skipUnless(support.Py_DEBUG, "requires a debug build for testing" + " assertions in multiprocessing") + def test_config_queue_handler_multiprocessing_context(self): + # regression test for gh-121723 + if support.MS_WINDOWS: + start_methods = ['spawn'] + else: + start_methods = ['spawn', 'fork', 'forkserver'] + for start_method in start_methods: + with self.subTest(start_method=start_method): + ctx = multiprocessing.get_context(start_method) + with ctx.Manager() as manager: + q = manager.Queue() + records = [] + # use 1 process and 1 task per child to put 1 record + with ctx.Pool(1, initializer=self._mpinit_issue121723, + initargs=(q, "text"), maxtasksperchild=1): + records.append(q.get(timeout=60)) + self.assertTrue(q.empty()) + self.assertEqual(len(records), 1) + + @staticmethod + def _mpinit_issue121723(qspec, message_to_log): + # static method for pickling support + 
logging.config.dictConfig({ + 'version': 1, + 'disable_existing_loggers': True, + 'handlers': { + 'log_to_parent': { + 'class': 'logging.handlers.QueueHandler', + 'queue': qspec } - ) - manager.assert_not_called() + }, + 'root': {'handlers': ['log_to_parent'], 'level': 'DEBUG'} + }) + # log a message (this creates a record put in the queue) + logging.getLogger().info(message_to_log) + @skip_if_tsan_fork @support.requires_subprocess() def test_multiprocessing_queues(self): # See gh-119819 @@ -4283,6 +4349,7 @@ def test_queue_listener_with_multiple_handlers(self): import multiprocessing from unittest.mock import patch + @skip_if_tsan_fork @threading_helper.requires_working_threading() class QueueListenerTest(BaseTest): """ @@ -5183,6 +5250,7 @@ def _extract_logrecord_process_name(key, logMultiprocessing, conn=None): else: return results + @skip_if_tsan_fork def test_multiprocessing(self): support.skip_if_broken_multiprocessing_synchronize() multiprocessing_imported = 'multiprocessing' in sys.modules @@ -6127,13 +6195,28 @@ def test_emit_after_closing_in_write_mode(self): self.assertEqual(fp.read().strip(), '1') class RotatingFileHandlerTest(BaseFileTest): - @unittest.skipIf(support.is_wasi, "WASI does not have /dev/null.") def test_should_not_rollover(self): - # If maxbytes is zero rollover never occurs + # If file is empty rollover never occurs + rh = logging.handlers.RotatingFileHandler( + self.fn, encoding="utf-8", maxBytes=1) + self.assertFalse(rh.shouldRollover(None)) + rh.close() + + # If maxBytes is zero rollover never occurs + rh = logging.handlers.RotatingFileHandler( + self.fn, encoding="utf-8", maxBytes=0) + self.assertFalse(rh.shouldRollover(None)) + rh.close() + + with open(self.fn, 'wb') as f: + f.write(b'\n') rh = logging.handlers.RotatingFileHandler( self.fn, encoding="utf-8", maxBytes=0) self.assertFalse(rh.shouldRollover(None)) rh.close() + + @unittest.skipIf(support.is_wasi, "WASI does not have /dev/null.") + def test_should_not_rollover_non_file(self): # bpo-45401 - test with special file # We set maxBytes to 1 so that rollover would normally happen, except # for the check for regular files @@ -6143,18 +6226,47 @@ def test_should_not_rollover(self): rh.close() def test_should_rollover(self): - rh = logging.handlers.RotatingFileHandler(self.fn, encoding="utf-8", maxBytes=1) + with open(self.fn, 'wb') as f: + f.write(b'\n') + rh = logging.handlers.RotatingFileHandler(self.fn, encoding="utf-8", maxBytes=2) self.assertTrue(rh.shouldRollover(self.next_rec())) rh.close() def test_file_created(self): # checks that the file is created and assumes it was created # by us + os.unlink(self.fn) rh = logging.handlers.RotatingFileHandler(self.fn, encoding="utf-8") rh.emit(self.next_rec()) self.assertLogFile(self.fn) rh.close() + def test_max_bytes(self, delay=False): + kwargs = {'delay': delay} if delay else {} + os.unlink(self.fn) + rh = logging.handlers.RotatingFileHandler( + self.fn, encoding="utf-8", backupCount=2, maxBytes=100, **kwargs) + self.assertIs(os.path.exists(self.fn), not delay) + small = logging.makeLogRecord({'msg': 'a'}) + large = logging.makeLogRecord({'msg': 'b'*100}) + self.assertFalse(rh.shouldRollover(small)) + self.assertFalse(rh.shouldRollover(large)) + rh.emit(small) + self.assertLogFile(self.fn) + self.assertFalse(os.path.exists(self.fn + ".1")) + self.assertFalse(rh.shouldRollover(small)) + self.assertTrue(rh.shouldRollover(large)) + rh.emit(large) + self.assertTrue(os.path.exists(self.fn)) + self.assertLogFile(self.fn + ".1") + 
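
test_max_bytes here drives RotatingFileHandler through its size-based rollover. Outside the test harness the same behaviour looks roughly like this sketch (the file name "app.log", the logger name, and the messages are made up for illustration): a record that would push the file past maxBytes triggers a rollover before it is written, and at most backupCount rotated files are kept.

import logging
import logging.handlers

handler = logging.handlers.RotatingFileHandler(
    "app.log", maxBytes=100, backupCount=2, encoding="utf-8")
logger = logging.getLogger("rollover-demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("a")           # small record, fits in the current (empty) file
logger.info("b" * 100)     # would exceed maxBytes, so app.log rolls over to app.log.1 first
logger.info("c")           # the large record now fills app.log, so it rolls over again
# At this point app.log holds "c", app.log.1 the long record and app.log.2 "a";
# anything older than backupCount rotated files would have been discarded.
handler.close()
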
self.assertFalse(os.path.exists(self.fn + ".2")) + self.assertTrue(rh.shouldRollover(small)) + self.assertTrue(rh.shouldRollover(large)) + rh.close() + + def test_max_bytes_delay(self): + self.test_max_bytes(delay=True) + def test_rollover_filenames(self): def namer(name): return name + ".test" @@ -6163,11 +6275,15 @@ def namer(name): rh.namer = namer rh.emit(self.next_rec()) self.assertLogFile(self.fn) + self.assertFalse(os.path.exists(namer(self.fn + ".1"))) rh.emit(self.next_rec()) self.assertLogFile(namer(self.fn + ".1")) + self.assertFalse(os.path.exists(namer(self.fn + ".2"))) rh.emit(self.next_rec()) self.assertLogFile(namer(self.fn + ".2")) self.assertFalse(os.path.exists(namer(self.fn + ".3"))) + rh.emit(self.next_rec()) + self.assertFalse(os.path.exists(namer(self.fn + ".3"))) rh.close() def test_namer_rotator_inheritance(self): diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py index b68c442013c..541ccdb3d09 100644 --- a/Lib/test/test_math.py +++ b/Lib/test/test_math.py @@ -187,6 +187,9 @@ def result_check(expected, got, ulp_tol=5, abs_tol=0.0): # Check exactly equal (applies also to strings representing exceptions) if got == expected: + if not got and not expected: + if math.copysign(1, got) != math.copysign(1, expected): + return f"expected {expected}, got {got} (zero has wrong sign)" return None failure = "not equal" @@ -809,11 +812,13 @@ def testHypot(self): # Test allowable types (those with __float__) self.assertEqual(hypot(12.0, 5.0), 13.0) self.assertEqual(hypot(12, 5), 13) - self.assertEqual(hypot(1, -1), math.sqrt(2)) - self.assertEqual(hypot(1, FloatLike(-1.)), math.sqrt(2)) + self.assertEqual(hypot(0.75, -1), 1.25) + self.assertEqual(hypot(-1, 0.75), 1.25) + self.assertEqual(hypot(0.75, FloatLike(-1.)), 1.25) + self.assertEqual(hypot(FloatLike(-1.), 0.75), 1.25) self.assertEqual(hypot(Decimal(12), Decimal(5)), 13) self.assertEqual(hypot(Fraction(12, 32), Fraction(5, 32)), Fraction(13, 32)) - self.assertEqual(hypot(bool(1), bool(0), bool(1), bool(1)), math.sqrt(3)) + self.assertEqual(hypot(True, False, True, True, True), 2.0) # Test corner cases self.assertEqual(hypot(0.0, 0.0), 0.0) # Max input is zero @@ -969,9 +974,9 @@ def testDist(self): self.assertEqual(dist((D(14), D(1)), (D(2), D(-4))), D(13)) self.assertEqual(dist((F(14, 32), F(1, 32)), (F(2, 32), F(-4, 32))), F(13, 32)) - self.assertEqual(dist((True, True, False, True, False), - (True, False, True, True, False)), - sqrt(2.0)) + self.assertEqual(dist((True, True, False, False, True, True), + (True, False, True, False, False, False)), + 2.0) # Test corner cases self.assertEqual(dist((13.25, 12.5, -3.25), @@ -2051,6 +2056,13 @@ def test_testfile(self): except OverflowError: result = 'OverflowError' + # C99+ says for math.h's sqrt: If the argument is +∞ or ±0, it is + # returned, unmodified. On another hand, for csqrt: If z is ±0+0i, + # the result is +0+0i. Lets correct zero sign of er to follow + # first convention. 
+ if id in ['sqrt0002', 'sqrt0003', 'sqrt1001', 'sqrt1023']: + er = math.copysign(er, ar) + # Default tolerances ulp_tol, abs_tol = 5, 0.0 diff --git a/Lib/test/test_memoryview.py b/Lib/test/test_memoryview.py index 0eb2a367603..2d4bf5f1408 100644 --- a/Lib/test/test_memoryview.py +++ b/Lib/test/test_memoryview.py @@ -18,6 +18,10 @@ from test.support import import_helper +class MyObject: + pass + + class AbstractMemoryTests: source_bytes = b"abcdef" @@ -228,8 +232,6 @@ def __init__(self, base): self.m = memoryview(base) class MySource(tp): pass - class MyObject: - pass # Create a reference cycle through a memoryview object. # This exercises mbuf_clear(). @@ -656,5 +658,26 @@ def __bool__(self): m[0] = MyBool() self.assertEqual(ba[:8], b'\0'*8) + def test_buffer_reference_loop(self): + m = memoryview(b'abc').__buffer__(0) + o = MyObject() + o.m = m + o.o = o + wr = weakref.ref(o) + del m, o + gc.collect() + self.assertIsNone(wr()) + + def test_picklebuffer_reference_loop(self): + pb = pickle.PickleBuffer(memoryview(b'abc')) + o = MyObject() + o.pb = pb + o.o = o + wr = weakref.ref(o) + del pb, o + gc.collect() + self.assertIsNone(wr()) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py index b7c6abed101..094d25b88c6 100644 --- a/Lib/test/test_monitoring.py +++ b/Lib/test/test_monitoring.py @@ -1841,6 +1841,21 @@ def f(a=1, b=2): self.assertEqual(call_data[0], (f, 1)) self.assertEqual(call_data[1], (f, sys.monitoring.MISSING)) + def test_instruction_explicit_callback(self): + # gh-122247 + # Calling the instruction event callback explicitly should not + # crash CPython + def callback(code, instruction_offset): + pass + + sys.monitoring.use_tool_id(0, "test") + self.addCleanup(sys.monitoring.free_tool_id, 0) + sys.monitoring.register_callback(0, sys.monitoring.events.INSTRUCTION, callback) + sys.monitoring.set_events(0, sys.monitoring.events.INSTRUCTION) + callback(None, 0) # call the *same* handler while it is registered + sys.monitoring.restart_events() + sys.monitoring.set_events(0, 0) + class TestOptimizer(MonitoringTestBase, unittest.TestCase): diff --git a/Lib/test/test_multiprocessing_fork/__init__.py b/Lib/test/test_multiprocessing_fork/__init__.py index aa1fff50b28..b35e82879d7 100644 --- a/Lib/test/test_multiprocessing_fork/__init__.py +++ b/Lib/test/test_multiprocessing_fork/__init__.py @@ -12,5 +12,8 @@ if sys.platform == 'darwin': raise unittest.SkipTest("test may crash on macOS (bpo-33725)") +if support.check_sanitizer(thread=True): + raise unittest.SkipTest("TSAN doesn't support threads after fork") + def load_tests(*args): return support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 92a34113bc0..c4fcc1993ca 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -28,6 +28,13 @@ def wrapper(*args, **kwargs): return wrapper +class TestBase(unittest.TestCase): + def assert_specialized(self, f, opname): + instructions = dis.get_instructions(f, adaptive=True) + opnames = {instruction.opname for instruction in instructions} + self.assertIn(opname, opnames) + + class TestLoadSuperAttrCache(unittest.TestCase): def test_descriptor_not_double_executed_on_spec_fail(self): calls = [] @@ -479,7 +486,7 @@ def f(): self.assertFalse(f()) -class TestCallCache(unittest.TestCase): +class TestCallCache(TestBase): def test_too_many_defaults_0(self): def f(): pass @@ -507,10 +514,33 @@ def f(x, y): f(None) f() + @disabling_optimizer + 
@requires_specialization + def test_assign_init_code(self): + class MyClass: + def __init__(self): + pass + + def instantiate(): + return MyClass() + + # Trigger specialization + for _ in range(1025): + instantiate() + self.assert_specialized(instantiate, "CALL_ALLOC_AND_ENTER_INIT") + + def count_args(self, *args): + self.num_args = len(args) + + # Set MyClass.__init__.__code__ to a code object that is incompatible + # (uses varargs) with the current specialization + MyClass.__init__.__code__ = count_args.__code__ + instantiate() + @threading_helper.requires_working_threading() @requires_specialization -class TestRacesDoNotCrash(unittest.TestCase): +class TestRacesDoNotCrash(TestBase): # Careful with these. Bigger numbers have a higher chance of catching bugs, # but you can also burn through a *ton* of type/dict/function versions: ITEMS = 1000 @@ -518,11 +548,6 @@ class TestRacesDoNotCrash(unittest.TestCase): WARMUPS = 2 WRITERS = 2 - def assert_specialized(self, f, opname): - instructions = dis.get_instructions(f, adaptive=True) - opnames = {instruction.opname for instruction in instructions} - self.assertIn(opname, opnames) - @disabling_optimizer def assert_races_do_not_crash( self, opname, get_items, read, write, *, check_items=False diff --git a/Lib/test/test_ordered_dict.py b/Lib/test/test_ordered_dict.py index 06a0e812271..a9b6a84996e 100644 --- a/Lib/test/test_ordered_dict.py +++ b/Lib/test/test_ordered_dict.py @@ -2,7 +2,9 @@ import contextlib import copy import gc +import operator import pickle +import re from random import randrange, shuffle import struct import sys @@ -740,11 +742,44 @@ def test_ordered_dict_items_result_gc(self): # when it's mutated and returned from __next__: self.assertTrue(gc.is_tracked(next(it))) + +class _TriggerSideEffectOnEqual: + count = 0 # number of calls to __eq__ + trigger = 1 # count value when to trigger side effect + + def __eq__(self, other): + if self.__class__.count == self.__class__.trigger: + self.side_effect() + self.__class__.count += 1 + return True + + def __hash__(self): + # all instances represent the same key + return -1 + + def side_effect(self): + raise NotImplementedError + class PurePythonOrderedDictTests(OrderedDictTests, unittest.TestCase): module = py_coll OrderedDict = py_coll.OrderedDict + def test_issue119004_attribute_error(self): + class Key(_TriggerSideEffectOnEqual): + def side_effect(self): + del dict1[TODEL] + + TODEL = Key() + dict1 = self.OrderedDict(dict.fromkeys((0, TODEL, 4.2))) + dict2 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + # This causes an AttributeError due to the linked list being changed + msg = re.escape("'NoneType' object has no attribute 'key'") + self.assertRaisesRegex(AttributeError, msg, operator.eq, dict1, dict2) + self.assertEqual(Key.count, 2) + self.assertDictEqual(dict1, dict.fromkeys((0, 4.2))) + self.assertDictEqual(dict2, dict.fromkeys((0, Key(), 4.2))) + class CPythonBuiltinDictTests(unittest.TestCase): """Builtin dict preserves insertion order. 
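
The OrderedDict additions around here all use the same trick: a key whose __eq__ mutates one of the dictionaries while they are being compared. A condensed sketch of that scenario, assuming a CPython that includes the gh-119004 hardening the surrounding tests verify (older builds behaved differently):

from collections import OrderedDict      # the default C implementation

class Key:
    calls = 0
    def __hash__(self):
        return -1                        # all Key instances hash alike, so comparison must call __eq__
    def __eq__(self, other):
        type(self).calls += 1
        if type(self).calls == 2:        # mutate d1 in the middle of d1 == d2
            d1.clear()
        return True

d1 = OrderedDict.fromkeys((0, Key(), 4.2))
d2 = OrderedDict.fromkeys((0, Key(), 4.2))
try:
    d1 == d2
except RuntimeError as exc:
    print(exc)                           # "OrderedDict mutated during iteration"
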
@@ -765,8 +800,85 @@ class CPythonBuiltinDictTests(unittest.TestCase): del method +class CPythonOrderedDictSideEffects: + + def check_runtime_error_issue119004(self, dict1, dict2): + msg = re.escape("OrderedDict mutated during iteration") + self.assertRaisesRegex(RuntimeError, msg, operator.eq, dict1, dict2) + + def test_issue119004_change_size_by_clear(self): + class Key(_TriggerSideEffectOnEqual): + def side_effect(self): + dict1.clear() + + dict1 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + dict2 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + self.check_runtime_error_issue119004(dict1, dict2) + self.assertEqual(Key.count, 2) + self.assertDictEqual(dict1, {}) + self.assertDictEqual(dict2, dict.fromkeys((0, Key(), 4.2))) + + def test_issue119004_change_size_by_delete_key(self): + class Key(_TriggerSideEffectOnEqual): + def side_effect(self): + del dict1[TODEL] + + TODEL = Key() + dict1 = self.OrderedDict(dict.fromkeys((0, TODEL, 4.2))) + dict2 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + self.check_runtime_error_issue119004(dict1, dict2) + self.assertEqual(Key.count, 2) + self.assertDictEqual(dict1, dict.fromkeys((0, 4.2))) + self.assertDictEqual(dict2, dict.fromkeys((0, Key(), 4.2))) + + def test_issue119004_change_linked_list_by_clear(self): + class Key(_TriggerSideEffectOnEqual): + def side_effect(self): + dict1.clear() + dict1['a'] = dict1['b'] = 'c' + + dict1 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + dict2 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + self.check_runtime_error_issue119004(dict1, dict2) + self.assertEqual(Key.count, 2) + self.assertDictEqual(dict1, dict.fromkeys(('a', 'b'), 'c')) + self.assertDictEqual(dict2, dict.fromkeys((0, Key(), 4.2))) + + def test_issue119004_change_linked_list_by_delete_key(self): + class Key(_TriggerSideEffectOnEqual): + def side_effect(self): + del dict1[TODEL] + dict1['a'] = 'c' + + TODEL = Key() + dict1 = self.OrderedDict(dict.fromkeys((0, TODEL, 4.2))) + dict2 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + self.check_runtime_error_issue119004(dict1, dict2) + self.assertEqual(Key.count, 2) + self.assertDictEqual(dict1, {0: None, 'a': 'c', 4.2: None}) + self.assertDictEqual(dict2, dict.fromkeys((0, Key(), 4.2))) + + def test_issue119004_change_size_by_delete_key_in_dict_eq(self): + class Key(_TriggerSideEffectOnEqual): + trigger = 0 + def side_effect(self): + del dict1[TODEL] + + TODEL = Key() + dict1 = self.OrderedDict(dict.fromkeys((0, TODEL, 4.2))) + dict2 = self.OrderedDict(dict.fromkeys((0, Key(), 4.2))) + self.assertEqual(Key.count, 0) + # the side effect is in dict.__eq__ and modifies the length + self.assertNotEqual(dict1, dict2) + self.assertEqual(Key.count, 2) + self.assertDictEqual(dict1, dict.fromkeys((0, 4.2))) + self.assertDictEqual(dict2, dict.fromkeys((0, Key(), 4.2))) + + @unittest.skipUnless(c_coll, 'requires the C version of the collections module') -class CPythonOrderedDictTests(OrderedDictTests, unittest.TestCase): +class CPythonOrderedDictTests(OrderedDictTests, + CPythonOrderedDictSideEffects, + unittest.TestCase): module = c_coll OrderedDict = c_coll.OrderedDict diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 2beb9ca8aa6..7f6855866df 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -828,7 +828,7 @@ def ns_to_sec(ns): return (ns * 1e-9) + 0.5e-9 def test_utime_by_indexed(self): - # pass times as floating point seconds as the second indexed parameter + # pass times as floating-point seconds as the second indexed parameter def set_time(filename, ns): 
atime_ns, mtime_ns = ns atime = self.ns_to_sec(atime_ns) @@ -3127,7 +3127,8 @@ class Win32NtTests(unittest.TestCase): def test_getfinalpathname_handles(self): nt = import_helper.import_module('nt') ctypes = import_helper.import_module('ctypes') - import ctypes.wintypes + # Ruff false positive -- it thinks we're redefining `ctypes` here + import ctypes.wintypes # noqa: F811 kernel = ctypes.WinDLL('Kernel32.dll', use_last_error=True) kernel.GetCurrentProcess.restype = ctypes.wintypes.HANDLE diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index 5fd1a41cbee..ff054e76efc 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -1296,18 +1296,20 @@ def test_absolute_posix(self): ) @needs_posix def test_open_mode(self): - old_mask = os.umask(0) + # Unmask all permissions except world-write, which may + # not be supported on some filesystems (see GH-85633.) + old_mask = os.umask(0o002) self.addCleanup(os.umask, old_mask) p = self.cls(self.base) with (p / 'new_file').open('wb'): pass st = os.stat(self.parser.join(self.base, 'new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) - os.umask(0o022) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o664) + os.umask(0o026) with (p / 'other_new_file').open('wb'): pass st = os.stat(self.parser.join(self.base, 'other_new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) @needs_posix def test_resolve_root(self): @@ -1325,16 +1327,18 @@ def test_resolve_root(self): ) @needs_posix def test_touch_mode(self): - old_mask = os.umask(0) + # Unmask all permissions except world-write, which may + # not be supported on some filesystems (see GH-85633.) + old_mask = os.umask(0o002) self.addCleanup(os.umask, old_mask) p = self.cls(self.base) (p / 'new_file').touch() st = os.stat(self.parser.join(self.base, 'new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) - os.umask(0o022) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o664) + os.umask(0o026) (p / 'other_new_file').touch() st = os.stat(self.parser.join(self.base, 'other_new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) (p / 'masked_new_file').touch(mode=0o750) st = os.stat(self.parser.join(self.base, 'masked_new_file')) self.assertEqual(stat.S_IMODE(st.st_mode), 0o750) diff --git a/Lib/test/test_patma.py b/Lib/test/test_patma.py index 1bdab125dc6..8325b83a593 100644 --- a/Lib/test/test_patma.py +++ b/Lib/test/test_patma.py @@ -1,6 +1,7 @@ import array import collections import dataclasses +import dis import enum import inspect import sys @@ -3377,6 +3378,24 @@ class Keys: self.assertIs(y, None) self.assertIs(z, None) +class TestSourceLocations(unittest.TestCase): + def test_jump_threading(self): + # See gh-123048 + def f(): + x = 0 + v = 1 + match v: + case 1: + if x < 0: + x = 1 + case 2: + if x < 0: + x = 1 + x += 1 + + for inst in dis.get_instructions(f): + if inst.opcode in dis.hasjump: + self.assertIsNotNone(inst.positions.lineno, "jump without location") class TestTracing(unittest.TestCase): diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 4b3557f9b0d..9b2c885ed67 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -3416,10 +3416,12 @@ def test_file_modified_after_execution(self): print("hello") """ + # the time.sleep is needed for low-resolution filesystems like HFS+ commands = """ filename = $_frame.f_code.co_filename f = open(filename, "w") 
f.write("print('goodbye')") + import time; time.sleep(1) f.close() ll """ @@ -3429,10 +3431,12 @@ def test_file_modified_after_execution(self): self.assertIn("was edited", stdout) def test_file_modified_after_execution_with_multiple_instances(self): + # the time.sleep is needed for low-resolution filesystems like HFS+ script = """ import pdb; pdb.Pdb().set_trace() with open(__file__, "w") as f: f.write("print('goodbye')\\n" * 5) + import time; time.sleep(1) import pdb; pdb.Pdb().set_trace() """ @@ -3492,6 +3496,23 @@ def change_file(content, filename): # the file as up to date self.assertNotIn("WARNING:", stdout) + def test_post_mortem_restart(self): + script = """ + def foo(): + raise ValueError("foo") + foo() + """ + + commands = """ + continue + restart + continue + quit + """ + + stdout, stderr = self.run_pdb_script(script, commands) + self.assertIn("Restarting", stdout) + def test_relative_imports(self): self.module_name = 't_main' os_helper.rmtree(self.module_name) diff --git a/Lib/test/test_pickle.py b/Lib/test/test_pickle.py index 49aa4b38603..c84e507cdf6 100644 --- a/Lib/test/test_pickle.py +++ b/Lib/test/test_pickle.py @@ -16,6 +16,7 @@ from test.pickletester import AbstractHookTests from test.pickletester import AbstractUnpickleTests +from test.pickletester import AbstractPicklingErrorTests from test.pickletester import AbstractPickleTests from test.pickletester import AbstractPickleModuleTests from test.pickletester import AbstractPersistentPicklerTests @@ -55,6 +56,18 @@ def loads(self, buf, **kwds): return u.load() +class PyPicklingErrorTests(AbstractPicklingErrorTests, unittest.TestCase): + + pickler = pickle._Pickler + + def dumps(self, arg, proto=None, **kwargs): + f = io.BytesIO() + p = self.pickler(f, proto, **kwargs) + p.dump(arg) + f.seek(0) + return bytes(f.read()) + + class PyPicklerTests(AbstractPickleTests, unittest.TestCase): pickler = pickle._Pickler @@ -88,6 +101,8 @@ def loads(self, buf, **kwds): return pickle.loads(buf, **kwds) test_framed_write_sizes_with_delayed_writer = None + test_find_class = None + test_custom_find_class = None class PersistentPicklerUnpicklerMixin(object): @@ -267,6 +282,9 @@ class CUnpicklerTests(PyUnpicklerTests): bad_stack_errors = (pickle.UnpicklingError,) truncated_errors = (pickle.UnpicklingError,) + class CPicklingErrorTests(PyPicklingErrorTests): + pickler = _pickle.Pickler + class CPicklerTests(PyPicklerTests): pickler = _pickle.Pickler unpickler = _pickle.Unpickler diff --git a/Lib/test/test_pickletools.py b/Lib/test/test_pickletools.py index d37af79e878..8cb1f6dffcc 100644 --- a/Lib/test/test_pickletools.py +++ b/Lib/test/test_pickletools.py @@ -1,3 +1,4 @@ +import io import pickle import pickletools from test import support @@ -62,6 +63,315 @@ def test_optimize_binput_and_memoize(self): self.assertNotIn(pickle.BINPUT, pickled2) +class SimpleReader: + def __init__(self, data): + self.data = data + self.pos = 0 + + def read(self, n): + data = self.data[self.pos: self.pos + n] + self.pos += n + return data + + def readline(self): + nl = self.data.find(b'\n', self.pos) + 1 + if not nl: + nl = len(self.data) + data = self.data[self.pos: nl] + self.pos = nl + return data + + +class GenopsTests(unittest.TestCase): + def test_genops(self): + it = pickletools.genops(b'(I123\nK\x12J\x12\x34\x56\x78t.') + self.assertEqual([(item[0].name,) + item[1:] for item in it], [ + ('MARK', None, 0), + ('INT', 123, 1), + ('BININT1', 0x12, 6), + ('BININT', 0x78563412, 8), + ('TUPLE', None, 13), + ('STOP', None, 14), + ]) + + def 
test_from_file(self): + f = io.BytesIO(b'prefix(I123\nK\x12J\x12\x34\x56\x78t.suffix') + self.assertEqual(f.read(6), b'prefix') + it = pickletools.genops(f) + self.assertEqual([(item[0].name,) + item[1:] for item in it], [ + ('MARK', None, 6), + ('INT', 123, 7), + ('BININT1', 0x12, 12), + ('BININT', 0x78563412, 14), + ('TUPLE', None, 19), + ('STOP', None, 20), + ]) + self.assertEqual(f.read(), b'suffix') + + def test_without_pos(self): + f = SimpleReader(b'(I123\nK\x12J\x12\x34\x56\x78t.') + it = pickletools.genops(f) + self.assertEqual([(item[0].name,) + item[1:] for item in it], [ + ('MARK', None, None), + ('INT', 123, None), + ('BININT1', 0x12, None), + ('BININT', 0x78563412, None), + ('TUPLE', None, None), + ('STOP', None, None), + ]) + + def test_no_stop(self): + it = pickletools.genops(b'N') + item = next(it) + self.assertEqual(item[0].name, 'NONE') + with self.assertRaisesRegex(ValueError, + 'pickle exhausted before seeing STOP'): + next(it) + + def test_truncated_data(self): + it = pickletools.genops(b'I123') + with self.assertRaisesRegex(ValueError, + 'no newline found when trying to read stringnl'): + next(it) + it = pickletools.genops(b'J\x12\x34') + with self.assertRaisesRegex(ValueError, + 'not enough data in stream to read int4'): + next(it) + + def test_unknown_opcode(self): + it = pickletools.genops(b'N\xff') + item = next(it) + self.assertEqual(item[0].name, 'NONE') + with self.assertRaisesRegex(ValueError, + r"at position 1, opcode b'\\xff' unknown"): + next(it) + + def test_unknown_opcode_without_pos(self): + f = SimpleReader(b'N\xff') + it = pickletools.genops(f) + item = next(it) + self.assertEqual(item[0].name, 'NONE') + with self.assertRaisesRegex(ValueError, + r"at position , opcode b'\\xff' unknown"): + next(it) + + +class DisTests(unittest.TestCase): + maxDiff = None + + def check_dis(self, data, expected, **kwargs): + out = io.StringIO() + pickletools.dis(data, out=out, **kwargs) + self.assertEqual(out.getvalue(), expected) + + def check_dis_error(self, data, expected, expected_error, **kwargs): + out = io.StringIO() + with self.assertRaisesRegex(ValueError, expected_error): + pickletools.dis(data, out=out, **kwargs) + self.assertEqual(out.getvalue(), expected) + + def test_mark(self): + self.check_dis(b'(N(tl.', '''\ + 0: ( MARK + 1: N NONE + 2: ( MARK + 3: t TUPLE (MARK at 2) + 4: l LIST (MARK at 0) + 5: . STOP +highest protocol among opcodes = 0 +''') + + def test_indentlevel(self): + self.check_dis(b'(N(tl.', '''\ + 0: ( MARK + 1: N NONE + 2: ( MARK + 3: t TUPLE (MARK at 2) + 4: l LIST (MARK at 0) + 5: . STOP +highest protocol among opcodes = 0 +''', indentlevel=2) + + def test_mark_without_pos(self): + self.check_dis(SimpleReader(b'(N(tl.'), '''\ +( MARK +N NONE +( MARK +t TUPLE (MARK at unknown opcode offset) +l LIST (MARK at unknown opcode offset) +. STOP +highest protocol among opcodes = 0 +''') + + def test_no_mark(self): + self.check_dis_error(b'Nt.', '''\ + 0: N NONE + 1: t TUPLE no MARK exists on stack +''', 'no MARK exists on stack') + + def test_put(self): + self.check_dis(b'Np0\nq\x01r\x02\x00\x00\x00\x94.', '''\ + 0: N NONE + 1: p PUT 0 + 4: q BINPUT 1 + 6: r LONG_BINPUT 2 + 11: \\x94 MEMOIZE (as 3) + 12: . 
STOP +highest protocol among opcodes = 4 +''') + + def test_put_redefined(self): + self.check_dis_error(b'Np1\np1\n.', '''\ + 0: N NONE + 1: p PUT 1 + 4: p PUT 1 +''', 'memo key 1 already defined') + self.check_dis_error(b'Np1\nq\x01.', '''\ + 0: N NONE + 1: p PUT 1 + 4: q BINPUT 1 +''', 'memo key 1 already defined') + self.check_dis_error(b'Np1\nr\x01\x00\x00\x00.', '''\ + 0: N NONE + 1: p PUT 1 + 4: r LONG_BINPUT 1 +''', 'memo key 1 already defined') + self.check_dis_error(b'Np1\n\x94.', '''\ + 0: N NONE + 1: p PUT 1 + 4: \\x94 MEMOIZE (as 1) +''', 'memo key None already defined') + + def test_put_empty_stack(self): + self.check_dis_error(b'p0\n', '''\ + 0: p PUT 0 +''', "stack is empty -- can't store into memo") + + def test_put_markobject(self): + self.check_dis_error(b'(p0\n', '''\ + 0: ( MARK + 1: p PUT 0 +''', "can't store markobject in the memo") + + def test_get(self): + self.check_dis(b'(Np1\ng1\nh\x01j\x01\x00\x00\x00t.', '''\ + 0: ( MARK + 1: N NONE + 2: p PUT 1 + 5: g GET 1 + 8: h BINGET 1 + 10: j LONG_BINGET 1 + 15: t TUPLE (MARK at 0) + 16: . STOP +highest protocol among opcodes = 1 +''') + + def test_get_without_put(self): + self.check_dis_error(b'g1\n.', '''\ + 0: g GET 1 +''', 'memo key 1 has never been stored into') + self.check_dis_error(b'h\x01.', '''\ + 0: h BINGET 1 +''', 'memo key 1 has never been stored into') + self.check_dis_error(b'j\x01\x00\x00\x00.', '''\ + 0: j LONG_BINGET 1 +''', 'memo key 1 has never been stored into') + + def test_memo(self): + memo = {} + self.check_dis(b'Np1\n.', '''\ + 0: N NONE + 1: p PUT 1 + 4: . STOP +highest protocol among opcodes = 0 +''', memo=memo) + self.check_dis(b'g1\n.', '''\ + 0: g GET 1 + 3: . STOP +highest protocol among opcodes = 0 +''', memo=memo) + + def test_mark_pop(self): + self.check_dis(b'(N00N.', '''\ + 0: ( MARK + 1: N NONE + 2: 0 POP + 3: 0 POP (MARK at 0) + 4: N NONE + 5: . STOP +highest protocol among opcodes = 0 +''') + + def test_too_small_stack(self): + self.check_dis_error(b'a', '''\ + 0: a APPEND +''', 'tries to pop 2 items from stack with only 0 items') + self.check_dis_error(b']a', '''\ + 0: ] EMPTY_LIST + 1: a APPEND +''', 'tries to pop 2 items from stack with only 1 items') + + def test_no_stop(self): + self.check_dis_error(b'N', '''\ + 0: N NONE +''', 'pickle exhausted before seeing STOP') + + def test_truncated_data(self): + self.check_dis_error(b'NI123', '''\ + 0: N NONE +''', 'no newline found when trying to read stringnl') + self.check_dis_error(b'NJ\x12\x34', '''\ + 0: N NONE +''', 'not enough data in stream to read int4') + + def test_unknown_opcode(self): + self.check_dis_error(b'N\xff', '''\ + 0: N NONE +''', r"at position 1, opcode b'\\xff' unknown") + + def test_stop_not_empty_stack(self): + self.check_dis_error(b']N.', '''\ + 0: ] EMPTY_LIST + 1: N NONE + 2: . STOP +highest protocol among opcodes = 1 +''', r'stack not empty after STOP: \[list\]') + + def test_annotate(self): + self.check_dis(b'(Nt.', '''\ + 0: ( MARK Push markobject onto the stack. + 1: N NONE Push None on the stack. + 2: t TUPLE (MARK at 0) Build a tuple out of the topmost stack slice, after markobject. + 3: . STOP Stop the unpickling machine. +highest protocol among opcodes = 0 +''', annotate=1) + self.check_dis(b'(Nt.', '''\ + 0: ( MARK Push markobject onto the stack. + 1: N NONE Push None on the stack. + 2: t TUPLE (MARK at 0) Build a tuple out of the topmost stack slice, after markobject. + 3: . STOP Stop the unpickling machine. 
+highest protocol among opcodes = 0 +''', annotate=20) + self.check_dis(b'(((((((ttttttt.', '''\ + 0: ( MARK Push markobject onto the stack. + 1: ( MARK Push markobject onto the stack. + 2: ( MARK Push markobject onto the stack. + 3: ( MARK Push markobject onto the stack. + 4: ( MARK Push markobject onto the stack. + 5: ( MARK Push markobject onto the stack. + 6: ( MARK Push markobject onto the stack. + 7: t TUPLE (MARK at 6) Build a tuple out of the topmost stack slice, after markobject. + 8: t TUPLE (MARK at 5) Build a tuple out of the topmost stack slice, after markobject. + 9: t TUPLE (MARK at 4) Build a tuple out of the topmost stack slice, after markobject. + 10: t TUPLE (MARK at 3) Build a tuple out of the topmost stack slice, after markobject. + 11: t TUPLE (MARK at 2) Build a tuple out of the topmost stack slice, after markobject. + 12: t TUPLE (MARK at 1) Build a tuple out of the topmost stack slice, after markobject. + 13: t TUPLE (MARK at 0) Build a tuple out of the topmost stack slice, after markobject. + 14: . STOP Stop the unpickling machine. +highest protocol among opcodes = 0 +''', annotate=20) + + class MiscTestCase(unittest.TestCase): def test__all__(self): not_exported = { diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py index d095f440a99..ca6927554b0 100644 --- a/Lib/test/test_pkgutil.py +++ b/Lib/test/test_pkgutil.py @@ -522,7 +522,43 @@ def test_mixed_namespace(self): del sys.modules['foo.bar'] del sys.modules['foo.baz'] - # XXX: test .pkg files + + def test_extend_path_argument_types(self): + pkgname = 'foo' + dirname_0 = self.create_init(pkgname) + + # If the input path is not a list it is returned unchanged + self.assertEqual('notalist', pkgutil.extend_path('notalist', 'foo')) + self.assertEqual(('not', 'a', 'list'), pkgutil.extend_path(('not', 'a', 'list'), 'foo')) + self.assertEqual(123, pkgutil.extend_path(123, 'foo')) + self.assertEqual(None, pkgutil.extend_path(None, 'foo')) + + # Cleanup + shutil.rmtree(dirname_0) + del sys.path[0] + + + def test_extend_path_pkg_files(self): + pkgname = 'foo' + dirname_0 = self.create_init(pkgname) + + with open(os.path.join(dirname_0, 'bar.pkg'), 'w') as pkg_file: + pkg_file.write('\n'.join([ + 'baz', + '/foo/bar/baz', + '', + '#comment' + ])) + + extended_paths = pkgutil.extend_path(sys.path, 'bar') + + self.assertEqual(extended_paths[:-2], sys.path) + self.assertEqual(extended_paths[-2], 'baz') + self.assertEqual(extended_paths[-1], '/foo/bar/baz') + + # Cleanup + shutil.rmtree(dirname_0) + del sys.path[0] class NestedNamespacePackageTest(unittest.TestCase): @@ -588,8 +624,11 @@ def test_get_loader_handles_missing_spec_attribute(self): mod = type(sys)(name) del mod.__spec__ with CleanImport(name): - sys.modules[name] = mod - loader = pkgutil.get_loader(name) + try: + sys.modules[name] = mod + loader = pkgutil.get_loader(name) + finally: + sys.modules.pop(name, None) self.assertIsNone(loader) @ignore_warnings(category=DeprecationWarning) @@ -598,8 +637,11 @@ def test_get_loader_handles_spec_attribute_none(self): mod = type(sys)(name) mod.__spec__ = None with CleanImport(name): - sys.modules[name] = mod - loader = pkgutil.get_loader(name) + try: + sys.modules[name] = mod + loader = pkgutil.get_loader(name) + finally: + sys.modules.pop(name, None) self.assertIsNone(loader) @ignore_warnings(category=DeprecationWarning) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 908354cb857..07442a96549 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -411,8 +411,10 @@ def 
test_posix_fallocate(self): # issue33655: Also ignore EINVAL on *BSD since ZFS is also # often used there. if inst.errno == errno.EINVAL and sys.platform.startswith( - ('sunos', 'freebsd', 'netbsd', 'openbsd', 'gnukfreebsd')): + ('sunos', 'freebsd', 'openbsd', 'gnukfreebsd')): raise unittest.SkipTest("test may fail on ZFS filesystems") + elif inst.errno == errno.EOPNOTSUPP and sys.platform.startswith("netbsd"): + raise unittest.SkipTest("test may fail on FFS filesystems") else: raise finally: @@ -2140,6 +2142,13 @@ def test_stat(self): with self.assertRaisesRegex(NotImplementedError, "dir_fd unavailable"): os.stat("file", dir_fd=0) + def test_ptsname_r(self): + self._verify_available("HAVE_PTSNAME_R") + if self.mac_ver >= (10, 13, 4): + self.assertIn("HAVE_PTSNAME_R", posix._have_functions) + else: + self.assertNotIn("HAVE_PTSNAME_R", posix._have_functions) + def test_access(self): self._verify_available("HAVE_FACCESSAT") if self.mac_ver >= (10, 10): diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index 57a24e9c70d..ca5cf42f8fc 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -5,7 +5,7 @@ import unittest from posixpath import realpath, abspath, dirname, basename from test import test_genericpath -from test.support import import_helper +from test.support import get_attribute, import_helper from test.support import cpython_only, os_helper from test.support.os_helper import FakePath from unittest import mock @@ -359,13 +359,19 @@ def test_expanduser_pwd(self): "no home directory on VxWorks") def test_expanduser_pwd2(self): pwd = import_helper.import_module('pwd') - for e in pwd.getpwall(): - name = e.pw_name - home = e.pw_dir + for all_entry in get_attribute(pwd, 'getpwall')(): + name = all_entry.pw_name + + # gh-121200: pw_dir can be different between getpwall() and + # getpwnam(), so use getpwnam() pw_dir as expanduser() does. + entry = pwd.getpwnam(name) + home = entry.pw_dir home = home.rstrip('/') or '/' - self.assertEqual(posixpath.expanduser('~' + name), home) - self.assertEqual(posixpath.expanduser(os.fsencode('~' + name)), - os.fsencode(home)) + + with self.subTest(all_entry=all_entry, entry=entry): + self.assertEqual(posixpath.expanduser('~' + name), home) + self.assertEqual(posixpath.expanduser(os.fsencode('~' + name)), + os.fsencode(home)) NORMPATH_CASES = [ ("", "."), diff --git a/Lib/test/test_pyrepl/support.py b/Lib/test/test_pyrepl/support.py index 70e12286f7d..672d4896c92 100644 --- a/Lib/test/test_pyrepl/support.py +++ b/Lib/test/test_pyrepl/support.py @@ -1,3 +1,4 @@ +import os from code import InteractiveConsole from functools import partial from typing import Iterable @@ -38,6 +39,20 @@ def code_to_events(code: str): yield Event(evt="key", data=c, raw=bytearray(c.encode("utf-8"))) +def clean_screen(screen: Iterable[str]): + """Cleans color and console characters out of a screen output. + + This is useful for screen testing, it increases the test readability since + it strips out all the unreadable side of the screen. 
+ """ + output = [] + for line in screen: + if line.startswith(">>>") or line.startswith("..."): + line = line[3:] + output.append(line) + return "\n".join(output).strip() + + def prepare_reader(console: Console, **kwargs): config = ReadlineConfig(readline_completer=kwargs.pop("readline_completer", None)) reader = ReadlineAlikeReader(console=console, config=config) @@ -86,8 +101,18 @@ def handle_all_events( ) +def make_clean_env() -> dict[str, str]: + clean_env = os.environ.copy() + for k in clean_env.copy(): + if k.startswith("PYTHON"): + clean_env.pop(k) + clean_env.pop("FORCE_COLOR", None) + clean_env.pop("NO_COLOR", None) + return clean_env + + class FakeConsole(Console): - def __init__(self, events, encoding="utf-8"): + def __init__(self, events, encoding="utf-8") -> None: self.events = iter(events) self.encoding = encoding self.screen = [] @@ -136,8 +161,8 @@ def flushoutput(self) -> None: def forgetinput(self) -> None: pass - def wait(self) -> None: - pass + def wait(self, timeout: float | None = None) -> bool: + return True def repaint(self) -> None: pass diff --git a/Lib/test/test_pyrepl/test_interact.py b/Lib/test/test_pyrepl/test_interact.py index df97b1354a1..e71ab419570 100644 --- a/Lib/test/test_pyrepl/test_interact.py +++ b/Lib/test/test_pyrepl/test_interact.py @@ -7,7 +7,7 @@ from test.support import force_not_colorized from _pyrepl.console import InteractiveColoredConsole - +from _pyrepl.simple_interact import _more_lines class TestSimpleInteract(unittest.TestCase): def test_multiple_statements(self): @@ -88,6 +88,20 @@ def test_runsource_returns_false_for_failed_compilation(self): self.assertFalse(result) self.assertIn('SyntaxError', f.getvalue()) + @force_not_colorized + def test_runsource_show_syntax_error_location(self): + console = InteractiveColoredConsole() + source = "def f(x, x): ..." + f = io.StringIO() + with contextlib.redirect_stderr(f): + result = console.runsource(source) + self.assertFalse(result) + r = """ + def f(x, x): ... 
+ ^ +SyntaxError: duplicate argument 'x' in function definition""" + self.assertIn(r, f.getvalue()) + def test_runsource_shows_syntax_error_for_failed_compilation(self): console = InteractiveColoredConsole() source = "print('Hello, world!'" @@ -111,3 +125,104 @@ def test_no_active_future(self): result = console.runsource(source) self.assertFalse(result) self.assertEqual(f.getvalue(), "{'x': }\n") + + +class TestMoreLines(unittest.TestCase): + def test_invalid_syntax_single_line(self): + namespace = {} + code = "if foo" + console = InteractiveColoredConsole(namespace, filename="") + self.assertFalse(_more_lines(console, code)) + + def test_empty_line(self): + namespace = {} + code = "" + console = InteractiveColoredConsole(namespace, filename="") + self.assertFalse(_more_lines(console, code)) + + def test_valid_single_statement(self): + namespace = {} + code = "foo = 1" + console = InteractiveColoredConsole(namespace, filename="") + self.assertFalse(_more_lines(console, code)) + + def test_multiline_single_assignment(self): + namespace = {} + code = dedent("""\ + foo = [ + 1, + 2, + 3, + ]""") + console = InteractiveColoredConsole(namespace, filename="") + self.assertFalse(_more_lines(console, code)) + + def test_multiline_single_block(self): + namespace = {} + code = dedent("""\ + def foo(): + '''docs''' + + return 1""") + console = InteractiveColoredConsole(namespace, filename="") + self.assertTrue(_more_lines(console, code)) + + def test_multiple_statements_single_line(self): + namespace = {} + code = "foo = 1;bar = 2" + console = InteractiveColoredConsole(namespace, filename="") + self.assertFalse(_more_lines(console, code)) + + def test_multiple_statements(self): + namespace = {} + code = dedent("""\ + import time + + foo = 1""") + console = InteractiveColoredConsole(namespace, filename="") + self.assertTrue(_more_lines(console, code)) + + def test_multiple_blocks(self): + namespace = {} + code = dedent("""\ + from dataclasses import dataclass + + @dataclass + class Point: + x: float + y: float""") + console = InteractiveColoredConsole(namespace, filename="") + self.assertTrue(_more_lines(console, code)) + + def test_multiple_blocks_empty_newline(self): + namespace = {} + code = dedent("""\ + from dataclasses import dataclass + + @dataclass + class Point: + x: float + y: float + """) + console = InteractiveColoredConsole(namespace, filename="") + self.assertFalse(_more_lines(console, code)) + + def test_multiple_blocks_indented_newline(self): + namespace = {} + code = ( + "from dataclasses import dataclass\n" + "\n" + "@dataclass\n" + "class Point:\n" + " x: float\n" + " y: float\n" + " " + ) + console = InteractiveColoredConsole(namespace, filename="") + self.assertFalse(_more_lines(console, code)) + + def test_incomplete_statement(self): + namespace = {} + code = "if foo:" + console = InteractiveColoredConsole(namespace, filename="") + self.assertTrue(_more_lines(console, code)) diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py index b189d3291e8..0f3e9996e77 100644 --- a/Lib/test/test_pyrepl/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -1,14 +1,19 @@ import io import itertools import os +import pathlib +import re import rlcompleter import select import subprocess import sys -from unittest import TestCase, skipUnless +import tempfile +from unittest import TestCase, skipUnless, skipIf from unittest.mock import patch from test.support import force_not_colorized from test.support import SHORT_TIMEOUT +from test.support.import_helper 
import import_module +from test.support.os_helper import unlink from .support import ( FakeConsole, @@ -17,9 +22,12 @@ more_lines, multiline_input, code_to_events, + clean_screen, + make_clean_env, ) from _pyrepl.console import Event -from _pyrepl.readline import ReadlineAlikeReader, ReadlineConfig +from _pyrepl.readline import (ReadlineAlikeReader, ReadlineConfig, + _ReadlineWrapper) from _pyrepl.readline import multiline_input as readline_multiline_input try: @@ -27,6 +35,94 @@ except ImportError: pty = None + +class ReplTestCase(TestCase): + def run_repl( + self, + repl_input: str | list[str], + env: dict | None = None, + *, + cmdline_args: list[str] | None = None, + cwd: str | None = None, + ) -> tuple[str, int]: + temp_dir = None + if cwd is None: + temp_dir = tempfile.TemporaryDirectory(ignore_cleanup_errors=True) + cwd = temp_dir.name + try: + return self._run_repl( + repl_input, env=env, cmdline_args=cmdline_args, cwd=cwd + ) + finally: + if temp_dir is not None: + temp_dir.cleanup() + + def _run_repl( + self, + repl_input: str | list[str], + *, + env: dict | None, + cmdline_args: list[str] | None, + cwd: str, + ) -> tuple[str, int]: + assert pty + master_fd, slave_fd = pty.openpty() + cmd = [sys.executable, "-i", "-u"] + if env is None: + cmd.append("-I") + elif "PYTHON_HISTORY" not in env: + env["PYTHON_HISTORY"] = os.path.join(cwd, ".regrtest_history") + if cmdline_args is not None: + cmd.extend(cmdline_args) + + try: + import termios + except ModuleNotFoundError: + pass + else: + term_attr = termios.tcgetattr(slave_fd) + term_attr[6][termios.VREPRINT] = 0 # pass through CTRL-R + term_attr[6][termios.VINTR] = 0 # pass through CTRL-C + termios.tcsetattr(slave_fd, termios.TCSANOW, term_attr) + + process = subprocess.Popen( + cmd, + stdin=slave_fd, + stdout=slave_fd, + stderr=slave_fd, + cwd=cwd, + text=True, + close_fds=True, + env=env if env else os.environ, + ) + os.close(slave_fd) + if isinstance(repl_input, list): + repl_input = "\n".join(repl_input) + "\n" + os.write(master_fd, repl_input.encode("utf-8")) + + output = [] + while select.select([master_fd], [], [], SHORT_TIMEOUT)[0]: + try: + data = os.read(master_fd, 1024).decode("utf-8") + if not data: + break + except OSError: + break + output.append(data) + else: + os.close(master_fd) + process.kill() + self.fail(f"Timeout while waiting for output, got: {''.join(output)}") + + os.close(master_fd) + try: + exit_code = process.wait(timeout=SHORT_TIMEOUT) + except subprocess.TimeoutExpired: + process.kill() + exit_code = process.wait() + return "".join(output), exit_code + + class TestCursorPosition(TestCase): def prepare_reader(self, events): console = FakeConsole(events) @@ -458,6 +554,24 @@ def test_auto_indent_with_comment(self): output = multiline_input(reader) self.assertEqual(output, output_code) + def test_auto_indent_with_multicomment(self): + # fmt: off + events = code_to_events( + "def f(): ## foo\n" + "pass\n\n" + ) + + output_code = ( + "def f(): ## foo\n" + " pass\n" + " " + ) + # fmt: on + + reader = self.prepare_reader(events) + output = multiline_input(reader) + self.assertEqual(output, output_code) + def test_auto_indent_ignore_comments(self): # fmt: off events = code_to_events( @@ -479,13 +593,40 @@ def prepare_reader(self, events): console = FakeConsole(events) config = ReadlineConfig(readline_completer=None) reader = ReadlineAlikeReader(console=console, config=config) + reader.can_colorize = False return reader + def test_stdin_is_tty(self): + # Used during test log analysis to figure out if a TTY was 
available. + try: + if os.isatty(sys.stdin.fileno()): + return + except OSError as ose: + self.skipTest(f"stdin tty check failed: {ose}") + else: + self.skipTest("stdin is not a tty") + + def test_stdout_is_tty(self): + # Used during test log analysis to figure out if a TTY was available. + try: + if os.isatty(sys.stdout.fileno()): + return + except OSError as ose: + self.skipTest(f"stdout tty check failed: {ose}") + else: + self.skipTest("stdout is not a tty") + def test_basic(self): reader = self.prepare_reader(code_to_events("1+1\n")) output = multiline_input(reader) self.assertEqual(output, "1+1") + self.assertEqual(clean_screen(reader.screen), "1+1") + + def test_get_line_buffer_returns_str(self): + reader = self.prepare_reader(code_to_events("\n")) + wrapper = _ReadlineWrapper(f_in=None, f_out=None, reader=reader) + self.assertIs(type(wrapper.get_line_buffer()), str) def test_multiline_edit(self): events = itertools.chain( @@ -515,8 +656,10 @@ def test_multiline_edit(self): output = multiline_input(reader) self.assertEqual(output, "def f():\n ...\n ") + self.assertEqual(clean_screen(reader.screen), "def f():\n ...") output = multiline_input(reader) self.assertEqual(output, "def g():\n pass\n ") + self.assertEqual(clean_screen(reader.screen), "def g():\n pass") def test_history_navigation_with_up_arrow(self): events = itertools.chain( @@ -535,12 +678,40 @@ def test_history_navigation_with_up_arrow(self): output = multiline_input(reader) self.assertEqual(output, "1+1") + self.assertEqual(clean_screen(reader.screen), "1+1") output = multiline_input(reader) self.assertEqual(output, "2+2") + self.assertEqual(clean_screen(reader.screen), "2+2") output = multiline_input(reader) self.assertEqual(output, "2+2") + self.assertEqual(clean_screen(reader.screen), "2+2") output = multiline_input(reader) self.assertEqual(output, "1+1") + self.assertEqual(clean_screen(reader.screen), "1+1") + + def test_history_with_multiline_entries(self): + code = "def foo():\nx = 1\ny = 2\nz = 3\n\ndef bar():\nreturn 42\n\n" + events = list(itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="\n", raw=bytearray(b"\n")), + Event(evt="key", data="\n", raw=bytearray(b"\n")), + ] + )) + + reader = self.prepare_reader(events) + output = multiline_input(reader) + output = multiline_input(reader) + output = multiline_input(reader) + self.assertEqual( + clean_screen(reader.screen), + 'def foo():\n x = 1\n y = 2\n z = 3' + ) + self.assertEqual(output, "def foo():\n x = 1\n y = 2\n z = 3\n ") + def test_history_navigation_with_down_arrow(self): events = itertools.chain( @@ -558,6 +729,7 @@ def test_history_navigation_with_down_arrow(self): output = multiline_input(reader) self.assertEqual(output, "1+1") + self.assertEqual(clean_screen(reader.screen), "1+1") def test_history_search(self): events = itertools.chain( @@ -574,18 +746,62 @@ def test_history_search(self): output = multiline_input(reader) self.assertEqual(output, "1+1") + self.assertEqual(clean_screen(reader.screen), "1+1") output = multiline_input(reader) self.assertEqual(output, "2+2") + self.assertEqual(clean_screen(reader.screen), "2+2") output = multiline_input(reader) self.assertEqual(output, "3+3") + self.assertEqual(clean_screen(reader.screen), "3+3") output = multiline_input(reader) self.assertEqual(output, "1+1") + self.assertEqual(clean_screen(reader.screen), "1+1") def 
test_control_character(self): events = code_to_events("c\x1d\n") reader = self.prepare_reader(events) output = multiline_input(reader) self.assertEqual(output, "c\x1d") + self.assertEqual(clean_screen(reader.screen), "c") + + def test_history_search_backward(self): + # Test history search backward with "imp" input + events = itertools.chain( + code_to_events("import os\n"), + code_to_events("imp"), + [ + Event(evt='key', data='page up', raw=bytearray(b'\x1b[5~')), + Event(evt="key", data="\n", raw=bytearray(b"\n")), + ], + ) + + # fill the history + reader = self.prepare_reader(events) + multiline_input(reader) + + # search for "imp" in history + output = multiline_input(reader) + self.assertEqual(output, "import os") + self.assertEqual(clean_screen(reader.screen), "import os") + + def test_history_search_backward_empty(self): + # Test history search backward with an empty input + events = itertools.chain( + code_to_events("import os\n"), + [ + Event(evt='key', data='page up', raw=bytearray(b'\x1b[5~')), + Event(evt="key", data="\n", raw=bytearray(b"\n")), + ], + ) + + # fill the history + reader = self.prepare_reader(events) + multiline_input(reader) + + # search backward in history + output = multiline_input(reader) + self.assertEqual(output, "import os") + self.assertEqual(clean_screen(reader.screen), "import os") class TestPyReplCompleter(TestCase): @@ -840,12 +1056,33 @@ def test_bracketed_paste_single_line(self): @skipUnless(pty, "requires pty") -class TestMain(TestCase): +class TestDumbTerminal(ReplTestCase): + def test_dumb_terminal_exits_cleanly(self): + env = os.environ.copy() + env.update({"TERM": "dumb"}) + output, exit_code = self.run_repl("exit()\n", env=env) + self.assertEqual(exit_code, 0) + self.assertIn("warning: can't use pyrepl", output) + self.assertNotIn("Exception", output) + self.assertNotIn("Traceback", output) + + +@skipUnless(pty, "requires pty") +@skipIf((os.environ.get("TERM") or "dumb") == "dumb", "can't use pyrepl in dumb terminal") +class TestMain(ReplTestCase): + def setUp(self): + # Cleanup from PYTHON* variables to isolate from local + # user settings, see #121359. Such variables should be + # added later in test methods to patched os.environ. 
+ patcher = patch('os.environ', new=make_clean_env()) + self.addCleanup(patcher.stop) + patcher.start() + @force_not_colorized def test_exposed_globals_in_repl(self): pre = "['__annotations__', '__builtins__'" post = "'__loader__', '__name__', '__package__', '__spec__']" - output, exit_code = self.run_repl(["sorted(dir())", "exit"]) + output, exit_code = self.run_repl(["sorted(dir())", "exit()"]) if "can't use pyrepl" in output: self.skipTest("pyrepl not available") self.assertEqual(exit_code, 0) @@ -864,15 +1101,84 @@ def test_exposed_globals_in_repl(self): self.assertTrue(case1 or case2 or case3 or case4, output) - def test_dumb_terminal_exits_cleanly(self): - env = os.environ.copy() - env.update({"TERM": "dumb"}) - output, exit_code = self.run_repl("exit()\n", env=env) + def _assertMatchOK( + self, var: str, expected: str | re.Pattern, actual: str + ) -> None: + if isinstance(expected, re.Pattern): + self.assertTrue( + expected.match(actual), + f"{var}={actual} does not match {expected.pattern}", + ) + else: + self.assertEqual( + actual, + expected, + f"expected {var}={expected}, got {var}={actual}", + ) + + @force_not_colorized + def _run_repl_globals_test(self, expectations, *, as_file=False, as_module=False): + clean_env = make_clean_env() + clean_env["NO_COLOR"] = "1" # force_not_colorized doesn't touch subprocesses + + with tempfile.TemporaryDirectory() as td: + blue = pathlib.Path(td) / "blue" + blue.mkdir() + mod = blue / "calx.py" + mod.write_text("FOO = 42", encoding="utf-8") + commands = [ + "print(f'^{" + var + "=}')" for var in expectations + ] + ["exit()"] + if as_file and as_module: + self.fail("as_file and as_module are mutually exclusive") + elif as_file: + output, exit_code = self.run_repl( + commands, + cmdline_args=[str(mod)], + env=clean_env, + ) + elif as_module: + output, exit_code = self.run_repl( + commands, + cmdline_args=["-m", "blue.calx"], + env=clean_env, + cwd=td, + ) + else: + self.fail("Choose one of as_file or as_module") + + if "can't use pyrepl" in output: + self.skipTest("pyrepl not available") + self.assertEqual(exit_code, 0) - self.assertIn("warning: can\'t use pyrepl", output) + for var, expected in expectations.items(): + with self.subTest(var=var, expected=expected): + if m := re.search(rf"\^{var}=(.+?)[\r\n]", output): + self._assertMatchOK(var, expected, actual=m.group(1)) + else: + self.fail(f"{var}= not found in output: {output!r}\n\n{output}") + self.assertNotIn("Exception", output) self.assertNotIn("Traceback", output) + def test_inspect_keeps_globals_from_inspected_file(self): + expectations = { + "FOO": "42", + "__name__": "'__main__'", + "__package__": "None", + # "__file__" is missing in -i, like in the basic REPL + } + self._run_repl_globals_test(expectations, as_file=True) + + def test_inspect_keeps_globals_from_inspected_module(self): + expectations = { + "FOO": "42", + "__name__": "'__main__'", + "__package__": "'blue'", + "__file__": re.compile(r"^'.*calx.py'$"), + } + self._run_repl_globals_test(expectations, as_module=True) + @force_not_colorized def test_python_basic_repl(self): env = os.environ.copy() @@ -898,33 +1204,126 @@ def test_python_basic_repl(self): self.assertNotIn("Exception", output) self.assertNotIn("Traceback", output) - def run_repl(self, repl_input: str | list[str], env: dict | None = None) -> tuple[str, int]: - master_fd, slave_fd = pty.openpty() - process = subprocess.Popen( - [sys.executable, "-i", "-u"], - stdin=slave_fd, - stdout=slave_fd, - stderr=slave_fd, - text=True, - close_fds=True, - env=env if env 
else os.environ, - ) - if isinstance(repl_input, list): - repl_input = "\n".join(repl_input) + "\n" - os.write(master_fd, repl_input.encode("utf-8")) + @force_not_colorized + def test_bad_sys_excepthook_doesnt_crash_pyrepl(self): + env = os.environ.copy() + commands = ("import sys\n" + "sys.excepthook = 1\n" + "1/0\n" + "exit()\n") - output = [] - while select.select([master_fd], [], [], 0.5)[0]: - data = os.read(master_fd, 1024).decode("utf-8") - if not data: - break - output.append(data) + def check(output, exitcode): + self.assertIn("Error in sys.excepthook:", output) + self.assertEqual(output.count("'int' object is not callable"), 1) + self.assertIn("Original exception was:", output) + self.assertIn("division by zero", output) + self.assertEqual(exitcode, 0) + env.pop("PYTHON_BASIC_REPL", None) + output, exit_code = self.run_repl(commands, env=env) + if "can\'t use pyrepl" in output: + self.skipTest("pyrepl not available") + check(output, exit_code) - os.close(master_fd) - os.close(slave_fd) - try: - exit_code = process.wait(timeout=SHORT_TIMEOUT) - except subprocess.TimeoutExpired: - process.kill() - exit_code = process.wait() - return "\n".join(output), exit_code + env["PYTHON_BASIC_REPL"] = "1" + output, exit_code = self.run_repl(commands, env=env) + check(output, exit_code) + + def test_not_wiping_history_file(self): + # skip, if readline module is not available + import_module('readline') + + hfile = tempfile.NamedTemporaryFile(delete=False) + self.addCleanup(unlink, hfile.name) + env = os.environ.copy() + env["PYTHON_HISTORY"] = hfile.name + commands = "123\nspam\nexit()\n" + + env.pop("PYTHON_BASIC_REPL", None) + output, exit_code = self.run_repl(commands, env=env) + self.assertEqual(exit_code, 0) + self.assertIn("123", output) + self.assertIn("spam", output) + self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) + + hfile.file.truncate() + hfile.close() + + env["PYTHON_BASIC_REPL"] = "1" + output, exit_code = self.run_repl(commands, env=env) + self.assertEqual(exit_code, 0) + self.assertIn("123", output) + self.assertIn("spam", output) + self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) + + @force_not_colorized + def test_correct_filename_in_syntaxerrors(self): + env = os.environ.copy() + commands = "a b c\nexit()\n" + output, exit_code = self.run_repl(commands, env=env) + if "can't use pyrepl" in output: + self.skipTest("pyrepl not available") + self.assertIn("SyntaxError: invalid syntax", output) + self.assertIn("", output) + commands = " b\nexit()\n" + output, exit_code = self.run_repl(commands, env=env) + self.assertIn("IndentationError: unexpected indent", output) + self.assertIn("", output) + + @force_not_colorized + def test_proper_tracebacklimit(self): + env = os.environ.copy() + for set_tracebacklimit in [True, False]: + commands = ("import sys\n" + + ("sys.tracebacklimit = 1\n" if set_tracebacklimit else "") + + "def x1(): 1/0\n\n" + "def x2(): x1()\n\n" + "def x3(): x2()\n\n" + "x3()\n" + "exit()\n") + + for basic_repl in [True, False]: + if basic_repl: + env["PYTHON_BASIC_REPL"] = "1" + else: + env.pop("PYTHON_BASIC_REPL", None) + with self.subTest(set_tracebacklimit=set_tracebacklimit, + basic_repl=basic_repl): + output, exit_code = self.run_repl(commands, env=env) + if "can't use pyrepl" in output: + self.skipTest("pyrepl not available") + self.assertIn("in x1", output) + if set_tracebacklimit: + self.assertNotIn("in x2", output) + self.assertNotIn("in x3", output) + self.assertNotIn("in ", output) + else: + self.assertIn("in x2", output) 
+ self.assertIn("in x3", output) + self.assertIn("in ", output) + + def test_readline_history_file(self): + # skip, if readline module is not available + readline = import_module('readline') + if readline.backend != "editline": + self.skipTest("GNU readline is not affected by this issue") + + hfile = tempfile.NamedTemporaryFile() + self.addCleanup(unlink, hfile.name) + env = os.environ.copy() + env["PYTHON_HISTORY"] = hfile.name + + env["PYTHON_BASIC_REPL"] = "1" + output, exit_code = self.run_repl("spam \nexit()\n", env=env) + self.assertEqual(exit_code, 0) + self.assertIn("spam ", output) + self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) + self.assertIn("spam\\040", pathlib.Path(hfile.name).read_text()) + + env.pop("PYTHON_BASIC_REPL", None) + output, exit_code = self.run_repl("exit\n", env=env) + self.assertEqual(exit_code, 0) + self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text()) + + def test_keyboard_interrupt_after_isearch(self): + output, exit_code = self.run_repl(["\x12", "\x03", "exit"]) + self.assertEqual(exit_code, 0) diff --git a/Lib/test/test_pyrepl/test_reader.py b/Lib/test/test_pyrepl/test_reader.py index 78b11323d60..421545eb1f6 100644 --- a/Lib/test/test_pyrepl/test_reader.py +++ b/Lib/test/test_pyrepl/test_reader.py @@ -31,6 +31,37 @@ def test_calc_screen_wrap_three_lines(self): reader, _ = handle_events_narrow_console(events) self.assert_screen_equals(reader, f"{9*"a"}\\\n{9*"a"}\\\naa") + def test_calc_screen_prompt_handling(self): + def prepare_reader_keep_prompts(*args, **kwargs): + reader = prepare_reader(*args, **kwargs) + del reader.get_prompt + reader.ps1 = ">>> " + reader.ps2 = ">>> " + reader.ps3 = "... " + reader.ps4 = "" + reader.can_colorize = False + reader.paste_mode = False + return reader + + events = code_to_events("if some_condition:\nsome_function()") + reader, _ = handle_events_narrow_console( + events, + prepare_reader=prepare_reader_keep_prompts, + ) + # fmt: off + self.assert_screen_equals( + reader, + ( + ">>> if so\\\n" + "me_condit\\\n" + "ion:\n" + "... 
s\\\n" + "ome_funct\\\n" + "ion()" + ) + ) + # fmt: on + def test_calc_screen_wrap_three_lines_mixed_character(self): # fmt: off code = ( @@ -89,6 +120,12 @@ def test_setpos_for_xy_simple(self): reader.setpos_from_xy(0, 0) self.assertEqual(reader.pos, 0) + def test_control_characters(self): + code = 'flag = "🏳️‍🌈"' + events = code_to_events(code) + reader, _ = handle_all_events(events) + self.assert_screen_equals(reader, 'flag = "🏳️\\u200d🌈"') + def test_setpos_from_xy_multiple_lines(self): # fmt: off code = ( diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py index 9a44ab17686..51f9193b269 100644 --- a/Lib/test/test_random.py +++ b/Lib/test/test_random.py @@ -1433,8 +1433,8 @@ def test_main(self): ("'a a' 'b b' 'c c'", "b b"), ("--integer 5", 4), ("5", 4), - ("--float 2.5", 2.266632777287572), - ("2.5", 2.266632777287572), + ("--float 2.5", 2.1110546288126204), + ("2.5", 2.1110546288126204), ]: random.seed(0) self.assertEqual(random.main(shlex.split(command)), expected) diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index b8b50e8b3c2..2a46e058e95 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1116,47 +1116,76 @@ def test_not_literal(self): def test_possible_set_operations(self): s = bytes(range(128)).decode() - with self.assertWarns(FutureWarning): + with self.assertWarnsRegex(FutureWarning, 'Possible set difference') as w: p = re.compile(r'[0-9--1]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('-./0123456789')) + with self.assertWarnsRegex(FutureWarning, 'Possible set difference') as w: + self.assertEqual(re.findall(r'[0-9--2]', s), list('-./0123456789')) + self.assertEqual(w.filename, __file__) + self.assertEqual(re.findall(r'[--1]', s), list('-./01')) - with self.assertWarns(FutureWarning): + + with self.assertWarnsRegex(FutureWarning, 'Possible set difference') as w: p = re.compile(r'[%--1]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list("%&'()*+,-1")) - with self.assertWarns(FutureWarning): + + with self.assertWarnsRegex(FutureWarning, 'Possible set difference ') as w: p = re.compile(r'[%--]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list("%&'()*+,-")) - with self.assertWarns(FutureWarning): + with self.assertWarnsRegex(FutureWarning, 'Possible set intersection ') as w: p = re.compile(r'[0-9&&1]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('&0123456789')) - with self.assertWarns(FutureWarning): + with self.assertWarnsRegex(FutureWarning, 'Possible set intersection ') as w: + self.assertEqual(re.findall(r'[0-8&&1]', s), list('&012345678')) + self.assertEqual(w.filename, __file__) + + with self.assertWarnsRegex(FutureWarning, 'Possible set intersection ') as w: p = re.compile(r'[\d&&1]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('&0123456789')) + self.assertEqual(re.findall(r'[&&1]', s), list('&1')) - with self.assertWarns(FutureWarning): + with self.assertWarnsRegex(FutureWarning, 'Possible set union ') as w: p = re.compile(r'[0-9||a]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('0123456789a|')) - with self.assertWarns(FutureWarning): + + with self.assertWarnsRegex(FutureWarning, 'Possible set union ') as w: p = re.compile(r'[\d||a]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('0123456789a|')) + self.assertEqual(re.findall(r'[||1]', s), list('1|')) - with self.assertWarns(FutureWarning): + with 
self.assertWarnsRegex(FutureWarning, 'Possible set symmetric difference ') as w: p = re.compile(r'[0-9~~1]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('0123456789~')) - with self.assertWarns(FutureWarning): + + with self.assertWarnsRegex(FutureWarning, 'Possible set symmetric difference ') as w: p = re.compile(r'[\d~~1]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('0123456789~')) + self.assertEqual(re.findall(r'[~~1]', s), list('1~')) - with self.assertWarns(FutureWarning): + with self.assertWarnsRegex(FutureWarning, 'Possible nested set ') as w: p = re.compile(r'[[0-9]|]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list('0123456789[]')) + with self.assertWarnsRegex(FutureWarning, 'Possible nested set ') as w: + self.assertEqual(re.findall(r'[[0-8]|]', s), list('012345678[]')) + self.assertEqual(w.filename, __file__) - with self.assertWarns(FutureWarning): + with self.assertWarnsRegex(FutureWarning, 'Possible nested set ') as w: p = re.compile(r'[[:digit:]|]') + self.assertEqual(w.filename, __file__) self.assertEqual(p.findall(s), list(':[]dgit')) def test_search_coverage(self): diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py index 5e0e6f8dfac..50e77cbbb6b 100644 --- a/Lib/test/test_readline.py +++ b/Lib/test/test_readline.py @@ -12,6 +12,7 @@ from test.support.os_helper import unlink, temp_dir, TESTFN from test.support.pty_helper import run_pty from test.support.script_helper import assert_python_ok +from test.support.threading_helper import requires_working_threading # Skip tests if there is no readline module readline = import_module('readline') @@ -132,6 +133,32 @@ def test_nonascii_history(self): self.assertEqual(readline.get_history_item(1), "entrée 1") self.assertEqual(readline.get_history_item(2), "entrée 22") + def test_write_read_limited_history(self): + previous_length = readline.get_history_length() + self.addCleanup(readline.set_history_length, previous_length) + + readline.clear_history() + readline.add_history("first line") + readline.add_history("second line") + readline.add_history("third line") + + readline.set_history_length(2) + self.assertEqual(readline.get_history_length(), 2) + readline.write_history_file(TESTFN) + self.addCleanup(os.remove, TESTFN) + + readline.clear_history() + self.assertEqual(readline.get_current_history_length(), 0) + self.assertEqual(readline.get_history_length(), 2) + + readline.read_history_file(TESTFN) + self.assertEqual(readline.get_history_item(1), "second line") + self.assertEqual(readline.get_history_item(2), "third line") + self.assertEqual(readline.get_history_item(3), None) + + # Readline seems to report an additional history element. 
+ self.assertIn(readline.get_current_history_length(), (2, 3)) + class TestReadline(unittest.TestCase): @@ -323,6 +350,50 @@ def test_history_size(self): self.assertEqual(len(lines), history_size) self.assertEqual(lines[-1].strip(), b"last input") + @requires_working_threading() + def test_gh123321_threadsafe(self): + """gh-123321: readline should be thread-safe and not crash""" + script = textwrap.dedent(r""" + import threading + from test.support.threading_helper import join_thread + + def func(): + input() + + thread1 = threading.Thread(target=func) + thread2 = threading.Thread(target=func) + thread1.start() + thread2.start() + join_thread(thread1) + join_thread(thread2) + print("done") + """) + + output = run_pty(script, input=b"input1\rinput2\r") + + self.assertIn(b"done", output) + + + def test_write_read_limited_history(self): + previous_length = readline.get_history_length() + self.addCleanup(readline.set_history_length, previous_length) + + readline.add_history("first line") + readline.add_history("second line") + readline.add_history("third line") + + readline.set_history_length(2) + self.assertEqual(readline.get_history_length(), 2) + readline.write_history_file(TESTFN) + self.addCleanup(os.remove, TESTFN) + + readline.read_history_file(TESTFN) + # Without clear_history() there's no good way to test if + # the correct entries are present (we're combining history limiting and + # possible deduplication with arbitrary previous content). + # So, we've only tested that the read did not fail. + # See TestHistoryManipulation for the full test. + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index 17eff617a56..54b6a16a0da 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -21,6 +21,8 @@ import tempfile import textwrap import unittest +from xml.etree import ElementTree + from test import support from test.support import os_helper, without_optimizer from test.libregrtest import cmdline @@ -473,6 +475,19 @@ def test_verbose3_huntrleaks(self): self.assertEqual(regrtest.hunt_refleak.runs, 10) self.assertFalse(regrtest.output_on_failure) + def test_single_process(self): + args = ['-j2', '--single-process'] + with support.captured_stderr(): + regrtest = self.create_regrtest(args) + self.assertEqual(regrtest.num_workers, 0) + self.assertTrue(regrtest.single_process) + + args = ['--fast-ci', '--single-process'] + with support.captured_stderr(): + regrtest = self.create_regrtest(args) + self.assertEqual(regrtest.num_workers, 0) + self.assertTrue(regrtest.single_process) + @dataclasses.dataclass(slots=True) class Rerun: @@ -2243,6 +2258,44 @@ def test_pass(self): self.check_executed_tests(output, testname, stats=1, parallel=True) self.assertNotIn('SPAM SPAM SPAM', output) + def test_xml(self): + code = textwrap.dedent(r""" + import unittest + from test import support + + class VerboseTests(unittest.TestCase): + def test_failed(self): + print("abc \x1b def") + self.fail() + """) + testname = self.create_test(code=code) + + # Run sequentially + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) + + output = self.run_tests(testname, "--junit-xml", filename, + exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=testname, + stats=TestStats(1, 1, 0)) + + # Test generated XML + with open(filename, encoding="utf8") as fp: + content = fp.read() + + testsuite = ElementTree.fromstring(content) + self.assertEqual(int(testsuite.get('tests')), 1) + 
self.assertEqual(int(testsuite.get('errors')), 0) + self.assertEqual(int(testsuite.get('failures')), 1) + + testcase = testsuite[0][0] + self.assertEqual(testcase.get('status'), 'run') + self.assertEqual(testcase.get('result'), 'completed') + self.assertGreater(float(testcase.get('time')), 0) + for out in testcase.iter('system-out'): + self.assertEqual(out.text, r"abc \x1b def") + class TestUtils(unittest.TestCase): def test_format_duration(self): @@ -2426,6 +2479,25 @@ def id(self): self.assertTrue(match_test(test_chdir)) self.assertFalse(match_test(test_copy)) + def test_sanitize_xml(self): + sanitize_xml = utils.sanitize_xml + + # escape invalid XML characters + self.assertEqual(sanitize_xml('abc \x1b\x1f def'), + r'abc \x1b\x1f def') + self.assertEqual(sanitize_xml('nul:\x00, bell:\x07'), + r'nul:\x00, bell:\x07') + self.assertEqual(sanitize_xml('surrogate:\uDC80'), + r'surrogate:\udc80') + self.assertEqual(sanitize_xml('illegal \uFFFE and \uFFFF'), + r'illegal \ufffe and \uffff') + + # no escape for valid XML characters + self.assertEqual(sanitize_xml('a\n\tb'), + 'a\n\tb') + self.assertEqual(sanitize_xml('valid t\xe9xt \u20ac'), + 'valid t\xe9xt \u20ac') + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index 1caf09ceaf1..7a7285a1a2f 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -1,15 +1,27 @@ """Test the interactive interpreter.""" import os +import select import subprocess import sys import unittest from textwrap import dedent from test import support -from test.support import cpython_only, has_subprocess_support, SuppressCrashReport -from test.support.script_helper import kill_python, assert_python_ok +from test.support import ( + cpython_only, + has_subprocess_support, + os_helper, + SuppressCrashReport, + SHORT_TIMEOUT, +) +from test.support.script_helper import kill_python from test.support.import_helper import import_module +try: + import pty +except ImportError: + pty = None + if not has_subprocess_support: raise unittest.SkipTest("test module requires subprocess") @@ -29,7 +41,7 @@ def spawn_repl(*args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kw): # path may be used by Py_GetPath() to build the default module search # path. stdin_fname = os.path.join(os.path.dirname(sys.executable), "") - cmd_line = [stdin_fname, '-E', '-i'] + cmd_line = [stdin_fname, '-I', '-i'] cmd_line.extend(args) # Set TERM=vt100, for the rationale see the comments in spawn_python() of @@ -175,6 +187,19 @@ def foo(x): ] self.assertEqual(traceback_lines, expected_lines) + def test_runsource_show_syntax_error_location(self): + user_input = dedent("""def f(x, x): ... 
+ """) + p = spawn_repl() + p.stdin.write(user_input) + output = kill_python(p) + expected_lines = [ + ' def f(x, x): ...', + ' ^', + "SyntaxError: duplicate argument 'x' in function definition" + ] + self.assertEqual(output.splitlines()[4:-1], expected_lines) + def test_interactive_source_is_in_linecache(self): user_input = dedent(""" def foo(x): @@ -195,9 +220,58 @@ def bar(x): expected = "(30, None, [\'def foo(x):\\n\', \' return x + 1\\n\', \'\\n\'], \'\')" self.assertIn(expected, output, expected) + def test_asyncio_repl_reaches_python_startup_script(self): + with os_helper.temp_dir() as tmpdir: + script = os.path.join(tmpdir, "pythonstartup.py") + with open(script, "w") as f: + f.write("print('pythonstartup done!')" + os.linesep) + f.write("exit(0)" + os.linesep) + + env = os.environ.copy() + env["PYTHON_HISTORY"] = os.path.join(tmpdir, ".asyncio_history") + env["PYTHONSTARTUP"] = script + subprocess.check_call( + [sys.executable, "-m", "asyncio"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env, + timeout=SHORT_TIMEOUT, + ) + + @unittest.skipUnless(pty, "requires pty") def test_asyncio_repl_is_ok(self): - assert_python_ok("-m", "asyncio") + m, s = pty.openpty() + cmd = [sys.executable, "-I", "-m", "asyncio"] + env = os.environ.copy() + proc = subprocess.Popen( + cmd, + stdin=s, + stdout=s, + stderr=s, + text=True, + close_fds=True, + env=env, + ) + os.close(s) + os.write(m, b"await asyncio.sleep(0)\n") + os.write(m, b"exit()\n") + output = [] + while select.select([m], [], [], SHORT_TIMEOUT)[0]: + try: + data = os.read(m, 1024).decode("utf-8") + if not data: + break + except OSError: + break + output.append(data) + os.close(m) + try: + exit_code = proc.wait(timeout=SHORT_TIMEOUT) + except subprocess.TimeoutExpired: + proc.kill() + exit_code = proc.wait() + self.assertEqual(exit_code, 0, "".join(output)) class TestInteractiveModeSyntaxErrors(unittest.TestCase): diff --git a/Lib/test/test_scope.py b/Lib/test/test_scope.py index 6e46dfa96a6..24a366efc6c 100644 --- a/Lib/test/test_scope.py +++ b/Lib/test/test_scope.py @@ -810,6 +810,30 @@ def dig(self): gc_collect() # For PyPy or other GCs. 
self.assertIsNone(ref()) + def test_multiple_nesting(self): + # Regression test for https://github.com/python/cpython/issues/121863 + class MultiplyNested: + def f1(self): + __arg = 1 + class D: + def g(self, __arg): + return __arg + return D().g(_MultiplyNested__arg=2) + + def f2(self): + __arg = 1 + class D: + def g(self, __arg): + return __arg + return D().g + + inst = MultiplyNested() + with self.assertRaises(TypeError): + inst.f1() + + closure = inst.f2() + with self.assertRaises(TypeError): + closure(_MultiplyNested__arg=2) if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_setcomps.py b/Lib/test/test_setcomps.py index 976fa885bd8..0bb02ef11f6 100644 --- a/Lib/test/test_setcomps.py +++ b/Lib/test/test_setcomps.py @@ -1,6 +1,9 @@ import doctest +import traceback import unittest +from test.support import BrokenIter + doctests = """ ########### Tests mostly copied from test_listcomps.py ############ @@ -148,6 +151,42 @@ """ +class SetComprehensionTest(unittest.TestCase): + def test_exception_locations(self): + # The location of an exception raised from __init__ or + # __next__ should should be the iterator expression + + def init_raises(): + try: + {x for x in BrokenIter(init_raises=True)} + except Exception as e: + return e + + def next_raises(): + try: + {x for x in BrokenIter(next_raises=True)} + except Exception as e: + return e + + def iter_raises(): + try: + {x for x in BrokenIter(iter_raises=True)} + except Exception as e: + return e + + for func, expected in [(init_raises, "BrokenIter(init_raises=True)"), + (next_raises, "BrokenIter(next_raises=True)"), + (iter_raises, "BrokenIter(iter_raises=True)"), + ]: + with self.subTest(func): + exc = func() + f = traceback.extract_tb(exc.__traceback__)[0] + indent = 16 + co = func.__code__ + self.assertEqual(f.lineno, co.co_firstlineno + 2) + self.assertEqual(f.end_lineno, co.co_firstlineno + 2) + self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], + expected) __test__ = {'doctests' : doctests} diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index bccb81e0737..91239ce2d95 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1911,7 +1911,10 @@ def test_unzip_zipfile(self): subprocess.check_output(zip_cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as exc: details = exc.output.decode(errors="replace") - if 'unrecognized option: t' in details: + if any(message in details for message in [ + 'unrecognized option: t', # BusyBox + 'invalid option -- t', # Android + ]): self.skipTest("unzip doesn't support -t") msg = "{}\n\n**Unzip Output**\n{}" self.fail(msg.format(exc, details)) diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py index 591cd4177d9..08f18a99f8d 100644 --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -123,6 +123,8 @@ def __repr__(self): self.assertEqual(signal.getsignal(signal.SIGHUP), hup) self.assertEqual(0, argument.repr_count) + @unittest.skipIf(sys.platform.startswith("netbsd"), + "gh-124083: strsignal is not supported on NetBSD") def test_strsignal(self): self.assertIn("Interrupt", signal.strsignal(signal.SIGINT)) self.assertIn("Terminated", signal.strsignal(signal.SIGTERM)) @@ -1325,15 +1327,18 @@ def test_stress_delivery_simultaneous(self): def handler(signum, frame): sigs.append(signum) - self.setsig(signal.SIGUSR1, handler) + # On Android, SIGUSR1 is unreliable when used in close proximity to + # another signal – see Android/testbed/app/src/main/python/main.py. + # So we use a different signal. 
+ self.setsig(signal.SIGUSR2, handler) self.setsig(signal.SIGALRM, handler) # for ITIMER_REAL expected_sigs = 0 while expected_sigs < N: # Hopefully the SIGALRM will be received somewhere during - # initial processing of SIGUSR1. + # initial processing of SIGUSR2. signal.setitimer(signal.ITIMER_REAL, 1e-6 + random.random() * 1e-5) - os.kill(os.getpid(), signal.SIGUSR1) + os.kill(os.getpid(), signal.SIGUSR2) expected_sigs += 2 # Wait for handlers to run to avoid signal coalescing diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py index 0502181854f..2df17b9fe1c 100644 --- a/Lib/test/test_site.py +++ b/Lib/test/test_site.py @@ -328,13 +328,13 @@ def test_getsitepackages(self): if sys.platlibdir != "lib": self.assertEqual(len(dirs), 2) wanted = os.path.join('xoxo', sys.platlibdir, - 'python%d.%d' % sys.version_info[:2], + f'python{sysconfig._get_python_version_abi()}', 'site-packages') self.assertEqual(dirs[0], wanted) else: self.assertEqual(len(dirs), 1) wanted = os.path.join('xoxo', 'lib', - 'python%d.%d' % sys.version_info[:2], + f'python{sysconfig._get_python_version_abi()}', 'site-packages') self.assertEqual(dirs[-1], wanted) else: diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index 680c104db70..90ca61fef18 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -593,19 +593,27 @@ class SocketPairTest(unittest.TestCase, ThreadableTest): def __init__(self, methodName='runTest'): unittest.TestCase.__init__(self, methodName=methodName) ThreadableTest.__init__(self) + self.cli = None + self.serv = None + + def socketpair(self): + # To be overridden by some child classes. + return socket.socketpair() def setUp(self): - self.serv, self.cli = socket.socketpair() + self.serv, self.cli = self.socketpair() def tearDown(self): - self.serv.close() + if self.serv: + self.serv.close() self.serv = None def clientSetUp(self): pass def clientTearDown(self): - self.cli.close() + if self.cli: + self.cli.close() self.cli = None ThreadableTest.clientTearDown(self) @@ -4852,6 +4860,112 @@ def _testSend(self): self.assertEqual(msg, MSG) +class PurePythonSocketPairTest(SocketPairTest): + # Explicitly use socketpair AF_INET or AF_INET6 to ensure that is the + # code path we're using regardless platform is the pure python one where + # `_socket.socketpair` does not exist. (AF_INET does not work with + # _socket.socketpair on many platforms). + def socketpair(self): + # called by super().setUp(). + try: + return socket.socketpair(socket.AF_INET6) + except OSError: + return socket.socketpair(socket.AF_INET) + + # Local imports in this class make for easy security fix backporting. + + def setUp(self): + if hasattr(_socket, "socketpair"): + self._orig_sp = socket.socketpair + # This forces the version using the non-OS provided socketpair + # emulation via an AF_INET socket in Lib/socket.py. + socket.socketpair = socket._fallback_socketpair + else: + # This platform already uses the non-OS provided version. + self._orig_sp = None + super().setUp() + + def tearDown(self): + super().tearDown() + if self._orig_sp is not None: + # Restore the default socket.socketpair definition. 
+ socket.socketpair = self._orig_sp + + def test_recv(self): + msg = self.serv.recv(1024) + self.assertEqual(msg, MSG) + + def _test_recv(self): + self.cli.send(MSG) + + def test_send(self): + self.serv.send(MSG) + + def _test_send(self): + msg = self.cli.recv(1024) + self.assertEqual(msg, MSG) + + def test_ipv4(self): + cli, srv = socket.socketpair(socket.AF_INET) + cli.close() + srv.close() + + def _test_ipv4(self): + pass + + @unittest.skipIf(not hasattr(_socket, 'IPPROTO_IPV6') or + not hasattr(_socket, 'IPV6_V6ONLY'), + "IPV6_V6ONLY option not supported") + @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 required for this test') + def test_ipv6(self): + cli, srv = socket.socketpair(socket.AF_INET6) + cli.close() + srv.close() + + def _test_ipv6(self): + pass + + def test_injected_authentication_failure(self): + orig_getsockname = socket.socket.getsockname + inject_sock = None + + def inject_getsocketname(self): + nonlocal inject_sock + sockname = orig_getsockname(self) + # Connect to the listening socket ahead of the + # client socket. + if inject_sock is None: + inject_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + inject_sock.setblocking(False) + try: + inject_sock.connect(sockname[:2]) + except (BlockingIOError, InterruptedError): + pass + inject_sock.setblocking(True) + return sockname + + sock1 = sock2 = None + try: + socket.socket.getsockname = inject_getsocketname + with self.assertRaises(OSError): + sock1, sock2 = socket.socketpair() + finally: + socket.socket.getsockname = orig_getsockname + if inject_sock: + inject_sock.close() + if sock1: # This cleanup isn't needed on a successful test. + sock1.close() + if sock2: + sock2.close() + + def _test_injected_authentication_failure(self): + # No-op. Exists for base class threading infrastructure to call. + # We could refactor this test into its own lesser class along with the + # setUp and tearDown code to construct an ideal; it is simpler to keep + # it here and live with extra overhead one this _one_ failure test. 
+ pass + + class NonBlockingTCPTests(ThreadedTCPSocketTest): def __init__(self, methodName='runTest'): diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 6ec010d13f9..9c415bd7d1c 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -103,6 +103,7 @@ def data_file(*name): # Two keys and certs signed by the same CA (for SNI tests) SIGNED_CERTFILE = data_file("keycert3.pem") +SINGED_CERTFILE_ONLY = data_file("cert3.pem") SIGNED_CERTFILE_HOSTNAME = 'localhost' SIGNED_CERTFILE_INFO = { @@ -4720,6 +4721,40 @@ def test_internal_chain_client(self): ssl.PEM_cert_to_DER_cert(pem), der ) + def test_certificate_chain(self): + client_context, server_context, hostname = testing_context( + server_chain=False + ) + server = ThreadedEchoServer(context=server_context, chatty=False) + + with open(SIGNING_CA) as f: + expected_ca_cert = ssl.PEM_cert_to_DER_cert(f.read()) + + with open(SINGED_CERTFILE_ONLY) as f: + expected_ee_cert = ssl.PEM_cert_to_DER_cert(f.read()) + + with server: + with client_context.wrap_socket( + socket.socket(), + server_hostname=hostname + ) as s: + s.connect((HOST, server.port)) + vc = s.get_verified_chain() + self.assertEqual(len(vc), 2) + + ee, ca = vc + self.assertIsInstance(ee, bytes) + self.assertIsInstance(ca, bytes) + self.assertEqual(expected_ca_cert, ca) + self.assertEqual(expected_ee_cert, ee) + + uvc = s.get_unverified_chain() + self.assertEqual(len(uvc), 1) + self.assertIsInstance(uvc[0], bytes) + + self.assertEqual(ee, uvc[0]) + self.assertNotEqual(ee, ca) + def test_internal_chain_server(self): client_context, server_context, hostname = testing_context() client_context.load_cert_chain(SIGNED_CERTFILE) diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 6f68edd447c..7b6037529a3 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -1072,7 +1072,7 @@ def test_no_inplace_modifications(self): def test_order_doesnt_matter(self): # Test that the order of data points doesn't change the result. - # CAUTION: due to floating point rounding errors, the result actually + # CAUTION: due to floating-point rounding errors, the result actually # may depend on the order. Consider this test representing an ideal. # To avoid this test failing, only test with exact values such as ints # or Fractions. diff --git a/Lib/test/test_str.py b/Lib/test/test_str.py index ea37eb5d964..e9ed7a2156a 100644 --- a/Lib/test/test_str.py +++ b/Lib/test/test_str.py @@ -1736,8 +1736,6 @@ def __str__(self): 'character buffers are decoded to unicode' ) - self.assertRaises(TypeError, str, 42, 42, 42) - def test_constructor_keyword_args(self): """Pass various keyword argument combinations to the constructor.""" # The object argument can be passed as a keyword. 
@@ -2652,22 +2650,45 @@ def test_check_encoding_errors(self): self.assertEqual(proc.rc, 10, proc) def test_str_invalid_call(self): - check = lambda *a, **kw: self.assertRaises(TypeError, str, *a, **kw) - # too many args - check(1, "", "", 1) + with self.assertRaisesRegex(TypeError, r"str expected at most 3 arguments, got 4"): + str("too", "many", "argu", "ments") + with self.assertRaisesRegex(TypeError, r"str expected at most 3 arguments, got 4"): + str(1, "", "", 1) # no such kw arg - check(test=1) + with self.assertRaisesRegex(TypeError, r"str\(\) got an unexpected keyword argument 'test'"): + str(test=1) # 'encoding' must be str - check(1, encoding=1) - check(1, 1) + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'encoding' must be str, not int"): + str(1, 1) + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'encoding' must be str, not int"): + str(1, encoding=1) + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'encoding' must be str, not bytes"): + str(b"x", b"ascii") + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'encoding' must be str, not bytes"): + str(b"x", encoding=b"ascii") # 'errors' must be str - check(1, errors=1) - check(1, "", errors=1) - check(1, 1, 1) + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'encoding' must be str, not int"): + str(1, 1, 1) + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'errors' must be str, not int"): + str(1, errors=1) + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'errors' must be str, not int"): + str(1, "", errors=1) + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'errors' must be str, not bytes"): + str(b"x", "ascii", b"strict") + with self.assertRaisesRegex(TypeError, r"str\(\) argument 'errors' must be str, not bytes"): + str(b"x", "ascii", errors=b"strict") + + # both positional and kwarg + with self.assertRaisesRegex(TypeError, r"argument for str\(\) given by name \('encoding'\) and position \(2\)"): + str(b"x", "utf-8", encoding="ascii") + with self.assertRaisesRegex(TypeError, r"str\(\) takes at most 3 arguments \(4 given\)"): + str(b"x", "utf-8", "ignore", encoding="ascii") + with self.assertRaisesRegex(TypeError, r"str\(\) takes at most 3 arguments \(4 given\)"): + str(b"x", "utf-8", "strict", errors="ignore") class StringModuleTest(unittest.TestCase): diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index 5508cc3eec8..bdbf8800cfd 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -96,6 +96,13 @@ def test_new_features(self): ('10s', b'helloworld', b'helloworld', b'helloworld', 0), ('11s', b'helloworld', b'helloworld\0', b'helloworld\0', 1), ('20s', b'helloworld', b'helloworld'+10*b'\0', b'helloworld'+10*b'\0', 1), + ('0p', b'helloworld', b'', b'', 1), + ('1p', b'helloworld', b'\x00', b'\x00', 1), + ('2p', b'helloworld', b'\x01h', b'\x01h', 1), + ('10p', b'helloworld', b'\x09helloworl', b'\x09helloworl', 1), + ('11p', b'helloworld', b'\x0Ahelloworld', b'\x0Ahelloworld', 0), + ('12p', b'helloworld', b'\x0Ahelloworld\0', b'\x0Ahelloworld\0', 1), + ('20p', b'helloworld', b'\x0Ahelloworld'+9*b'\0', b'\x0Ahelloworld'+9*b'\0', 1), ('b', 7, b'\7', b'\7', 0), ('b', -7, b'\371', b'\371', 0), ('B', 7, b'\7', b'\7', 0), @@ -339,6 +346,7 @@ def assertStructError(func, *args, **kwargs): def test_p_code(self): # Test p ("Pascal string") code. 
for code, input, expected, expectedback in [ + ('0p', b'abc', b'', b''), ('p', b'abc', b'\x00', b''), ('1p', b'abc', b'\x00', b''), ('2p', b'abc', b'\x01a', b'a'), @@ -580,6 +588,7 @@ def test__sizeof__(self): self.check_sizeof('187s', 1) self.check_sizeof('20p', 1) self.check_sizeof('0s', 1) + self.check_sizeof('0p', 1) self.check_sizeof('0c', 0) def test_boundary_error_message(self): diff --git a/Lib/test/test_structseq.py b/Lib/test/test_structseq.py index 6aec63e2603..d0bc0bd7b61 100644 --- a/Lib/test/test_structseq.py +++ b/Lib/test/test_structseq.py @@ -2,8 +2,10 @@ import os import pickle import re +import textwrap import time import unittest +from test.support import script_helper class StructSeqTest(unittest.TestCase): @@ -342,6 +344,17 @@ def test_copy_replace_with_unnamed_fields(self): with self.assertRaisesRegex(TypeError, error_message): copy.replace(r, st_mode=1, error=2) + def test_reference_cycle(self): + # gh-122527: Check that a structseq that's part of a reference cycle + # with its own type doesn't crash. Previously, if the type's dictionary + # was cleared first, the structseq instance would crash in the + # destructor. + script_helper.assert_python_ok("-c", textwrap.dedent(r""" + import time + t = time.gmtime() + type(t).refcyle = t + """)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 0162c805025..364c8efe168 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -1411,7 +1411,7 @@ def open_fds(): t = threading.Thread(target=open_fds) t.start() try: - with self.assertRaises(EnvironmentError): + with self.assertRaises(OSError): subprocess.Popen(NONEXISTING_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE, diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index 3ffbe03f0c2..1222ec6a3c4 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -513,6 +513,7 @@ def test___class___modification_multithreaded(self): This should be the case anyways as our test suite sets an audit hook. """ + class Foo: pass @@ -522,7 +523,7 @@ class Bar: thing = Foo() def work(): foo = thing - for _ in range(5000): + for _ in range(200): foo.__class__ = Bar type(foo) foo.__class__ = Foo diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index d160cbf0645..4310f06924a 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -547,13 +547,14 @@ def test_optim_args_from_interpreter_flags(self): with self.subTest(opts=opts): self.check_options(opts, 'optim_args_from_interpreter_flags') + @unittest.skipIf(support.is_apple_mobile, "Unstable on Apple Mobile") @unittest.skipIf(support.is_emscripten, "Unstable in Emscripten") @unittest.skipIf(support.is_wasi, "Unavailable on WASI") def test_fd_count(self): - # We cannot test the absolute value of fd_count(): on old Linux - # kernel or glibc versions, os.urandom() keeps a FD open on - # /dev/urandom device and Python has 4 FD opens instead of 3. - # Test is unstable on Emscripten. The platform starts and stops + # We cannot test the absolute value of fd_count(): on old Linux kernel + # or glibc versions, os.urandom() keeps a FD open on /dev/urandom + # device and Python has 4 FD opens instead of 3. Test is unstable on + # Emscripten and Apple Mobile platforms; these platforms start and stop # background threads that use pipes and epoll fds. 
start = os_helper.fd_count() fd = os.open(__file__, os.O_RDONLY) diff --git a/Lib/test/test_symtable.py b/Lib/test/test_symtable.py index 2443898c981..c1b7030d2d2 100644 --- a/Lib/test/test_symtable.py +++ b/Lib/test/test_symtable.py @@ -1,6 +1,8 @@ """ Test the API of the symtable module. """ + +import textwrap import symtable import unittest @@ -354,7 +356,7 @@ def test_name(self): self.assertEqual(self.spam.lookup("x").get_name(), "x") self.assertEqual(self.Mine.get_name(), "Mine") - def test_class_info(self): + def test_class_get_methods(self): self.assertEqual(self.Mine.get_methods(), ('a_method',)) top = symtable.symtable(TEST_COMPLEX_CLASS_CODE, "?", "exec") @@ -375,6 +377,58 @@ def test_class_info(self): 'glob_assigned_async_meth', 'glob_assigned_async_meth_pep_695', )) + # Test generator expressions that are of type TYPE_FUNCTION + # but will not be reported by get_methods() since they are + # not functions per se. + # + # Other kind of comprehensions such as list, set or dict + # expressions do not have the TYPE_FUNCTION type. + + def check_body(body, expected_methods): + indented = textwrap.indent(body, ' ' * 4) + top = symtable.symtable(f"class A:\n{indented}", "?", "exec") + this = find_block(top, "A") + self.assertEqual(this.get_methods(), expected_methods) + + # statements with 'genexpr' inside it + GENEXPRS = ( + 'x = (x for x in [])', + 'x = (x async for x in [])', + 'type x[genexpr = (x for x in [])] = (x for x in [])', + 'type x[genexpr = (x async for x in [])] = (x async for x in [])', + 'genexpr = (x for x in [])', + 'genexpr = (x async for x in [])', + 'type genexpr[genexpr = (x for x in [])] = (x for x in [])', + 'type genexpr[genexpr = (x async for x in [])] = (x async for x in [])', + ) + + for gen in GENEXPRS: + # test generator expression + with self.subTest(gen=gen): + check_body(gen, ()) + + # test generator expression + variable named 'genexpr' + with self.subTest(gen=gen, isvar=True): + check_body('\n'.join((gen, 'genexpr = 1')), ()) + check_body('\n'.join(('genexpr = 1', gen)), ()) + + for paramlist in ('()', '(x)', '(x, y)', '(z: T)'): + for func in ( + f'def genexpr{paramlist}:pass', + f'async def genexpr{paramlist}:pass', + f'def genexpr[T]{paramlist}:pass', + f'async def genexpr[T]{paramlist}:pass', + ): + with self.subTest(func=func): + # test function named 'genexpr' + check_body(func, ('genexpr',)) + + for gen in GENEXPRS: + with self.subTest(gen=gen, func=func): + # test generator expression + function named 'genexpr' + check_body('\n'.join((gen, func)), ('genexpr',)) + check_body('\n'.join((func, gen)), ('genexpr',)) + def test_filename_correct(self): ### Bug tickler: SyntaxError file name correct whether error raised ### while parsing or building symbol table. 
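
Note on the test_symtable change above: the new test_class_get_methods cases rely on symtable.SymbolTable.get_methods() not reporting generator expressions, even when they are bound to a name such as 'genexpr' and their block is internally of TYPE_FUNCTION. A minimal usage sketch follows; it is not part of the patch, the example source, the class name A, and the expected output are illustrative only and assume the fixed behaviour the test describes:

    import symtable
    import textwrap

    source = textwrap.dedent("""
        class A:
            genexpr = (x for x in [])   # generator expression bound to a name
            def a_method(self):
                pass
    """)

    top = symtable.symtable(source, "<example>", "exec")
    # Find the symbol table of the class body among the module's children.
    class_block = next(child for child in top.get_children()
                       if child.get_name() == "A")
    # Only true function definitions are reported as methods; the generator
    # expression bound to 'genexpr' is not.
    print(class_block.get_methods())   # expected: ('a_method',)
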
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 112c4f75ff2..3d2bcf2358f 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1603,7 +1603,7 @@ class C(object): pass def func(): return sys._getframe() x = func() - check(x, size('3Pi2cP7P2ic??2P')) + check(x, size('3Pi2c2P7P2ic??2P')) # function def func(): pass check(func, size('15Pi')) @@ -1822,7 +1822,8 @@ def test_pythontypes(self): # symtable entry # XXX # sys.flags - check(sys.flags, vsize('') + self.P * len(sys.flags)) + # FIXME: The +1 will not be necessary once gh-122575 is fixed + check(sys.flags, vsize('') + self.P * (1 + len(sys.flags))) def test_asyncgen_hooks(self): old = sys.get_asyncgen_hooks() diff --git a/Lib/test/test_sys_setprofile.py b/Lib/test/test_sys_setprofile.py index 32e03d7cd25..b2e8e8a15b6 100644 --- a/Lib/test/test_sys_setprofile.py +++ b/Lib/test/test_sys_setprofile.py @@ -479,6 +479,20 @@ def f(): sys.setprofile(lambda *args: None) f() + def test_method_with_c_function(self): + # gh-122029 + # When we have a PyMethodObject whose im_func is a C function, we + # should record both the call and the return. f = classmethod(repr) + # is just a way to create a PyMethodObject with a C function. + class A: + f = classmethod(repr) + events = [] + sys.setprofile(lambda frame, event, args: events.append(event)) + A().f() + sys.setprofile(None) + # The last c_call is the call to sys.setprofile + self.assertEqual(events, ['c_call', 'c_return', 'c_call']) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index bc2dd479fa0..42ed96fa27b 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -157,7 +157,7 @@ def test_posix_venv_scheme(self): binpath = 'bin' incpath = 'include' libpath = os.path.join('lib', - 'python%d.%d' % sys.version_info[:2], + f'python{sysconfig._get_python_version_abi()}', 'site-packages') # Resolve the paths in an imaginary venv/ directory @@ -417,8 +417,8 @@ def test_user_similar(self): if name == 'platlib': # Replace "/lib64/python3.11/site-packages" suffix # with "/lib/python3.11/site-packages". - py_version_short = sysconfig.get_python_version() - suffix = f'python{py_version_short}/site-packages' + py_version_abi = sysconfig._get_python_version_abi() + suffix = f'python{py_version_abi}/site-packages' expected = expected.replace(f'/{sys.platlibdir}/{suffix}', f'/lib/{suffix}') self.assertEqual(user_path, expected) diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index f715940de1d..9a540765f8c 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -1268,6 +1268,48 @@ def test_pax_number_fields(self): finally: tar.close() + def test_pax_header_bad_formats(self): + # The fields from the pax header have priority over the + # TarInfo. 
+ pax_header_replacements = ( + b" foo=bar\n", + b"0 \n", + b"1 \n", + b"2 \n", + b"3 =\n", + b"4 =a\n", + b"1000000 foo=bar\n", + b"0 foo=bar\n", + b"-12 foo=bar\n", + b"000000000000000000000000036 foo=bar\n", + ) + pax_headers = {"foo": "bar"} + + for replacement in pax_header_replacements: + with self.subTest(header=replacement): + tar = tarfile.open(tmpname, "w", format=tarfile.PAX_FORMAT, + encoding="iso8859-1") + try: + t = tarfile.TarInfo() + t.name = "pax" # non-ASCII + t.uid = 1 + t.pax_headers = pax_headers + tar.addfile(t) + finally: + tar.close() + + with open(tmpname, "rb") as f: + data = f.read() + self.assertIn(b"11 foo=bar\n", data) + data = data.replace(b"11 foo=bar\n", replacement) + + with open(tmpname, "wb") as f: + f.truncate() + f.write(data) + + with self.assertRaisesRegex(tarfile.ReadError, r"method tar: ReadError\('invalid header'\)"): + tarfile.open(tmpname, encoding="iso8859-1") + class WriteTestBase(TarTest): # Put all write tests in here that are supposed to be tested diff --git a/Lib/test/test_termios.py b/Lib/test/test_termios.py index 58698ffac2d..11928f04a8a 100644 --- a/Lib/test/test_termios.py +++ b/Lib/test/test_termios.py @@ -94,7 +94,7 @@ def test_tcsendbreak(self): try: termios.tcsendbreak(self.fd, 1) except termios.error as exc: - if exc.args[0] == errno.ENOTTY and sys.platform.startswith('freebsd'): + if exc.args[0] == errno.ENOTTY and sys.platform.startswith(('freebsd', "netbsd")): self.skipTest('termios.tcsendbreak() is not supported ' 'with pseudo-terminals (?) on this platform') raise diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py index 293799ff68e..530c317a852 100644 --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -654,8 +654,7 @@ def year4d(y): self.test_year('%04d', func=year4d) def skip_if_not_supported(y): - msg = "strftime() is limited to [1; 9999] with Visual Studio" - # Check that it doesn't crash for year > 9999 + msg = f"strftime() does not support year {y} on this platform" try: time.strftime('%Y', (y,) + (0,) * 8) except ValueError: diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 51aeb35f010..75710db7d05 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -228,7 +228,7 @@ def test_long(self): """) def test_float(self): - # Floating point numbers + # Floating-point numbers self.check_tokenize("x = 3.14159", """\ NAME 'x' (1, 0) (1, 1) OP '=' (1, 2) (1, 3) @@ -1919,6 +1919,26 @@ def test_roundtrip(self): self.check_roundtrip(r"f'\\\\N{{'") self.check_roundtrip(r"f'\\\\\\N{{'") self.check_roundtrip(r"f'\\\\\\\\N{{'") + + self.check_roundtrip(r"f'\n{{foo}}'") + self.check_roundtrip(r"f'\\n{{foo}}'") + self.check_roundtrip(r"f'\\\n{{foo}}'") + self.check_roundtrip(r"f'\\\\n{{foo}}'") + + self.check_roundtrip(r"f'\t{{foo}}'") + self.check_roundtrip(r"f'\\t{{foo}}'") + self.check_roundtrip(r"f'\\\t{{foo}}'") + self.check_roundtrip(r"f'\\\\t{{foo}}'") + + self.check_roundtrip(r"rf'\t{{foo}}'") + self.check_roundtrip(r"rf'\\t{{foo}}'") + self.check_roundtrip(r"rf'\\\t{{foo}}'") + self.check_roundtrip(r"rf'\\\\t{{foo}}'") + + self.check_roundtrip(r"rf'\{{foo}}'") + self.check_roundtrip(r"f'\\{{foo}}'") + self.check_roundtrip(r"rf'\\\{{foo}}'") + self.check_roundtrip(r"f'\\\\{{foo}}'") cases = [ """ if 1: diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 5035de114b5..a78aded4ccf 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -696,6 +696,35 @@ def f_with_multiline(): result_lines = self.get_exception(f_with_multiline) 
self.assertEqual(result_lines, expected_f.splitlines()) + # Check custom error messages covering multiple lines + code = textwrap.dedent(""" + dummy_call( + "dummy value" + foo="bar", + ) + """) + + def f_with_multiline(): + # Need to defer the compilation until in self.get_exception(..) + return compile(code, "?", "exec") + + lineno_f = f_with_multiline.__code__.co_firstlineno + + expected_f = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+2}, in f_with_multiline\n' + ' return compile(code, "?", "exec")\n' + ' File "?", line 3\n' + ' "dummy value"\n' + ' ^^^^^^^^^^^^^' + ) + + result_lines = self.get_exception(f_with_multiline) + self.assertEqual(result_lines, expected_f.splitlines()) + def test_caret_multiline_expression_bin_op(self): # Make sure no carets are printed for expressions spanning multiple # lines. @@ -2309,19 +2338,22 @@ def test_message_none(self): def test_syntax_error_various_offsets(self): for offset in range(-5, 10): for add in [0, 2]: - text = " "*add + "text%d" % offset + text = " " * add + "text%d" % offset expected = [' File "file.py", line 1'] if offset < 1: expected.append(" %s" % text.lstrip()) elif offset <= 6: expected.append(" %s" % text.lstrip()) - expected.append(" %s^" % (" "*(offset-1))) + # Set the caret length to match the length of the text minus the offset. + caret_length = max(1, len(text.lstrip()) - offset + 1) + expected.append(" %s%s" % (" " * (offset - 1), "^" * caret_length)) else: + caret_length = max(1, len(text.lstrip()) - 4) expected.append(" %s" % text.lstrip()) - expected.append(" %s^" % (" "*5)) + expected.append(" %s%s" % (" " * 5, "^" * caret_length)) expected.append("SyntaxError: msg") expected.append("") - err = self.get_report(SyntaxError("msg", ("file.py", 1, offset+add, text))) + err = self.get_report(SyntaxError("msg", ("file.py", 1, offset + add, text))) exp = "\n".join(expected) self.assertEqual(exp, err) @@ -3272,6 +3304,41 @@ def format_frame_summary(self, frame_summary, colorize=False): f' File "{__file__}", line {lno}, in f\n 1/0\n' ) + def test_summary_should_show_carets(self): + # See: https://github.com/python/cpython/issues/122353 + + # statement to execute and to get a ZeroDivisionError for a traceback + statement = "abcdef = 1 / 0 and 2.0" + colno = statement.index('1 / 0') + end_colno = colno + len('1 / 0') + + # Actual line to use when rendering the traceback + # and whose AST will be extracted (it will be empty). + cached_line = '# this line will be used during rendering' + self.addCleanup(unlink, TESTFN) + with open(TESTFN, "w") as file: + file.write(cached_line) + linecache.updatecache(TESTFN, {}) + + try: + exec(compile(statement, TESTFN, "exec")) + except ZeroDivisionError as exc: + # This is the simplest way to create a StackSummary + # whose FrameSummary items have their column offsets. 
+ s = traceback.TracebackException.from_exception(exc).stack + self.assertIsInstance(s, traceback.StackSummary) + with unittest.mock.patch.object(s, '_should_show_carets', + wraps=s._should_show_carets) as ff: + self.assertEqual(len(s), 2) + self.assertListEqual( + s.format_frame_summary(s[1]).splitlines(), + [ + f' File "{TESTFN}", line 1, in ', + f' {cached_line}' + ] + ) + ff.assert_called_with(colno, end_colno, [cached_line], None) + class Unrepresentable: def __repr__(self) -> str: raise Exception("Unrepresentable") diff --git a/Lib/test/test_ttk/test_style.py b/Lib/test/test_ttk/test_style.py index 9a04a95dc40..eeaf5de2e30 100644 --- a/Lib/test/test_ttk/test_style.py +++ b/Lib/test/test_ttk/test_style.py @@ -227,13 +227,13 @@ def test_element_create_image(self): foreground='blue', background='yellow') img3 = tkinter.BitmapImage(master=self.root, file=imgfile, foreground='white', background='black') - style.element_create('Button.button', 'image', + style.element_create('TestButton.button', 'image', img1, ('pressed', img2), ('active', img3), border=(2, 4), sticky='we') - self.assertIn('Button.button', style.element_names()) + self.assertIn('TestButton.button', style.element_names()) - style.layout('Button', [('Button.button', {'sticky': 'news'})]) - b = ttk.Button(self.root, style='Button') + style.layout('TestButton', [('TestButton.button', {'sticky': 'news'})]) + b = ttk.Button(self.root, style='TestButton') b.pack(expand=True, fill='both') self.assertEqual(b.winfo_reqwidth(), 16) self.assertEqual(b.winfo_reqheight(), 16) diff --git a/Lib/test/test_ttk/test_widgets.py b/Lib/test/test_ttk/test_widgets.py index cb210b7d2fc..88740b18864 100644 --- a/Lib/test/test_ttk/test_widgets.py +++ b/Lib/test/test_ttk/test_widgets.py @@ -963,8 +963,7 @@ def create(self, **kwargs): return ttk.Scrollbar(self.root, **kwargs) -@add_standard_options(PixelSizeTests if tk_version >= (8, 7) else IntegerSizeTests, - StandardTtkOptionsTests) +@add_standard_options(StandardTtkOptionsTests) class NotebookTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'height', 'padding', 'style', 'takefocus', 'width', @@ -983,6 +982,20 @@ def setUp(self): def create(self, **kwargs): return ttk.Notebook(self.root, **kwargs) + def test_configure_height(self): + widget = self.create() + if get_tk_patchlevel(self.root) < (8, 6, 15): + self.checkIntegerParam(widget, 'height', 402, -402, 0) + else: + self.checkPixelsParam(widget, 'height', '10c', 402, -402, 0, conv=False) + + def test_configure_width(self): + widget = self.create() + if get_tk_patchlevel(self.root) < (8, 6, 15): + self.checkIntegerParam(widget, 'width', 402, -402, 0) + else: + self.checkPixelsParam(widget, 'width', '10c', 402, -402, 0, conv=False) + def test_tab_identifiers(self): self.nb.forget(0) self.nb.hide(self.child2) diff --git a/Lib/test/test_type_aliases.py b/Lib/test/test_type_aliases.py index f8b395fdc8b..4c17933e7f7 100644 --- a/Lib/test/test_type_aliases.py +++ b/Lib/test/test_type_aliases.py @@ -212,6 +212,19 @@ def test_generic(self): self.assertEqual(TA.__value__, list[T]) self.assertEqual(TA.__type_params__, (T,)) self.assertEqual(TA.__module__, __name__) + self.assertIs(type(TA[int]), types.GenericAlias) + + def test_not_generic(self): + TA = TypeAliasType("TA", list[int], type_params=()) + self.assertEqual(TA.__name__, "TA") + self.assertEqual(TA.__value__, list[int]) + self.assertEqual(TA.__type_params__, ()) + self.assertEqual(TA.__module__, __name__) + with self.assertRaisesRegex( + TypeError, + "Only generic type 
aliases are subscriptable", + ): + TA[int] def test_keywords(self): TA = TypeAliasType(name="TA", value=int) diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py index 09c68d408cc..8e2bb0c2382 100644 --- a/Lib/test/test_type_cache.py +++ b/Lib/test/test_type_cache.py @@ -160,8 +160,8 @@ def load_foo_2(type_): self._check_specialization(load_foo_2, A, "LOAD_ATTR", should_specialize=False) def test_class_load_attr_specialization_static_type(self): - self._assign_valid_version_or_skip(str) - self._assign_valid_version_or_skip(bytes) + self.assertNotEqual(type_get_version(str), 0) + self.assertNotEqual(type_get_version(bytes), 0) def get_capitalize_1(type_): return type_.capitalize @@ -169,25 +169,6 @@ def get_capitalize_1(type_): self._check_specialization(get_capitalize_1, str, "LOAD_ATTR", should_specialize=True) self.assertEqual(get_capitalize_1(str)('hello'), 'Hello') self.assertEqual(get_capitalize_1(bytes)(b'hello'), b'Hello') - del get_capitalize_1 - - # Permanently overflow the static type version counter, and force str and bytes - # to have tp_version_tag == 0 - for _ in range(2**16): - type_modified(str) - type_assign_version(str) - type_modified(bytes) - type_assign_version(bytes) - - self.assertEqual(type_get_version(str), 0) - self.assertEqual(type_get_version(bytes), 0) - - def get_capitalize_2(type_): - return type_.capitalize - - self._check_specialization(get_capitalize_2, str, "LOAD_ATTR", should_specialize=False) - self.assertEqual(get_capitalize_2(str)('hello'), 'Hello') - self.assertEqual(get_capitalize_2(bytes)(b'hello'), b'Hello') def test_property_load_attr_specialization_user_type(self): class G: diff --git a/Lib/test/test_type_params.py b/Lib/test/test_type_params.py index bf1a34b9fc8..4eb4bf024de 100644 --- a/Lib/test/test_type_params.py +++ b/Lib/test/test_type_params.py @@ -950,6 +950,7 @@ class C[T](Base, a=1, b=2, **kwargs): T, = C.__type_params__ self.assertEqual(T.__name__, "T") self.assertEqual(C.kwargs, {"a": 1, "b": 2, "c": 3}) + self.assertEqual(C.__bases__, (Base, Generic)) bases = (Base,) class C2[T](*bases, **kwargs): @@ -958,6 +959,22 @@ class C2[T](*bases, **kwargs): T, = C2.__type_params__ self.assertEqual(T.__name__, "T") self.assertEqual(C2.kwargs, {"c": 3}) + self.assertEqual(C2.__bases__, (Base, Generic)) + + def test_starargs_base(self): + class C1[T](*()): pass + + T, = C1.__type_params__ + self.assertEqual(T.__name__, "T") + self.assertEqual(C1.__bases__, (Generic,)) + + class Base: pass + bases = [Base] + class C2[T](*bases): pass + + T, = C2.__type_params__ + self.assertEqual(T.__name__, "T") + self.assertEqual(C2.__bases__, (Base, Generic)) class TypeParamsTraditionalTypeVarsTest(unittest.TestCase): diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index fbca198aab5..2844047bd84 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -1,6 +1,11 @@ # Python test set -- part 6, built-in types -from test.support import run_with_locale, cpython_only, MISSING_C_DOCSTRINGS +from test.support import ( + run_with_locale, is_apple_mobile, cpython_only, + iter_builtin_types, iter_slot_wrappers, + MISSING_C_DOCSTRINGS, +) +from test.test_import import no_rerun import collections.abc from collections import namedtuple, UserDict import copy @@ -10,6 +15,7 @@ import pickle import locale import sys +import textwrap import types import unittest.mock import weakref @@ -2345,5 +2351,50 @@ def ex(a, /, b, *, c): ) +class SubinterpreterTests(unittest.TestCase): + + @classmethod + def setUpClass(cls): + global 
interpreters + try: + from test.support import interpreters + except ModuleNotFoundError: + raise unittest.SkipTest('subinterpreters required') + import test.support.interpreters.channels + + @cpython_only + @no_rerun('channels (and queues) might have a refleak; see gh-122199') + def test_static_types_inherited_slots(self): + rch, sch = interpreters.channels.create() + + slots = [] + script = '' + for cls in iter_builtin_types(): + for slot, own in iter_slot_wrappers(cls): + slots.append((cls, slot, own)) + script += textwrap.dedent(f""" + text = repr({cls.__name__}.{slot}) + sch.send_nowait(({cls.__name__!r}, {slot!r}, text)) + """) + + exec(script) + all_expected = [] + for cls, slot, _ in slots: + result = rch.recv() + assert result == (cls.__name__, slot, result[-1]), (cls, slot, result) + all_expected.append(result) + + interp = interpreters.create() + interp.exec('from test.support import interpreters') + interp.prepare_main(sch=sch) + interp.exec(script) + + for i, (cls, slot, _) in enumerate(slots): + with self.subTest(cls=cls, slot=slot): + expected = all_expected[i] + result = rch.recv() + self.assertEqual(result, expected) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 0b6cae2093d..ec431af8119 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -578,6 +578,55 @@ def test_constructor(self): self.assertEqual(T.__name__, "T") self.assertEqual(T.__constraints__, ()) self.assertIs(T.__bound__, None) + self.assertIs(T.__default__, typing.NoDefault) + self.assertIs(T.__covariant__, False) + self.assertIs(T.__contravariant__, False) + self.assertIs(T.__infer_variance__, False) + + T = TypeVar(name="T", bound=type) + self.assertEqual(T.__name__, "T") + self.assertEqual(T.__constraints__, ()) + self.assertIs(T.__bound__, type) + self.assertIs(T.__default__, typing.NoDefault) + self.assertIs(T.__covariant__, False) + self.assertIs(T.__contravariant__, False) + self.assertIs(T.__infer_variance__, False) + + T = TypeVar(name="T", default=()) + self.assertEqual(T.__name__, "T") + self.assertEqual(T.__constraints__, ()) + self.assertIs(T.__bound__, None) + self.assertIs(T.__default__, ()) + self.assertIs(T.__covariant__, False) + self.assertIs(T.__contravariant__, False) + self.assertIs(T.__infer_variance__, False) + + T = TypeVar(name="T", covariant=True) + self.assertEqual(T.__name__, "T") + self.assertEqual(T.__constraints__, ()) + self.assertIs(T.__bound__, None) + self.assertIs(T.__default__, typing.NoDefault) + self.assertIs(T.__covariant__, True) + self.assertIs(T.__contravariant__, False) + self.assertIs(T.__infer_variance__, False) + + T = TypeVar(name="T", contravariant=True) + self.assertEqual(T.__name__, "T") + self.assertEqual(T.__constraints__, ()) + self.assertIs(T.__bound__, None) + self.assertIs(T.__default__, typing.NoDefault) + self.assertIs(T.__covariant__, False) + self.assertIs(T.__contravariant__, True) + self.assertIs(T.__infer_variance__, False) + + T = TypeVar(name="T", infer_variance=True) + self.assertEqual(T.__name__, "T") + self.assertEqual(T.__constraints__, ()) + self.assertIs(T.__bound__, None) + self.assertIs(T.__default__, typing.NoDefault) + self.assertIs(T.__covariant__, False) + self.assertIs(T.__contravariant__, False) + self.assertIs(T.__infer_variance__, True) class TypeParameterDefaultsTests(BaseTestCase): diff --git a/Lib/test/test_unittest/test_util.py b/Lib/test/test_unittest/test_util.py new file mode 100644 index 00000000000..d590a333930 --- /dev/null +++ 
b/Lib/test/test_unittest/test_util.py @@ -0,0 +1,33 @@ +import unittest +from unittest.util import safe_repr, sorted_list_difference, unorderable_list_difference + + +class TestUtil(unittest.TestCase): + def test_safe_repr(self): + class RaisingRepr: + def __repr__(self): + raise ValueError("Invalid repr()") + + class LongRepr: + def __repr__(self): + return 'x' * 100 + + safe_repr(RaisingRepr()) + self.assertEqual(safe_repr('foo'), "'foo'") + self.assertEqual(safe_repr(LongRepr(), short=True), 'x'*80 + ' [truncated]...') + + def test_sorted_list_difference(self): + self.assertEqual(sorted_list_difference([], []), ([], [])) + self.assertEqual(sorted_list_difference([1, 2], [2, 3]), ([1], [3])) + self.assertEqual(sorted_list_difference([1, 2], [1, 3]), ([2], [3])) + self.assertEqual(sorted_list_difference([1, 1, 1], [1, 2, 3]), ([], [2, 3])) + self.assertEqual(sorted_list_difference([4], [1, 2, 3, 4]), ([], [1, 2, 3])) + self.assertEqual(sorted_list_difference([1, 1], [2]), ([1], [2])) + self.assertEqual(sorted_list_difference([2], [1, 1]), ([2], [1])) + self.assertEqual(sorted_list_difference([1, 2], [1, 1]), ([2], [])) + + def test_unorderable_list_difference(self): + self.assertEqual(unorderable_list_difference([], []), ([], [])) + self.assertEqual(unorderable_list_difference([1, 2], []), ([2, 1], [])) + self.assertEqual(unorderable_list_difference([], [1, 2]), ([], [1, 2])) + self.assertEqual(unorderable_list_difference([1, 2], [1, 3]), ([2], [3])) diff --git a/Lib/test/test_unittest/testmock/testmagicmethods.py b/Lib/test/test_unittest/testmock/testmagicmethods.py index a4feae7e9d3..a8b52ce4871 100644 --- a/Lib/test/test_unittest/testmock/testmagicmethods.py +++ b/Lib/test/test_unittest/testmock/testmagicmethods.py @@ -331,6 +331,45 @@ def test_magic_methods_fspath(self): self.assertEqual(os.fspath(mock), expected_path) mock.__fspath__.assert_called_once() + def test_magic_mock_does_not_reset_magic_returns(self): + # https://github.com/python/cpython/issues/123934 + for reset in (True, False): + with self.subTest(reset=reset): + mm = MagicMock() + self.assertIs(type(mm.__str__()), str) + mm.__str__.assert_called_once() + + self.assertIs(type(mm.__hash__()), int) + mm.__hash__.assert_called_once() + + for _ in range(3): + # Repeat reset several times to be sure: + mm.reset_mock(return_value=reset) + + self.assertIs(type(mm.__str__()), str) + mm.__str__.assert_called_once() + + self.assertIs(type(mm.__hash__()), int) + mm.__hash__.assert_called_once() + + def test_magic_mock_resets_manual_mocks(self): + mm = MagicMock() + mm.__iter__ = MagicMock(return_value=iter([1])) + mm.custom = MagicMock(return_value=2) + self.assertEqual(list(iter(mm)), [1]) + self.assertEqual(mm.custom(), 2) + + mm.reset_mock(return_value=True) + self.assertEqual(list(iter(mm)), []) + self.assertIsInstance(mm.custom(), MagicMock) + + def test_magic_mock_resets_manual_mocks_empty_iter(self): + mm = MagicMock() + mm.__iter__.return_value = [] + self.assertEqual(list(iter(mm)), []) + + mm.reset_mock(return_value=True) + self.assertEqual(list(iter(mm)), []) def test_magic_methods_and_spec(self): class Iterable(object): diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 4faad733245..818e7e93dbb 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -207,6 +207,9 @@ def test_roundtrips(self): ('scheme://///path/to/file', ('scheme', '', '///path/to/file', '', '', ''), ('scheme', '', '///path/to/file', '', '')), + ('file:tmp/junk.txt', + ('file', '', 'tmp/junk.txt', '', '', ''), + 
('file', '', 'tmp/junk.txt', '', '')), ('file:///tmp/junk.txt', ('file', '', '/tmp/junk.txt', '', '', ''), ('file', '', '/tmp/junk.txt', '', '')), @@ -216,6 +219,18 @@ def test_roundtrips(self): ('file://///tmp/junk.txt', ('file', '', '///tmp/junk.txt', '', '', ''), ('file', '', '///tmp/junk.txt', '', '')), + ('http:tmp/junk.txt', + ('http', '', 'tmp/junk.txt', '', '', ''), + ('http', '', 'tmp/junk.txt', '', '')), + ('http://example.com/tmp/junk.txt', + ('http', 'example.com', '/tmp/junk.txt', '', '', ''), + ('http', 'example.com', '/tmp/junk.txt', '', '')), + ('http:///example.com/tmp/junk.txt', + ('http', '', '/example.com/tmp/junk.txt', '', '', ''), + ('http', '', '/example.com/tmp/junk.txt', '', '')), + ('http:////example.com/tmp/junk.txt', + ('http', '', '//example.com/tmp/junk.txt', '', '', ''), + ('http', '', '//example.com/tmp/junk.txt', '', '')), ('imap://mail.python.org/mbox1', ('imap', 'mail.python.org', '/mbox1', '', '', ''), ('imap', 'mail.python.org', '/mbox1', '', '')), @@ -260,7 +275,8 @@ def _encode(t): ('', '', 'schème:path/to/file', '', '')), ] for url, parsed, split in str_cases + bytes_cases: - self.checkRoundtrips(url, parsed, split) + with self.subTest(url): + self.checkRoundtrips(url, parsed, split) def test_roundtrips_normalization(self): str_cases = [ @@ -292,7 +308,8 @@ def _encode(t): tuple(x.encode('ascii') for x in t[3])) bytes_cases = [_encode(x) for x in str_cases] for url, url2, parsed, split in str_cases + bytes_cases: - self.checkRoundtrips(url, parsed, split, url2) + with self.subTest(url): + self.checkRoundtrips(url, parsed, split, url2) def test_http_roundtrips(self): # urllib.parse.urlsplit treats 'http:' as an optimized special case, @@ -333,11 +350,17 @@ def _encode(t): self.checkRoundtrips(url, parsed, split) def checkJoin(self, base, relurl, expected): - str_components = (base, relurl, expected) - self.assertEqual(urllib.parse.urljoin(base, relurl), expected) - bytes_components = baseb, relurlb, expectedb = [ - x.encode('ascii') for x in str_components] - self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) + with self.subTest(base=base, relurl=relurl): + self.assertEqual(urllib.parse.urljoin(base, relurl), expected) + baseb = base.encode('ascii') + relurlb = relurl.encode('ascii') + expectedb = expected.encode('ascii') + self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) + + relurl = urllib.parse.urlunsplit(urllib.parse.urlsplit(relurl)) + self.assertEqual(urllib.parse.urljoin(base, relurl), expected) + relurlb = urllib.parse.urlunsplit(urllib.parse.urlsplit(relurlb)) + self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) def test_unparse_parse(self): str_cases = ['Python', './Python','x-newscheme://foo.com/stuff','x://y','x:/y','x:/','/',] diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 1769ed61b94..1ef08da326c 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -75,7 +75,7 @@ def setUp(self): self.include = 'Include' else: self.bindir = 'bin' - self.lib = ('lib', 'python%d.%d' % sys.version_info[:2]) + self.lib = ('lib', f'python{sysconfig._get_python_version_abi()}') self.include = 'include' executable = sys._base_executable self.exe = os.path.split(executable)[-1] @@ -504,6 +504,21 @@ def test_unicode_in_batch_file(self): ) self.assertEqual(out.strip(), '0') + @unittest.skipUnless(os.name == 'nt' and can_symlink(), + 'symlinks on Windows') + def test_failed_symlink(self): + """ + Test handling of failed symlinks on Windows. 
+ """ + rmtree(self.env_dir) + env_dir = os.path.join(os.path.realpath(self.env_dir), 'venv') + with patch('os.symlink') as mock_symlink: + mock_symlink.side_effect = OSError() + builder = venv.EnvBuilder(clear=True, symlinks=True) + _, err = self.run_with_capture(builder.create, env_dir) + filepath_regex = r"'[A-Z]:\\\\(?:[^\\\\]+\\\\)*[^\\\\]+'" + self.assertRegex(err, rf"Unable to symlink {filepath_regex} to {filepath_regex}") + @requireVenvCreate def test_multiprocessing(self): """ @@ -593,7 +608,8 @@ def test_zippath_from_non_installed_posix(self): libdir = os.path.join(non_installed_dir, platlibdir, self.lib[1]) os.makedirs(libdir) landmark = os.path.join(libdir, "os.py") - stdlib_zip = "python%d%d.zip" % sys.version_info[:2] + abi_thread = "t" if sysconfig.get_config_var("Py_GIL_DISABLED") else "" + stdlib_zip = f"python{sys.version_info.major}{sys.version_info.minor}{abi_thread}" zip_landmark = os.path.join(non_installed_dir, platlibdir, stdlib_zip) @@ -885,6 +901,14 @@ def do_test_with_pip(self, system_site_packages): err = re.sub("^(WARNING: )?The directory .* or its parent directory " "is not owned or is not writable by the current user.*$", "", err, flags=re.MULTILINE) + # Ignore warning about missing optional module: + try: + import ssl + except ImportError: + err = re.sub( + "^WARNING: Disabling truststore since ssl support is missing$", + "", + err, flags=re.MULTILINE) self.assertEqual(err.rstrip(), "") # Being fairly specific regarding the expected behaviour for the # initial bundling phase in Python 3.4. If the output changes in diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py index 4416ed0f3ed..5ed71495d84 100644 --- a/Lib/test/test_warnings/__init__.py +++ b/Lib/test/test_warnings/__init__.py @@ -1,6 +1,8 @@ from contextlib import contextmanager import linecache import os +import importlib +import inspect from io import StringIO import re import sys @@ -636,6 +638,97 @@ class NonWarningSubclass: self.module.warn('good warning category', MyWarningClass) self.assertIsInstance(cm.warning, Warning) + def check_module_globals(self, module_globals): + with original_warnings.catch_warnings(module=self.module, record=True) as w: + self.module.filterwarnings('default') + self.module.warn_explicit( + 'eggs', UserWarning, 'bar', 1, + module_globals=module_globals) + self.assertEqual(len(w), 1) + self.assertEqual(w[0].category, UserWarning) + self.assertEqual(str(w[0].message), 'eggs') + + def check_module_globals_error(self, module_globals, errmsg, errtype=ValueError): + if self.module is py_warnings: + self.check_module_globals(module_globals) + return + with original_warnings.catch_warnings(module=self.module, record=True) as w: + self.module.filterwarnings('always') + with self.assertRaisesRegex(errtype, re.escape(errmsg)): + self.module.warn_explicit( + 'eggs', UserWarning, 'bar', 1, + module_globals=module_globals) + self.assertEqual(len(w), 0) + + def check_module_globals_deprecated(self, module_globals, msg): + if self.module is py_warnings: + self.check_module_globals(module_globals) + return + with original_warnings.catch_warnings(module=self.module, record=True) as w: + self.module.filterwarnings('always') + self.module.warn_explicit( + 'eggs', UserWarning, 'bar', 1, + module_globals=module_globals) + self.assertEqual(len(w), 2) + self.assertEqual(w[0].category, DeprecationWarning) + self.assertEqual(str(w[0].message), msg) + self.assertEqual(w[1].category, UserWarning) + self.assertEqual(str(w[1].message), 'eggs') + + def 
test_gh86298_no_loader_and_no_spec(self): + self.check_module_globals({'__name__': 'bar'}) + + def test_gh86298_loader_is_none_and_no_spec(self): + self.check_module_globals({'__name__': 'bar', '__loader__': None}) + + def test_gh86298_no_loader_and_spec_is_none(self): + self.check_module_globals_error( + {'__name__': 'bar', '__spec__': None}, + 'Module globals is missing a __spec__.loader') + + def test_gh86298_loader_is_none_and_spec_is_none(self): + self.check_module_globals_error( + {'__name__': 'bar', '__loader__': None, '__spec__': None}, + 'Module globals is missing a __spec__.loader') + + def test_gh86298_loader_is_none_and_spec_loader_is_none(self): + self.check_module_globals_error( + {'__name__': 'bar', '__loader__': None, + '__spec__': types.SimpleNamespace(loader=None)}, + 'Module globals is missing a __spec__.loader') + + def test_gh86298_no_spec(self): + self.check_module_globals_deprecated( + {'__name__': 'bar', '__loader__': object()}, + 'Module globals is missing a __spec__.loader') + + def test_gh86298_spec_is_none(self): + self.check_module_globals_deprecated( + {'__name__': 'bar', '__loader__': object(), '__spec__': None}, + 'Module globals is missing a __spec__.loader') + + def test_gh86298_no_spec_loader(self): + self.check_module_globals_deprecated( + {'__name__': 'bar', '__loader__': object(), + '__spec__': types.SimpleNamespace()}, + 'Module globals is missing a __spec__.loader') + + def test_gh86298_loader_and_spec_loader_disagree(self): + self.check_module_globals_deprecated( + {'__name__': 'bar', '__loader__': object(), + '__spec__': types.SimpleNamespace(loader=object())}, + 'Module globals; __loader__ != __spec__.loader') + + def test_gh86298_no_loader_and_no_spec_loader(self): + self.check_module_globals_error( + {'__name__': 'bar', '__spec__': types.SimpleNamespace()}, + 'Module globals is missing a __spec__.loader', AttributeError) + + def test_gh86298_no_loader_with_spec_loader_okay(self): + self.check_module_globals( + {'__name__': 'bar', + '__spec__': types.SimpleNamespace(loader=object())}) + class CWarnTests(WarnTests, unittest.TestCase): module = c_warnings @@ -884,37 +977,46 @@ def test_issue31285(self): # warn_explicit() should neither raise a SystemError nor cause an # assertion failure, in case the return value of get_source() has a # bad splitlines() method. 
- def get_bad_loader(splitlines_ret_val): + get_source_called = [] + def get_module_globals(*, splitlines_ret_val): + class BadSource(str): + def splitlines(self): + return splitlines_ret_val + class BadLoader: def get_source(self, fullname): - class BadSource(str): - def splitlines(self): - return splitlines_ret_val + get_source_called.append(splitlines_ret_val) return BadSource('spam') - return BadLoader() + + loader = BadLoader() + spec = importlib.machinery.ModuleSpec('foobar', loader) + return {'__loader__': loader, + '__spec__': spec, + '__name__': 'foobar'} + wmod = self.module with original_warnings.catch_warnings(module=wmod): wmod.filterwarnings('default', category=UserWarning) + linecache.clearcache() with support.captured_stderr() as stderr: wmod.warn_explicit( 'foo', UserWarning, 'bar', 1, - module_globals={'__loader__': get_bad_loader(42), - '__name__': 'foobar'}) + module_globals=get_module_globals(splitlines_ret_val=42)) self.assertIn('UserWarning: foo', stderr.getvalue()) + self.assertEqual(get_source_called, [42]) - show = wmod._showwarnmsg - try: + linecache.clearcache() + with support.swap_attr(wmod, '_showwarnmsg', None): del wmod._showwarnmsg with support.captured_stderr() as stderr: wmod.warn_explicit( 'eggs', UserWarning, 'bar', 1, - module_globals={'__loader__': get_bad_loader([42]), - '__name__': 'foobar'}) + module_globals=get_module_globals(splitlines_ret_val=[42])) self.assertIn('UserWarning: eggs', stderr.getvalue()) - finally: - wmod._showwarnmsg = show + self.assertEqual(get_source_called, [42, [42]]) + linecache.clearcache() @support.cpython_only def test_issue31411(self): @@ -1682,6 +1784,29 @@ def d(): pass isinstance(cell.cell_contents, deprecated) for cell in d.__closure__ )) + def test_inspect(self): + @deprecated("depr") + def sync(): + pass + + @deprecated("depr") + async def coro(): + pass + + class Cls: + @deprecated("depr") + def sync(self): + pass + + @deprecated("depr") + async def coro(self): + pass + + self.assertFalse(inspect.iscoroutinefunction(sync)) + self.assertTrue(inspect.iscoroutinefunction(coro)) + self.assertFalse(inspect.iscoroutinefunction(Cls.sync)) + self.assertTrue(inspect.iscoroutinefunction(Cls.coro)) + def setUpModule(): py_warnings.onceregistry.clear() c_warnings.onceregistry.clear() diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index ef2fe92cc21..1a820d089d6 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -123,10 +123,12 @@ def test_basic_ref(self): def test_ref_repr(self): obj = C() ref = weakref.ref(obj) - self.assertRegex(repr(ref), - rf"") + regex = ( + rf"" + ) + self.assertRegex(repr(ref), regex) obj = None gc_collect() @@ -141,10 +143,13 @@ def __name__(self): obj2 = WithName() ref2 = weakref.ref(obj2) - self.assertRegex(repr(ref2), - rf"") + regex = ( + rf"" + ) + self.assertRegex(repr(ref2), regex) def test_repr_failure_gh99184(self): class MyConfig(dict): @@ -229,10 +234,12 @@ def check(proxy): def test_proxy_repr(self): obj = C() ref = weakref.proxy(obj, self.callback) - self.assertRegex(repr(ref), - rf"") + regex = ( + rf"" + ) + self.assertRegex(repr(ref), regex) obj = None gc_collect() diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py index ae8d776e841..4fcbc5c2e59 100644 --- a/Lib/test/test_webbrowser.py +++ b/Lib/test/test_webbrowser.py @@ -244,7 +244,7 @@ def _obj_ref(self, *args): @unittest.skipIf(getattr(webbrowser, "objc", None) is None, "iOS Webbrowser tests require ctypes") def setUp(self): - # Intercept the the objc library. 
Wrap the calls to get the + # Intercept the objc library. Wrap the calls to get the # references to classes and selectors to return strings, and # wrap msgSend to return stringified object references self.orig_objc = webbrowser.objc diff --git a/Lib/test/test_with.py b/Lib/test/test_with.py index d81902327a7..839cdec68d5 100644 --- a/Lib/test/test_with.py +++ b/Lib/test/test_with.py @@ -5,6 +5,7 @@ __email__ = "mbland at acm dot org" import sys +import traceback import unittest from collections import deque from contextlib import _GeneratorContextManager, contextmanager, nullcontext @@ -170,7 +171,10 @@ def __exit__(self, *args): def shouldThrow(): ct = EnterThrows() self.foo = None - with ct as self.foo: + # Ruff complains that we're redefining `self.foo` here, + # but the whole point of the test is to check that `self.foo` + # is *not* redefined (because `__enter__` raises) + with ct as self.foo: # ruff: noqa: F811 pass self.assertRaises(RuntimeError, shouldThrow) self.assertEqual(self.foo, None) @@ -251,7 +255,6 @@ def testInlineGeneratorBoundSyntax(self): self.assertAfterWithGeneratorInvariantsNoError(foo) def testInlineGeneratorBoundToExistingVariable(self): - foo = None with mock_contextmanager_generator() as foo: self.assertInWithGeneratorInvariants(foo) self.assertAfterWithGeneratorInvariantsNoError(foo) @@ -749,5 +752,48 @@ def testEnterReturnsTuple(self): self.assertEqual(10, b1) self.assertEqual(20, b2) + def testExceptionLocation(self): + # The location of an exception raised from + # __init__, __enter__ or __exit__ of a context + # manager should be just the context manager expression, + # pinpointing the precise context manager in case there + # is more than one. + + def init_raises(): + try: + with self.Dummy(), self.InitRaises() as cm, self.Dummy() as d: + pass + except Exception as e: + return e + + def enter_raises(): + try: + with self.EnterRaises(), self.Dummy() as d: + pass + except Exception as e: + return e + + def exit_raises(): + try: + with self.ExitRaises(), self.Dummy() as d: + pass + except Exception as e: + return e + + for func, expected in [(init_raises, "self.InitRaises()"), + (enter_raises, "self.EnterRaises()"), + (exit_raises, "self.ExitRaises()"), + ]: + with self.subTest(func): + exc = func() + f = traceback.extract_tb(exc.__traceback__)[0] + indent = 16 + co = func.__code__ + self.assertEqual(f.lineno, co.co_firstlineno + 2) + self.assertEqual(f.end_lineno, co.co_firstlineno + 2) + self.assertEqual(f.line[f.colno - indent : f.end_colno - indent], + expected) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index 3d9141fea1e..ebec9d8f18a 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2423,6 +2423,22 @@ def test_39495_treebuilder_start(self): self.assertRaises(TypeError, ET.TreeBuilder().start, "tag") self.assertRaises(TypeError, ET.TreeBuilder().start, "tag", None) + def test_issue123213_correct_extend_exception(self): + # Does not hide the internal exception when extending the element + self.assertRaises(ZeroDivisionError, ET.Element('tag').extend, + (1/0 for i in range(2))) + + # Still raises the TypeError when extending with a non-iterable + self.assertRaises(TypeError, ET.Element('tag').extend, None) + + # Preserves the TypeError message when extending with a generator + def f(): + raise TypeError("mymessage") + + self.assertRaisesRegex( + TypeError, 'mymessage', + ET.Element('tag').extend, (f() for i in range(2))) + # 
-------------------------------------------------------------------- @@ -3748,6 +3764,22 @@ def test_setslice_negative_steps(self): e[1::-sys.maxsize<<64] = [ET.Element('d')] self.assertEqual(self._subelem_tags(e), ['a0', 'd', 'a2', 'a3']) + def test_issue123213_setslice_exception(self): + e = ET.Element('tag') + # Does not hide the internal exception when assigning to the element + with self.assertRaises(ZeroDivisionError): + e[:1] = (1/0 for i in range(2)) + + # Still raises the TypeError when assigning with a non-iterable + with self.assertRaises(TypeError): + e[:1] = None + + # Preserve the original TypeError message when assigning. + def f(): + raise TypeError("mymessage") + + with self.assertRaisesRegex(TypeError, 'mymessage'): + e[:1] = (f() for i in range(2)) class IOTest(unittest.TestCase): def test_encoding(self): diff --git a/Lib/test/test_zipfile/_path/test_path.py b/Lib/test/test_zipfile/_path/test_path.py index 99842ffd63a..aba515536f0 100644 --- a/Lib/test/test_zipfile/_path/test_path.py +++ b/Lib/test/test_zipfile/_path/test_path.py @@ -5,6 +5,7 @@ import pickle import stat import sys +import time import unittest import zipfile import zipfile._path @@ -101,7 +102,7 @@ def zipfile_ondisk(self, alpharep): def test_iterdir_and_types(self, alpharep): root = zipfile.Path(alpharep) assert root.is_dir() - a, k, b, g, j = root.iterdir() + a, n, b, g, j = root.iterdir() assert a.is_file() assert b.is_dir() assert g.is_dir() @@ -121,7 +122,7 @@ def test_is_file_missing(self, alpharep): @pass_alpharep def test_iterdir_on_file(self, alpharep): root = zipfile.Path(alpharep) - a, k, b, g, j = root.iterdir() + a, n, b, g, j = root.iterdir() with self.assertRaises(ValueError): a.iterdir() @@ -136,7 +137,7 @@ def test_subdir_is_dir(self, alpharep): @pass_alpharep def test_open(self, alpharep): root = zipfile.Path(alpharep) - a, k, b, g, j = root.iterdir() + a, n, b, g, j = root.iterdir() with a.open(encoding="utf-8") as strm: data = strm.read() self.assertEqual(data, "content of a") @@ -240,7 +241,7 @@ def test_open_missing_directory(self, alpharep): @pass_alpharep def test_read(self, alpharep): root = zipfile.Path(alpharep) - a, k, b, g, j = root.iterdir() + a, n, b, g, j = root.iterdir() assert a.read_text(encoding="utf-8") == "content of a" # Also check positional encoding arg (gh-101144). assert a.read_text("utf-8") == "content of a" @@ -306,7 +307,7 @@ def test_mutability(self, alpharep): reflect that change. 
""" root = zipfile.Path(alpharep) - a, k, b, g, j = root.iterdir() + a, n, b, g, j = root.iterdir() alpharep.writestr('foo.txt', 'foo') alpharep.writestr('bar/baz.txt', 'baz') assert any(child.name == 'foo.txt' for child in root.iterdir()) @@ -475,6 +476,18 @@ def test_glob_recursive(self, alpharep): assert list(root.glob("**/*.txt")) == list(root.rglob("*.txt")) + @pass_alpharep + def test_glob_dirs(self, alpharep): + root = zipfile.Path(alpharep) + assert list(root.glob('b')) == [zipfile.Path(alpharep, "b/")] + assert list(root.glob('b*')) == [zipfile.Path(alpharep, "b/")] + + @pass_alpharep + def test_glob_subdir(self, alpharep): + root = zipfile.Path(alpharep) + assert list(root.glob('g/h')) == [zipfile.Path(alpharep, "g/h/")] + assert list(root.glob('g*/h*')) == [zipfile.Path(alpharep, "g/h/")] + @pass_alpharep def test_glob_subdirs(self, alpharep): root = zipfile.Path(alpharep) @@ -577,3 +590,87 @@ def test_getinfo_missing(self, alpharep): zipfile.Path(alpharep) with self.assertRaises(KeyError): alpharep.getinfo('does-not-exist') + + def test_malformed_paths(self): + """ + Path should handle malformed paths gracefully. + + Paths with leading slashes are not visible. + + Paths with dots are treated like regular files. + """ + data = io.BytesIO() + zf = zipfile.ZipFile(data, "w") + zf.writestr("/one-slash.txt", b"content") + zf.writestr("//two-slash.txt", b"content") + zf.writestr("../parent.txt", b"content") + zf.filename = '' + root = zipfile.Path(zf) + assert list(map(str, root.iterdir())) == ['../'] + assert root.joinpath('..').joinpath('parent.txt').read_bytes() == b'content' + + def test_unsupported_names(self): + """ + Path segments with special characters are readable. + + On some platforms or file systems, characters like + ``:`` and ``?`` are not allowed, but they are valid + in the zip file. + """ + data = io.BytesIO() + zf = zipfile.ZipFile(data, "w") + zf.writestr("path?", b"content") + zf.writestr("V: NMS.flac", b"fLaC...") + zf.filename = '' + root = zipfile.Path(zf) + contents = root.iterdir() + assert next(contents).name == 'path?' + assert next(contents).name == 'V: NMS.flac' + assert root.joinpath('V: NMS.flac').read_bytes() == b"fLaC..." + + def test_backslash_not_separator(self): + """ + In a zip file, backslashes are not separators. + """ + data = io.BytesIO() + zf = zipfile.ZipFile(data, "w") + zf.writestr(DirtyZipInfo.for_name("foo\\bar", zf), b"content") + zf.filename = '' + root = zipfile.Path(zf) + (first,) = root.iterdir() + assert not first.is_dir() + assert first.name == 'foo\\bar' + + @pass_alpharep + def test_interface(self, alpharep): + from importlib.resources.abc import Traversable + + zf = zipfile.Path(alpharep) + assert isinstance(zf, Traversable) + + +class DirtyZipInfo(zipfile.ZipInfo): + """ + Bypass name sanitization. + """ + + def __init__(self, filename, *args, **kwargs): + super().__init__(filename, *args, **kwargs) + self.filename = filename + + @classmethod + def for_name(cls, name, archive): + """ + Construct the same way that ZipFile.writestr does. 
+ + TODO: extract this functionality and re-use + """ + self = cls(filename=name, date_time=time.localtime(time.time())[:6]) + self.compress_type = archive.compression + self.compress_level = archive.compresslevel + if self.filename.endswith('/'): # pragma: no cover + self.external_attr = 0o40775 << 16 # drwxrwxr-x + self.external_attr |= 0x10 # MS-DOS directory flag + else: + self.external_attr = 0o600 << 16 # ?rw------- + return self diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index e9c3218d2bb..edc809b736e 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -52,8 +52,11 @@ def module_path_to_dotted_name(path): TESTMOD = "ziptestmodule" +TESTMOD2 = "ziptestmodule2" +TESTMOD3 = "ziptestmodule3" TESTPACK = "ziptestpackage" TESTPACK2 = "ziptestpackage2" +TESTPACK3 = "ziptestpackage3" TEMP_DIR = os.path.abspath("junk95142") TEMP_ZIP = os.path.abspath("junk95142.zip") TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "zipimport_data") @@ -95,8 +98,10 @@ def makeTree(self, files, dirName=TEMP_DIR): # defined by files under the directory dirName. self.addCleanup(os_helper.rmtree, dirName) - for name, (mtime, data) in files.items(): - path = os.path.join(dirName, name) + for name, data in files.items(): + if isinstance(data, tuple): + mtime, data = data + path = os.path.join(dirName, *name.split('/')) if path[-1] == os.sep: if not os.path.isdir(path): os.makedirs(path) @@ -107,22 +112,18 @@ def makeTree(self, files, dirName=TEMP_DIR): with open(path, 'wb') as fp: fp.write(data) - def makeZip(self, files, zipName=TEMP_ZIP, **kw): + def makeZip(self, files, zipName=TEMP_ZIP, *, + comment=None, file_comment=None, stuff=None, prefix='', **kw): # Create a zip archive based set of modules/packages - # defined by files in the zip file zipName. If the - # key 'stuff' exists in kw it is prepended to the archive. + # defined by files in the zip file zipName. + # If stuff is not None, it is prepended to the archive. 
self.addCleanup(os_helper.unlink, zipName) - with ZipFile(zipName, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - z.writestr(zinfo, data) - comment = kw.get("comment", None) + with ZipFile(zipName, "w", compression=self.compression) as z: + self.writeZip(z, files, file_comment=file_comment, prefix=prefix) if comment is not None: z.comment = comment - stuff = kw.get("stuff", None) if stuff is not None: # Prepend 'stuff' to the start of the zipfile with open(zipName, "rb") as f: @@ -131,26 +132,47 @@ def makeZip(self, files, zipName=TEMP_ZIP, **kw): f.write(stuff) f.write(data) + def writeZip(self, z, files, *, file_comment=None, prefix=''): + for name, data in files.items(): + if isinstance(data, tuple): + mtime, data = data + else: + mtime = NOW + name = name.replace(os.sep, '/') + zinfo = ZipInfo(prefix + name, time.localtime(mtime)) + zinfo.compress_type = self.compression + if file_comment is not None: + zinfo.comment = file_comment + if data is None: + zinfo.CRC = 0 + z.mkdir(zinfo) + else: + assert name[-1] != '/' + z.writestr(zinfo, data) + def getZip64Files(self): # This is the simplest way to make zipfile generate the zip64 EOCD block - return {f"f{n}.py": (NOW, test_src) for n in range(65537)} + return {f"f{n}.py": test_src for n in range(65537)} def doTest(self, expected_ext, files, *modules, **kw): + if 'prefix' not in kw: + kw['prefix'] = 'pre/fix/' self.makeZip(files, **kw) self.doTestWithPreBuiltZip(expected_ext, *modules, **kw) - def doTestWithPreBuiltZip(self, expected_ext, *modules, **kw): - sys.path.insert(0, TEMP_ZIP) + def doTestWithPreBuiltZip(self, expected_ext, *modules, + call=None, prefix='', **kw): + zip_path = os.path.join(TEMP_ZIP, *prefix.split('/')[:-1]) + sys.path.insert(0, zip_path) mod = importlib.import_module(".".join(modules)) - call = kw.get('call') if call is not None: call(mod) if expected_ext: file = mod.get_file() - self.assertEqual(file, os.path.join(TEMP_ZIP, + self.assertEqual(file, os.path.join(zip_path, *modules) + expected_ext) def testAFakeZlib(self): @@ -176,7 +198,7 @@ def testAFakeZlib(self): self.skipTest('zlib is a builtin module') if "zlib" in sys.modules: del sys.modules["zlib"] - files = {"zlib.py": (NOW, test_src)} + files = {"zlib.py": test_src} try: self.doTest(".py", files, "zlib") except ImportError: @@ -187,16 +209,16 @@ def testAFakeZlib(self): self.fail("expected test to raise ImportError") def testPy(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD) def testPyc(self): - files = {TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTMOD) def testBoth(self): - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTMOD) def testUncheckedHashBasedPyc(self): @@ -229,22 +251,22 @@ def check(mod): self.doTest(None, files, TESTMOD, call=check) def testEmptyPy(self): - files = {TESTMOD + ".py": (NOW, "")} + files = {TESTMOD + ".py": ""} self.doTest(None, files, TESTMOD) def testBadMagic(self): # make pyc magic word invalid, forcing loading from .py badmagic_pyc = bytearray(test_pyc) badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, badmagic_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: badmagic_pyc} 
self.doTest(".py", files, TESTMOD) def testBadMagic2(self): # make pyc magic word invalid, causing an ImportError badmagic_pyc = bytearray(test_pyc) badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit - files = {TESTMOD + pyc_ext: (NOW, badmagic_pyc)} + files = {TESTMOD + pyc_ext: badmagic_pyc} try: self.doTest(".py", files, TESTMOD) self.fail("This should not be reached") @@ -257,22 +279,22 @@ def testBadMTime(self): # flip the second bit -- not the first as that one isn't stored in the # .py's mtime in the zip archive. badtime_pyc[11] ^= 0x02 - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, badtime_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: badtime_pyc} self.doTest(".py", files, TESTMOD) def test2038MTime(self): # Make sure we can handle mtimes larger than what a 32-bit signed number # can hold. twenty_thirty_eight_pyc = make_pyc(test_co, 2**32 - 1, len(test_src)) - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, twenty_thirty_eight_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: twenty_thirty_eight_pyc} self.doTest(".py", files, TESTMOD) def testPackage(self): packdir = TESTPACK + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTMOD) def testSubPackage(self): @@ -280,9 +302,9 @@ def testSubPackage(self): # archives. packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) def testSubNamespacePackage(self): @@ -291,9 +313,9 @@ def testSubNamespacePackage(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep # The first two files are just directory entries (so have no data). - files = {packdir: (NOW, ""), - packdir2: (NOW, ""), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir: None, + packdir2: None, + packdir2 + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) def testMixedNamespacePackage(self): @@ -301,19 +323,19 @@ def testMixedNamespacePackage(self): # real filesystem and a zip archive. 
packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - packdir3 = packdir2 + TESTPACK + '3' + os.sep - files1 = {packdir: (NOW, ""), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir3: (NOW, ""), - packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} - files2 = {packdir: (NOW, ""), - packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + packdir3 = packdir2 + TESTPACK3 + os.sep + files1 = {packdir: None, + packdir + TESTMOD + pyc_ext: test_pyc, + packdir2: None, + packdir3: None, + packdir3 + TESTMOD + pyc_ext: test_pyc, + packdir2 + TESTMOD3 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} + files2 = {packdir: None, + packdir + TESTMOD2 + pyc_ext: test_pyc, + packdir2: None, + packdir2 + TESTMOD2 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip1 = os.path.abspath("path1.zip") self.makeZip(files1, zip1) @@ -346,8 +368,8 @@ def testMixedNamespacePackage(self): mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - # And TESTPACK/(TESTMOD + '2') only exists in path2. - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) + # And TESTPACK/(TESTMOD2) only exists in path2. + mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-3]) @@ -364,13 +386,13 @@ def testMixedNamespacePackage(self): self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-4]) - # subpkg.TESTMOD + '2' only exists in zip2. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) + # subpkg.TESTMOD2 only exists in zip2. + mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-4]) - # Finally subpkg.TESTMOD + '3' only exists in zip1. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) + # Finally subpkg.TESTMOD3 only exists in zip1. + mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) def testNamespacePackage(self): @@ -378,22 +400,22 @@ def testNamespacePackage(self): # archives. 
packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - packdir3 = packdir2 + TESTPACK + '3' + os.sep - files1 = {packdir: (NOW, ""), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir3: (NOW, ""), - packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + packdir3 = packdir2 + TESTPACK3 + os.sep + files1 = {packdir: None, + packdir + TESTMOD + pyc_ext: test_pyc, + packdir2: None, + packdir3: None, + packdir3 + TESTMOD + pyc_ext: test_pyc, + packdir2 + TESTMOD3 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip1 = os.path.abspath("path1.zip") self.makeZip(files1, zip1) - files2 = {packdir: (NOW, ""), - packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files2 = {packdir: None, + packdir + TESTMOD2 + pyc_ext: test_pyc, + packdir2: None, + packdir2 + TESTMOD2 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip2 = os.path.abspath("path2.zip") self.makeZip(files2, zip2) @@ -422,8 +444,8 @@ def testNamespacePackage(self): mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - # And TESTPACK/(TESTMOD + '2') only exists in path2. - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) + # And TESTPACK/(TESTMOD2) only exists in path2. + mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) self.assertEqual("path2.zip", mod.__file__.split(os.sep)[-3]) # One level deeper... @@ -438,29 +460,22 @@ def testNamespacePackage(self): mod = importlib.import_module('.'.join((subpkg, TESTMOD))) self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - # subpkg.TESTMOD + '2' only exists in zip2. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) + # subpkg.TESTMOD2 only exists in zip2. + mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - # Finally subpkg.TESTMOD + '3' only exists in zip1. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) + # Finally subpkg.TESTMOD3 only exists in zip1. 
+ mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) def testZipImporterMethods(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) self.assertEqual(zi.archive, TEMP_ZIP) @@ -516,17 +531,11 @@ def testZipImporterMethods(self): def testInvalidateCaches(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) self.assertEqual(zi._get_files().keys(), files.keys()) @@ -534,14 +543,10 @@ def testInvalidateCaches(self): zi.invalidate_caches() self.assertEqual(zi._get_files().keys(), files.keys()) # Add a new file to the ZIP archive - newfile = {"spam2" + pyc_ext: (NOW, test_pyc)} + newfile = {"spam2" + pyc_ext: test_pyc} files.update(newfile) - with ZipFile(TEMP_ZIP, "a") as z: - for name, (mtime, data) in newfile.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + with ZipFile(TEMP_ZIP, "a", compression=self.compression) as z: + self.writeZip(z, newfile, file_comment=b"spam") # Check that we can detect the new file after invalidating the cache zi.invalidate_caches() self.assertEqual(zi._get_files().keys(), files.keys()) @@ -558,17 +563,11 @@ def testInvalidateCaches(self): def testInvalidateCachesWithMultipleZipimports(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"spam") zi = 
zipimport.zipimporter(TEMP_ZIP) self.assertEqual(zi._get_files().keys(), files.keys()) @@ -576,14 +575,10 @@ def testInvalidateCachesWithMultipleZipimports(self): zi2 = zipimport.zipimporter(TEMP_ZIP) self.assertEqual(zi2._get_files().keys(), files.keys()) # Add a new file to the ZIP archive to make the cache wrong. - newfile = {"spam2" + pyc_ext: (NOW, test_pyc)} + newfile = {"spam2" + pyc_ext: test_pyc} files.update(newfile) - with ZipFile(TEMP_ZIP, "a") as z: - for name, (mtime, data) in newfile.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + with ZipFile(TEMP_ZIP, "a", compression=self.compression) as z: + self.writeZip(z, newfile, file_comment=b"spam") # Invalidate the cache of the first zipimporter. zi.invalidate_caches() # Check that the second zipimporter detects the new file and isn't using a stale cache. @@ -595,16 +590,9 @@ def testInvalidateCachesWithMultipleZipimports(self): def testZipImporterMethodsInSubDirectory(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} - - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"eggs" - z.writestr(zinfo, data) + files = {packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"eggs") zi = zipimport.zipimporter(TEMP_ZIP + os.sep + packdir) self.assertEqual(zi.archive, TEMP_ZIP) @@ -669,9 +657,9 @@ def get_file(): if __loader__.get_data("some.data") != b"some data": raise AssertionError("bad data")\n""" pyc = make_pyc(compile(src, "", "exec"), NOW, len(src)) - files = {TESTMOD + pyc_ext: (NOW, pyc), - "some.data": (NOW, "some data")} - self.doTest(pyc_ext, files, TESTMOD) + files = {TESTMOD + pyc_ext: pyc, + "some.data": "some data"} + self.doTest(pyc_ext, files, TESTMOD, prefix='') def testDefaultOptimizationLevel(self): # zipimport should use the default optimization level (#28131) @@ -679,7 +667,7 @@ def testDefaultOptimizationLevel(self): def test(val): assert(val) return val\n""" - files = {TESTMOD + '.py': (NOW, src)} + files = {TESTMOD + '.py': src} self.makeZip(files) sys.path.insert(0, TEMP_ZIP) mod = importlib.import_module(TESTMOD) @@ -692,7 +680,7 @@ def test(val): def testImport_WithStuff(self): # try importing from a zipfile which contains additional # stuff at the beginning of the file - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, stuff=b"Some Stuff"*31) @@ -700,18 +688,18 @@ def assertModuleSource(self, module): self.assertEqual(inspect.getsource(module), test_src) def testGetSource(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, call=self.assertModuleSource) def testGetCompiledSource(self): pyc = make_pyc(compile(test_src, "", "exec"), NOW, len(test_src)) - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: pyc} self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource) def runDoctest(self, callback): - files = {TESTMOD + ".py": (NOW, test_src), - "xyz.txt": (NOW, ">>> log.append(True)\n")} + files = {TESTMOD + 
".py": test_src, + "xyz.txt": ">>> log.append(True)\n"} self.doTest(".py", files, TESTMOD, call=callback) def doDoctestFile(self, module): @@ -763,29 +751,21 @@ def doTraceback(self, module): raise AssertionError("This ought to be impossible") def testTraceback(self): - files = {TESTMOD + ".py": (NOW, raise_src)} + files = {TESTMOD + ".py": raise_src} self.doTest(None, files, TESTMOD, call=self.doTraceback) @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None, "need an unencodable filename") def testUnencodable(self): filename = os_helper.TESTFN_UNENCODABLE + ".zip" - self.addCleanup(os_helper.unlink, filename) - with ZipFile(filename, "w") as z: - zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) - zinfo.compress_type = self.compression - z.writestr(zinfo, test_src) + self.makeZip({TESTMOD + ".py": test_src}, filename) spec = zipimport.zipimporter(filename).find_spec(TESTMOD) mod = importlib.util.module_from_spec(spec) spec.loader.exec_module(mod) def testBytesPath(self): filename = os_helper.TESTFN + ".zip" - self.addCleanup(os_helper.unlink, filename) - with ZipFile(filename, "w") as z: - zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) - zinfo.compress_type = self.compression - z.writestr(zinfo, test_src) + self.makeZip({TESTMOD + ".py": test_src}, filename) zipimport.zipimporter(filename) with self.assertRaises(TypeError): @@ -796,15 +776,15 @@ def testBytesPath(self): zipimport.zipimporter(memoryview(os.fsencode(filename))) def testComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, comment=b"comment") def testBeginningCruftAndComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, stuff=b"cruft" * 64, comment=b"hi") def testLargestPossibleComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, comment=b"c" * ((1 << 16) - 1)) def testZip64(self): diff --git a/Lib/threading.py b/Lib/threading.py index 31ab77c92b1..94ea2f08178 100644 --- a/Lib/threading.py +++ b/Lib/threading.py @@ -336,7 +336,7 @@ def wait(self, timeout=None): awakened or timed out, it re-acquires the lock and returns. When the timeout argument is present and not None, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). When the underlying lock is an RLock, it is not released using its @@ -646,7 +646,7 @@ def wait(self, timeout=None): the optional timeout occurs. When the timeout argument is present and not None, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). This method returns the internal flag on exit, so it will always return @@ -689,6 +689,8 @@ def __init__(self, parties, action=None, timeout=None): default for all subsequent 'wait()' calls. """ + if parties < 1: + raise ValueError("parties must be > 0") self._cond = Condition(Lock()) self._action = action self._timeout = timeout @@ -1059,7 +1061,7 @@ def join(self, timeout=None): or until the optional timeout occurs. When the timeout argument is present and not None, it should be a - floating point number specifying a timeout for the operation in seconds + floating-point number specifying a timeout for the operation in seconds (or fractions thereof). 
As join() always returns None, you must call is_alive() after join() to decide whether a timeout happened -- if the thread is still alive, the join() call timed out. diff --git a/Lib/tkinter/simpledialog.py b/Lib/tkinter/simpledialog.py index 0f0dc66460f..6e5b025a9f9 100644 --- a/Lib/tkinter/simpledialog.py +++ b/Lib/tkinter/simpledialog.py @@ -357,7 +357,7 @@ def askinteger(title, prompt, **kw): class _QueryFloat(_QueryDialog): - errormessage = "Not a floating point value." + errormessage = "Not a floating-point value." def getresult(self): return self.getdouble(self.entry.get()) diff --git a/Lib/tokenize.py b/Lib/tokenize.py index 7f418bb7a1b..4b4c3cfe169 100644 --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -200,7 +200,7 @@ def escape_brackets(self, token): characters[-2::-1] ) ) - if n_backslashes % 2 == 0: + if n_backslashes % 2 == 0 or characters[-1] != "N": characters.append(character) else: consume_until_next_bracket = True diff --git a/Lib/traceback.py b/Lib/traceback.py index 6ee1a50ca68..0fe7187a0c6 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -698,6 +698,8 @@ def _should_show_carets(self, start_offset, end_offset, all_lines, anchors): with suppress(SyntaxError, ImportError): import ast tree = ast.parse('\n'.join(all_lines)) + if not tree.body: + return False statement = tree.body[0] value = None def _spawns_full_line(value): @@ -1292,11 +1294,15 @@ def _format_syntax_error(self, stype, **kwargs): yield ' {}\n'.format(ltext) else: offset = self.offset - end_offset = self.end_offset if self.end_offset not in {None, 0} else offset + if self.lineno == self.end_lineno: + end_offset = self.end_offset if self.end_offset not in {None, 0} else offset + else: + end_offset = len(rtext) + 1 + if self.text and offset > len(self.text): - offset = len(self.text) + 1 + offset = len(rtext) + 1 if self.text and end_offset > len(self.text): - end_offset = len(self.text) + 1 + end_offset = len(rtext) + 1 if offset >= end_offset or end_offset < 0: end_offset = offset + 1 diff --git a/Lib/turtle.py b/Lib/turtle.py index 7bfe81351b0..99850ae5efe 100644 --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -1718,7 +1718,7 @@ def xcor(self): >>> reset() >>> turtle.left(60) >>> turtle.forward(100) - >>> print turtle.xcor() + >>> print(turtle.xcor()) 50.0 """ return self._position[0] @@ -1732,7 +1732,7 @@ def ycor(self): >>> reset() >>> turtle.left(60) >>> turtle.forward(100) - >>> print turtle.ycor() + >>> print(turtle.ycor()) 86.6025403784 """ return self._position[1] @@ -2335,7 +2335,7 @@ def isvisible(self): Example (for a Turtle instance named turtle): >>> turtle.hideturtle() - >>> print turtle.isvisible(): + >>> print(turtle.isvisible()) False """ return self._shown diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index f7ab4a5ea22..da7da87748f 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -628,7 +628,7 @@ def __set_side_effect(self, value): side_effect = property(__get_side_effect, __set_side_effect) - def reset_mock(self, visited=None,*, return_value=False, side_effect=False): + def reset_mock(self, visited=None, *, return_value=False, side_effect=False): "Restore the mock object to its initial state." 
if visited is None: visited = [] @@ -2219,6 +2219,17 @@ def mock_add_spec(self, spec, spec_set=False): self._mock_add_spec(spec, spec_set) self._mock_set_magics() + def reset_mock(self, /, *args, return_value=False, **kwargs): + if ( + return_value + and self._mock_name + and _is_magic(self._mock_name) + ): + # Don't reset return values for magic methods, + # otherwise `m.__str__` will start + # to return `MagicMock` instances, instead of `str` instances. + return_value = False + super().reset_mock(*args, return_value=return_value, **kwargs) class MagicProxy(Base): diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index 3932bb99c7e..24815952037 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -525,9 +525,13 @@ def urlunsplit(components): empty query; the RFC states that these are equivalent).""" scheme, netloc, url, query, fragment, _coerce_result = ( _coerce_args(*components)) - if netloc or (scheme and scheme in uses_netloc) or url[:2] == '//': + if netloc: if url and url[:1] != '/': url = '/' + url - url = '//' + (netloc or '') + url + url = '//' + netloc + url + elif url[:2] == '//': + url = '//' + url + elif scheme and scheme in uses_netloc and (not url or url[:1] == '/'): + url = '//' + url if scheme: url = scheme + ':' + url if query: diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py index fa69d5846f2..028e9483196 100644 --- a/Lib/venv/__init__.py +++ b/Lib/venv/__init__.py @@ -393,7 +393,7 @@ def setup_python(self, context): os.symlink(src, dest) to_unlink.append(dest) except OSError: - logger.warning('Unable to symlink %r to %r', src, dst) + logger.warning('Unable to symlink %r to %r', src, dest) do_copies = True for f in to_unlink: try: diff --git a/Lib/warnings.py b/Lib/warnings.py index 20a39d54bf7..430e4748b97 100644 --- a/Lib/warnings.py +++ b/Lib/warnings.py @@ -629,12 +629,16 @@ def __init_subclass__(*args, **kwargs): return arg elif callable(arg): import functools + import inspect @functools.wraps(arg) def wrapper(*args, **kwargs): warn(msg, category=category, stacklevel=stacklevel + 1) return arg(*args, **kwargs) + if inspect.iscoroutinefunction(arg): + wrapper = inspect.markcoroutinefunction(wrapper) + arg.__deprecated__ = wrapper.__deprecated__ = msg return wrapper else: diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index b7fbc41853e..2f9555ad60d 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -32,7 +32,7 @@ def register(name, klass, instance=None, *, preferred=False): # Preferred browsers go to the front of the list. # Need to match to the default browser returned by xdg-settings, which # may be of the form e.g. "firefox.desktop". - if preferred or (_os_preferred_browser and name in _os_preferred_browser): + if preferred or (_os_preferred_browser and f'{name}.desktop' == _os_preferred_browser): _tryorder.insert(0, name) else: _tryorder.append(name) @@ -81,6 +81,9 @@ def open(url, new=0, autoraise=True): - 1: a new browser window. - 2: a new browser page ("tab"). If possible, autoraise raises the window (the default) or not. + + If opening the browser succeeds, return True. + If there is a problem, return False. """ if _tryorder is None: with _lock: diff --git a/Lib/zipfile/_path/__init__.py b/Lib/zipfile/_path/__init__.py index f5ea18cee61..5079db90648 100644 --- a/Lib/zipfile/_path/__init__.py +++ b/Lib/zipfile/_path/__init__.py @@ -1,3 +1,12 @@ +""" +A Path-like interface for zipfiles. + +This codebase is shared between zipfile.Path in the stdlib +and zipp in PyPI. 
See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. +""" + import io import posixpath import zipfile @@ -36,7 +45,7 @@ def _parents(path): def _ancestry(path): """ Given a path with elements separated by - posixpath.sep, generate all elements of that path + posixpath.sep, generate all elements of that path. >>> list(_ancestry('b/d')) ['b/d', 'b'] @@ -48,9 +57,14 @@ def _ancestry(path): ['b'] >>> list(_ancestry('')) [] + + Multiple separators are treated like a single. + + >>> list(_ancestry('//b//d///f//')) + ['//b//d///f', '//b//d', '//b'] """ path = path.rstrip(posixpath.sep) - while path and path != posixpath.sep: + while path.rstrip(posixpath.sep): yield path path, tail = posixpath.split(path) @@ -188,7 +202,10 @@ def _extract_text_encoding(encoding=None, *args, **kwargs): class Path: """ - A pathlib-compatible interface for zip files. + A :class:`importlib.resources.abc.Traversable` interface for zip files. + + Implements many of the features users enjoy from + :class:`pathlib.Path`. Consider a zip file with this structure:: @@ -404,8 +421,7 @@ def glob(self, pattern): prefix = re.escape(self.at) tr = Translator(seps='/') matches = re.compile(prefix + tr.translate(pattern)).fullmatch - names = (data.filename for data in self.root.filelist) - return map(self._next, filter(matches, names)) + return map(self._next, filter(matches, self.root.namelist())) def rglob(self, pattern): return self.glob(f'**/{pattern}') diff --git a/Lib/zipfile/_path/glob.py b/Lib/zipfile/_path/glob.py index 69c41d77c3f..4320f1c0bad 100644 --- a/Lib/zipfile/_path/glob.py +++ b/Lib/zipfile/_path/glob.py @@ -28,7 +28,7 @@ def translate(self, pattern): """ Given a glob pattern, produce a regex that matches it. """ - return self.extend(self.translate_core(pattern)) + return self.extend(self.match_dirs(self.translate_core(pattern))) def extend(self, pattern): r""" @@ -41,6 +41,14 @@ def extend(self, pattern): """ return rf'(?s:{pattern})\Z' + def match_dirs(self, pattern): + """ + Ensure that zipfile.Path directory names are matched. + + zipfile.Path directory names always end in a slash. + """ + return rf'{pattern}[/]?' + def translate_core(self, pattern): r""" Given a glob pattern, produce a regex that matches it. diff --git a/Lib/zipimport.py b/Lib/zipimport.py index a49a21f0799..fb312be115e 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -254,17 +254,9 @@ def load_module(self, fullname): def get_resource_reader(self, fullname): - """Return the ResourceReader for a package in a zip file. - - If 'fullname' is a package within the zip file, return the - 'ResourceReader' object for the package. Otherwise return None. 
- """ - try: - if not self.is_package(fullname): - return None - except ZipImportError: - return None + """Return the ResourceReader for a module in a zip file.""" from importlib.readers import ZipReader + return ZipReader(self, fullname) diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 8386e407f49..b97738836d9 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -246,9 +246,9 @@ def library_recipes(): result.extend([ dict( - name="OpenSSL 3.0.13", - url="https://www.openssl.org/source/openssl-3.0.13.tar.gz", - checksum='88525753f79d3bec27d2fa7c66aa0b92b3aa9498dafd93d7cfa4b3780cdae313', + name="OpenSSL 3.0.15", + url="https://github.com/openssl/openssl/releases/download/openssl-3.0.15/openssl-3.0.15.tar.gz", + checksum='23c666d0edf20f14249b3d8f0368acaee9ab585b09e1de82107c66e1f3ec9533', buildrecipe=build_universal_openssl, configure=None, install=None, diff --git a/Mac/Resources/app-store-compliance.patch b/Mac/Resources/app-store-compliance.patch new file mode 100644 index 00000000000..f4b7decc01c --- /dev/null +++ b/Mac/Resources/app-store-compliance.patch @@ -0,0 +1,29 @@ +diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py +index d6c83a75c1c..19ed4e01091 100644 +--- a/Lib/test/test_urlparse.py ++++ b/Lib/test/test_urlparse.py +@@ -237,11 +237,6 @@ def test_roundtrips(self): + '','',''), + ('git+ssh', 'git@github.com','/user/project.git', + '', '')), +- ('itms-services://?action=download-manifest&url=https://example.com/app', +- ('itms-services', '', '', '', +- 'action=download-manifest&url=https://example.com/app', ''), +- ('itms-services', '', '', +- 'action=download-manifest&url=https://example.com/app', '')), + ('+scheme:path/to/file', + ('', '', '+scheme:path/to/file', '', '', ''), + ('', '', '+scheme:path/to/file', '', '')), +diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py +index 8f724f907d4..148caf742c9 100644 +--- a/Lib/urllib/parse.py ++++ b/Lib/urllib/parse.py +@@ -59,7 +59,7 @@ + 'imap', 'wais', 'file', 'mms', 'https', 'shttp', + 'snews', 'prospero', 'rtsp', 'rtsps', 'rtspu', 'rsync', + 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh', +- 'ws', 'wss', 'itms-services'] ++ 'ws', 'wss'] + + uses_params = ['', 'ftp', 'hdl', 'prospero', 'http', 'imap', + 'https', 'shttp', 'rtsp', 'rtsps', 'rtspu', 'sip', diff --git a/Makefile.pre.in b/Makefile.pre.in index 22d8bb96208..538229220fd 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -41,8 +41,9 @@ AR= @AR@ READELF= @READELF@ SOABI= @SOABI@ ABIFLAGS= @ABIFLAGS@ +ABI_THREAD= @ABI_THREAD@ LDVERSION= @LDVERSION@ -MODULE_LDFLAGS=@MODULE_LDFLAGS@ +LIBPYTHON=@LIBPYTHON@ GITVERSION= @GITVERSION@ GITTAG= @GITTAG@ GITBRANCH= @GITBRANCH@ @@ -158,7 +159,7 @@ WHEEL_PKG_DIR= @WHEEL_PKG_DIR@ # Detailed destination directories BINLIBDEST= @BINLIBDEST@ -LIBDEST= $(SCRIPTDIR)/python$(VERSION) +LIBDEST= $(SCRIPTDIR)/python$(VERSION)$(ABI_THREAD) INCLUDEPY= $(INCLUDEDIR)/python$(LDVERSION) CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(LDVERSION) @@ -167,7 +168,7 @@ SHLIB_SUFFIX= @SHLIB_SUFFIX@ EXT_SUFFIX= @EXT_SUFFIX@ LDSHARED= @LDSHARED@ $(PY_LDFLAGS) BLDSHARED= @BLDSHARED@ $(PY_CORE_LDFLAGS) -LDCXXSHARED= @LDCXXSHARED@ +LDCXXSHARED= @LDCXXSHARED@ $(PY_LDFLAGS) DESTSHARED= $(BINLIBDEST)/lib-dynload # List of exported symbols for AIX @@ -178,6 +179,9 @@ EXPORTSFROM= @EXPORTSFROM@ EXE= @EXEEXT@ BUILDEXE= @BUILDEXEEXT@ +# Name of the patch file to apply for app store compliance +APP_STORE_COMPLIANCE_PATCH=@APP_STORE_COMPLIANCE_PATCH@ + # Short name and 
location for Mac OS X Python framework UNIVERSALSDK=@UNIVERSALSDK@ PYTHONFRAMEWORK= @PYTHONFRAMEWORK@ @@ -691,7 +695,7 @@ list-targets: @grep -E '^[A-Za-z][-A-Za-z0-9]+:' Makefile | awk -F : '{print $$1}' .PHONY: build_all -build_all: check-clean-src $(BUILDPYTHON) platform sharedmods \ +build_all: check-clean-src check-app-store-compliance $(BUILDPYTHON) platform sharedmods \ gdbhooks Programs/_testembed scripts checksharedmods rundsymutil .PHONY: build_wasm @@ -714,6 +718,16 @@ check-clean-src: exit 1; \ fi +# Check that the app store compliance patch can be applied (if configured). +# This is checked as a dry-run against the original library sources; +# the patch will be actually applied during the install phase. +.PHONY: check-app-store-compliance +check-app-store-compliance: + @if [ "$(APP_STORE_COMPLIANCE_PATCH)" != "" ]; then \ + patch --dry-run --quiet --force --strip 1 --directory "$(abs_srcdir)" --input "$(abs_srcdir)/$(APP_STORE_COMPLIANCE_PATCH)"; \ + echo "App store compliance patch can be applied."; \ + fi + # Profile generation build must start from a clean tree. profile-clean-stamp: $(MAKE) clean @@ -1099,6 +1113,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/longobject.h \ $(srcdir)/Include/cpython/memoryobject.h \ $(srcdir)/Include/cpython/methodobject.h \ + $(srcdir)/Include/cpython/modsupport.h \ $(srcdir)/Include/cpython/monitoring.h \ $(srcdir)/Include/cpython/object.h \ $(srcdir)/Include/cpython/objimpl.h \ @@ -1648,7 +1663,7 @@ regen-unicodedata: regen-all: regen-cases regen-typeslots \ regen-token regen-ast regen-keyword regen-sre regen-frozen \ regen-pegen-metaparser regen-pegen regen-test-frozenmain \ - regen-test-levenshtein regen-global-objects regen-jit + regen-test-levenshtein regen-global-objects @echo @echo "Note: make regen-stdlib-module-names, make regen-limited-abi, " @echo "make regen-configure, make regen-sbom, and make regen-unicodedata should be run manually" @@ -2348,6 +2363,7 @@ LIBSUBDIRS= asyncio \ __phello__ TESTSUBDIRS= idlelib/idle_test \ test \ + test/test_ast \ test/archivetestdata \ test/audiodata \ test/certdata \ @@ -2427,21 +2443,6 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_importlib/namespace_pkgs/project3/parent/child \ test/test_importlib/partial \ test/test_importlib/resources \ - test/test_importlib/resources/data01 \ - test/test_importlib/resources/data01/subdirectory \ - test/test_importlib/resources/data02 \ - test/test_importlib/resources/data02/one \ - test/test_importlib/resources/data02/subdirectory \ - test/test_importlib/resources/data02/subdirectory/subsubdir \ - test/test_importlib/resources/data02/two \ - test/test_importlib/resources/data03 \ - test/test_importlib/resources/data03/namespace \ - test/test_importlib/resources/data03/namespace/portion1 \ - test/test_importlib/resources/data03/namespace/portion2 \ - test/test_importlib/resources/namespacedata01 \ - test/test_importlib/resources/namespacedata01/subdirectory \ - test/test_importlib/resources/zipdata01 \ - test/test_importlib/resources/zipdata02 \ test/test_importlib/source \ test/test_inspect \ test/test_interpreters \ @@ -2566,6 +2567,14 @@ libinstall: all $(srcdir)/Modules/xxmodule.c $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).py \ $(DESTDIR)$(LIBDEST); \ $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt + @ # If app store compliance has been configured, apply the patch to the + @ # installed library code. 
The patch has been previously validated against + @ # the original source tree, so we can ignore any errors that are raised + @ # due to files that are missing because of --disable-test-modules etc. + @if [ "$(APP_STORE_COMPLIANCE_PATCH)" != "" ]; then \ + echo "Applying app store compliance patch"; \ + patch --force --reject-file "$(abs_builddir)/app-store-compliance.rej" --strip 2 --directory "$(DESTDIR)$(LIBDEST)" --input "$(abs_srcdir)/$(APP_STORE_COMPLIANCE_PATCH)" || true ; \ + fi @ # Build PYC files for the 3 optimization levels (0, 1, 2) -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ @@ -2634,7 +2643,7 @@ inclinstall: $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(INCLUDEPY)/internal; \ else true; \ fi - @if test "$(INSTALL_MIMALLOC)" == "yes"; then \ + @if test "$(INSTALL_MIMALLOC)" = "yes"; then \ if test ! -d $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc; then \ echo "Creating directory $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc"; \ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc; \ @@ -2655,7 +2664,7 @@ inclinstall: echo $(INSTALL_DATA) $$i $(INCLUDEPY)/internal; \ $(INSTALL_DATA) $$i $(DESTDIR)$(INCLUDEPY)/internal; \ done - @if test "$(INSTALL_MIMALLOC)" == "yes"; then \ + @if test "$(INSTALL_MIMALLOC)" = "yes"; then \ echo $(INSTALL_DATA) $(srcdir)/Include/internal/mimalloc/mimalloc.h $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc.h; \ $(INSTALL_DATA) $(srcdir)/Include/internal/mimalloc/mimalloc.h $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc.h; \ for i in $(srcdir)/Include/internal/mimalloc/mimalloc/*.h; \ diff --git a/Misc/ACKS b/Misc/ACKS index 41ff4fe59ae..b5d2f51a8bd 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -520,6 +520,7 @@ Michael Ernst Ben Escoto Andy Eskilsson André Espaze +Lucas Esposito Stefan Esser Nicolas Estibals Jonathan Eunice @@ -751,6 +752,7 @@ Kasun Herath Chris Herborth Ivan Herman Jürgen Hermann +Joshua Jay Herman Gary Herron Ernie Hershey Thomas Herve @@ -1548,6 +1550,7 @@ Lisa Roach Carl Robben Ben Roberts Mark Roberts +Tony Roberts Andy Robinson Jim Robinson Yolanda Robla @@ -1649,6 +1652,7 @@ Scott Schram Robin Schreiber Chad J. Schroeder Simon-Martin Schroeder +Brian Schubert Christian Schubert Sam Schulenburg Andreas Schwab @@ -1667,6 +1671,7 @@ Fred Sells Jiwon Seo Iñigo Serna Joakim Sernbrant +Rodrigo Girão Serrão Roger D. Serwy Jerry Seutter Pete Sevander @@ -1739,6 +1744,7 @@ Christopher Smith Eric V. Smith Ethan H. Smith Gregory P. Smith +Malcolm Smith Mark Smith Nathaniel J. Smith Roy Smith diff --git a/Misc/HISTORY b/Misc/HISTORY index 8ca35e1af62..d68aaa06677 100644 --- a/Misc/HISTORY +++ b/Misc/HISTORY @@ -3952,7 +3952,7 @@ Library - Issue #18626: the inspect module now offers a basic command line introspection interface (Initial patch by Claudiu Popa) -- Issue #3015: Fixed tkinter with wantobject=False. Any Tcl command call +- Issue #3015: Fixed tkinter with ``wantobjects=False``. Any Tcl command call returned empty string. - Issue #19037: The mailbox module now makes all changes to maildir files @@ -5590,7 +5590,7 @@ Library - Issue #16248: Disable code execution from the user's home directory by tkinter when the -E flag is passed to Python. Patch by Zachary Ware. -- Issue #13390: New function :func:`sys.getallocatedblocks()` returns the +- Issue #13390: New function :func:`sys.getallocatedblocks` returns the number of memory blocks currently allocated. - Issue #16628: Fix a memory leak in ctypes.resize(). 
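A minimal sketch of the dry-run validation that the new check-app-store-compliance target in Makefile.pre.in above performs, assuming patch(1) is on PATH; the Python wrapper and its function name are illustrative only, while the flags mirror the Makefile rule:

    import subprocess
    import sys

    def patch_applies_cleanly(patch_file, source_dir):
        # Same invocation as the Makefile dry run: exit status 0 means every
        # hunk in the patch would apply cleanly against source_dir.
        result = subprocess.run(
            ["patch", "--dry-run", "--quiet", "--force", "--strip", "1",
             "--directory", source_dir, "--input", patch_file],
        )
        return result.returncode == 0

    if __name__ == "__main__":
        ok = patch_applies_cleanly("Mac/Resources/app-store-compliance.patch", ".")
        print("App store compliance patch can be applied." if ok
              else "App store compliance patch does not apply cleanly.")
        sys.exit(0 if ok else 1)

During ``make libinstall`` the patch is then applied for real with ``--force`` and a reject file, so hunks for files missing from the installed tree (for example with ``--disable-test-modules``) do not abort the install.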
@@ -6157,7 +6157,7 @@ Tests starting with a ".". Patch by Sebastian Kreft. - Issue #13390: The ``-R`` option to regrtest now also checks for memory - allocation leaks, using :func:`sys.getallocatedblocks()`. + allocation leaks, using :func:`sys.getallocatedblocks`. - Issue #16559: Add more tests for the json module, including some from the official test suite at json.org. Patch by Serhiy Storchaka. diff --git a/Misc/NEWS.d/3.10.0a1.rst b/Misc/NEWS.d/3.10.0a1.rst index 9a729a45b16..f09842f1e77 100644 --- a/Misc/NEWS.d/3.10.0a1.rst +++ b/Misc/NEWS.d/3.10.0a1.rst @@ -97,7 +97,7 @@ convention. Patch by Donghee Na. .. nonce: aJS9B3 .. section: Core and Builtins -Port the :mod:`_bisect` module to the multi-phase initialization API +Port the :mod:`!_bisect` module to the multi-phase initialization API (:pep:`489`). .. @@ -128,7 +128,7 @@ Taskaya. .. nonce: lh335O .. section: Core and Builtins -Port the :mod:`_lsprof` extension module to multi-phase initialization +Port the :mod:`!_lsprof` extension module to multi-phase initialization (:pep:`489`). .. @@ -148,7 +148,7 @@ Port the :mod:`cmath` extension module to multi-phase initialization .. nonce: jiXmyT .. section: Core and Builtins -Port the :mod:`_scproxy` extension module to multi-phase initialization +Port the :mod:`!_scproxy` extension module to multi-phase initialization (:pep:`489`). .. @@ -168,7 +168,7 @@ Port the :mod:`termios` extension module to multi-phase initialization .. nonce: QuDIut .. section: Core and Builtins -Convert the :mod:`_sha256` extension module types to heap types. +Convert the :mod:`!_sha256` extension module types to heap types. .. @@ -187,7 +187,7 @@ classes with a huge amount of arguments. Patch by Pablo Galindo. .. nonce: CnRME3 .. section: Core and Builtins -Port the :mod:`_overlapped` extension module to multi-phase initialization +Port the :mod:`!_overlapped` extension module to multi-phase initialization (:pep:`489`). .. @@ -197,7 +197,7 @@ Port the :mod:`_overlapped` extension module to multi-phase initialization .. nonce: X9CZgo .. section: Core and Builtins -Port the :mod:`_curses_panel` extension module to multi-phase initialization +Port the :mod:`!_curses_panel` extension module to multi-phase initialization (:pep:`489`). .. @@ -207,7 +207,7 @@ Port the :mod:`_curses_panel` extension module to multi-phase initialization .. nonce: 5jZymK .. section: Core and Builtins -Port the :mod:`_opcode` extension module to multi-phase initialization +Port the :mod:`!_opcode` extension module to multi-phase initialization (:pep:`489`). .. @@ -282,7 +282,7 @@ initialized ``_ast`` module. .. nonce: vcxSUa .. section: Core and Builtins -Convert :mod:`_operator` to use :c:func:`PyType_FromSpec`. +Convert :mod:`!_operator` to use :c:func:`PyType_FromSpec`. .. @@ -291,7 +291,7 @@ Convert :mod:`_operator` to use :c:func:`PyType_FromSpec`. .. nonce: fubBkb .. section: Core and Builtins -Port :mod:`_sha3` to multi-phase init. Convert static types to heap types. +Port :mod:`!_sha3` to multi-phase init. Convert static types to heap types. .. @@ -300,7 +300,7 @@ Port :mod:`_sha3` to multi-phase init. Convert static types to heap types. .. nonce: FC13e7 .. section: Core and Builtins -Port the :mod:`_blake2` extension module to the multi-phase initialization +Port the :mod:`!_blake2` extension module to the multi-phase initialization API (:pep:`489`). .. @@ -339,7 +339,7 @@ The output of ``python --help`` contains now only ASCII characters. .. nonce: O0d3ym .. 
section: Core and Builtins -Port the :mod:`_sha1`, :mod:`_sha512`, and :mod:`_md5` extension modules to +Port the :mod:`!_sha1`, :mod:`!_sha512`, and :mod:`!_md5` extension modules to multi-phase initialization API (:pep:`489`). .. @@ -636,7 +636,7 @@ Remove the remaining files from the old parser and the :mod:`symbol` module. .. nonce: _yI-ax .. section: Core and Builtins -Convert :mod:`_bz2` to use :c:func:`PyType_FromSpec`. +Convert :mod:`!_bz2` to use :c:func:`PyType_FromSpec`. .. @@ -666,7 +666,7 @@ by Brandt Bucher. .. nonce: 61iyYh .. section: Core and Builtins -Port :mod:`_gdbm` to multiphase initialization. +Port :mod:`!_gdbm` to multiphase initialization. .. @@ -696,7 +696,7 @@ for emitting syntax errors. Patch by Pablo Galindo. .. nonce: mmlp3Q .. section: Core and Builtins -Port :mod:`_dbm` to multiphase initialization. +Port :mod:`!_dbm` to multiphase initialization. .. @@ -1010,7 +1010,7 @@ Port :mod:`mmap` to multiphase initialization. .. nonce: Kfe9fT .. section: Core and Builtins -Port :mod:`_lzma` to multiphase initialization. +Port :mod:`!_lzma` to multiphase initialization. .. @@ -1032,7 +1032,7 @@ the :meth:`~object.__int__` method but do not have the .. nonce: AkRzjb .. section: Core and Builtins -Add :meth:`int.bit_count()`, counting the number of ones in the binary +Add :meth:`int.bit_count`, counting the number of ones in the binary representation of an integer. Patch by Niklas Fiekas. .. @@ -1499,7 +1499,7 @@ used to cause ZeroDivisionError now cause an OverflowError instead. .. nonce: rju34k .. section: Library -Add :func:`os.cpu_count()` support for VxWorks RTOS. +Add :func:`os.cpu_count` support for VxWorks RTOS. .. @@ -2452,7 +2452,7 @@ Added the *root_dir* and *dir_fd* parameters in :func:`glob.glob`. .. nonce: X-TJZO .. section: Library -Fix :meth:`IMAP4.noop()` when debug mode is enabled (ex: ``imaplib.Debug = +Fix :meth:`IMAP4.noop` when debug mode is enabled (ex: ``imaplib.Debug = 3``). .. diff --git a/Misc/NEWS.d/3.10.0a2.rst b/Misc/NEWS.d/3.10.0a2.rst index 79f570439b5..bd002b6ad3d 100644 --- a/Misc/NEWS.d/3.10.0a2.rst +++ b/Misc/NEWS.d/3.10.0a2.rst @@ -362,7 +362,7 @@ plistlib: fix parsing XML plists with hexadecimal integer values .. nonce: 85BsRA .. section: Library -Fix an incorrectly formatted error from :meth:`_codecs.charmap_decode` when +Fix an incorrectly formatted error from :meth:`!_codecs.charmap_decode` when called with a mapped value outside the range of valid Unicode code points. PR by Max Bernstein. @@ -383,7 +383,7 @@ Inwood. .. nonce: jd_gkA .. section: Library -:meth:`sched.scheduler.cancel()` will now cancel the correct event, if two +:meth:`sched.scheduler.cancel` will now cancel the correct event, if two events with same priority are scheduled for the same time. Patch by Bar Harel. diff --git a/Misc/NEWS.d/3.10.0a3.rst b/Misc/NEWS.d/3.10.0a3.rst index 179cf3e9cfb..33c3e14b7a4 100644 --- a/Misc/NEWS.d/3.10.0a3.rst +++ b/Misc/NEWS.d/3.10.0a3.rst @@ -477,7 +477,7 @@ object belongs to, potentially breaking the unpickling of those objects. Simplify the :mod:`importlib` external bootstrap code: ``importlib._bootstrap_external`` now uses regular imports to import builtin -modules. When it is imported, the builtin :func:`__import__()` function is +modules. When it is imported, the builtin :func:`__import__` function is already fully working and so can be used to import builtin modules like :mod:`sys`. Patch by Victor Stinner. @@ -517,8 +517,8 @@ Port the ``_signal`` extension module to the multi-phase initialization API .. nonce: Wh5svI .. 
section: Library -:func:`time.time()`, :func:`time.perf_counter()` and -:func:`time.monotonic()` functions can no longer fail with a Python fatal +:func:`time.time`, :func:`time.perf_counter` and +:func:`time.monotonic` functions can no longer fail with a Python fatal error, instead raise a regular Python exception on failure. .. @@ -550,10 +550,10 @@ deduplicate, use type to cache key). Patch provided by Yurii Karabas. .. nonce: iDbHrw .. section: Library -:func:`time.perf_counter()` on Windows and :func:`time.monotonic()` on macOS +:func:`time.perf_counter` on Windows and :func:`time.monotonic` on macOS are now system-wide. Previously, they used an offset computed at startup to reduce the precision loss caused by the float type. Use -:func:`time.perf_counter_ns()` and :func:`time.monotonic_ns()` added in +:func:`time.perf_counter_ns` and :func:`time.monotonic_ns` added in Python 3.7 to avoid this precision loss. .. @@ -1386,7 +1386,7 @@ Python already implicitly installs signal handlers: see The ``Py_TRASHCAN_BEGIN`` macro no longer accesses PyTypeObject attributes, but now can get the condition by calling the new private -:c:func:`_PyTrash_cond()` function which hides implementation details. +:c:func:`!_PyTrash_cond()` function which hides implementation details. .. diff --git a/Misc/NEWS.d/3.10.0a4.rst b/Misc/NEWS.d/3.10.0a4.rst index ae667f2bffe..19f0db9a6be 100644 --- a/Misc/NEWS.d/3.10.0a4.rst +++ b/Misc/NEWS.d/3.10.0a4.rst @@ -193,7 +193,7 @@ subinterpreters. Patch by Victor Stinner. .. nonce: j7nl6A .. section: Core and Builtins -Make :c:func:`_PyUnicode_FromId` function compatible with subinterpreters. +Make :c:func:`!_PyUnicode_FromId` function compatible with subinterpreters. Each interpreter now has an array of identifier objects (interned strings decoded from UTF-8). Patch by Victor Stinner. @@ -367,7 +367,7 @@ uses "options" instead. .. nonce: Quy3zn .. section: Library -Port the :mod:`_thread` extension module to the multiphase initialization +Port the :mod:`!_thread` extension module to the multiphase initialization API (:pep:`489`) and convert its static types to heap types. .. @@ -709,7 +709,7 @@ directories. .. nonce: ek38d_ .. section: Library -Add :func:`os.set_blocking()` support for VxWorks RTOS. +Add :func:`os.set_blocking` support for VxWorks RTOS. .. @@ -960,8 +960,8 @@ explicitly and so not exported. .. nonce: Je08Ny .. section: C API -Remove the private :c:func:`_Py_fopen` function which is no longer needed. -Use :c:func:`_Py_wfopen` or :c:func:`_Py_fopen_obj` instead. Patch by Victor +Remove the private :c:func:`!_Py_fopen` function which is no longer needed. +Use :c:func:`!_Py_wfopen` or :c:func:`!_Py_fopen_obj` instead. Patch by Victor Stinner. .. diff --git a/Misc/NEWS.d/3.10.0a5.rst b/Misc/NEWS.d/3.10.0a5.rst index dc95e8ce072..a85ea1ff1c2 100644 --- a/Misc/NEWS.d/3.10.0a5.rst +++ b/Misc/NEWS.d/3.10.0a5.rst @@ -108,7 +108,7 @@ a slice at the start of the ``bytearray`` to a shorter byte string). .. nonce: WfTdfg .. section: Core and Builtins -Fix the :c:func:`_PyUnicode_FromId` function (_Py_IDENTIFIER(var) API) when +Fix the :c:func:`!_PyUnicode_FromId` function (_Py_IDENTIFIER(var) API) when :c:func:`Py_Initialize` / :c:func:`Py_Finalize` is called multiple times: preserve ``_PyRuntime.unicode_ids.next_index`` value. 
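The time entries above point to the ``*_ns`` counterparts added in Python 3.7 as the way to avoid float precision loss; a short reminder of that documented pattern (illustrative only, not part of this patch):

    import time

    start = time.perf_counter_ns()        # integer nanoseconds, no float rounding
    time.sleep(0.01)
    elapsed_ns = time.perf_counter_ns() - start
    print(f"elapsed: {elapsed_ns} ns ({elapsed_ns / 1e9:.6f} s)")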
diff --git a/Misc/NEWS.d/3.10.0a6.rst b/Misc/NEWS.d/3.10.0a6.rst index bad35280848..31b7df2c611 100644 --- a/Misc/NEWS.d/3.10.0a6.rst +++ b/Misc/NEWS.d/3.10.0a6.rst @@ -315,7 +315,7 @@ Adds :const:`resource.RLIMIT_KQUEUES` constant from FreeBSD to the .. section: Library Make the pure Python implementation of :mod:`xml.etree.ElementTree` behave -the same as the C implementation (:mod:`_elementree`) regarding default +the same as the C implementation (:mod:`!_elementree`) regarding default attribute values (by not setting ``specified_attributes=1``). .. diff --git a/Misc/NEWS.d/3.10.0a7.rst b/Misc/NEWS.d/3.10.0a7.rst index fe6213d95a8..d866e805fd3 100644 --- a/Misc/NEWS.d/3.10.0a7.rst +++ b/Misc/NEWS.d/3.10.0a7.rst @@ -83,7 +83,7 @@ instruction dispatch a bit. .. nonce: PhaT-B .. section: Core and Builtins -Fix reference leak in the :mod:`_hashopenssl` extension. Patch by Pablo +Fix reference leak in the :mod:`!_hashopenssl` extension. Patch by Pablo Galindo. .. @@ -654,7 +654,7 @@ support importlib.invalidate_caches(). Patch by Desmond Cheong. .. nonce: 3r0HFY .. section: Library -Fail fast in :func:`shutil.move()` to avoid creating destination directories +Fail fast in :func:`shutil.move` to avoid creating destination directories on failure. .. @@ -701,8 +701,8 @@ sessions in :mod:`pdb`'s interactive mode. When the :data:`tempfile.tempdir` global variable is set to a value of type bytes, it is now handled consistently. Previously exceptions could be raised from some tempfile APIs when the directory did not already exist in -this situation. Also ensures that the :func:`tempfile.gettempdir()` and -:func:`tempfile.gettempdirb()` functions *always* return ``str`` and +this situation. Also ensures that the :func:`tempfile.gettempdir` and +:func:`tempfile.gettempdirb` functions *always* return ``str`` and ``bytes`` respectively. .. @@ -715,7 +715,7 @@ this situation. Also ensures that the :func:`tempfile.gettempdir()` and Expose ``X509_V_FLAG_ALLOW_PROXY_CERTS`` as :const:`~ssl.VERIFY_ALLOW_PROXY_CERTS` to allow proxy certificate validation as explained in -https://www.openssl.org/docs/man1.1.1/man7/proxy-certificates.html. +https://docs.openssl.org/1.1.1/man7/proxy-certificates/. .. diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst index 640f3ee58ad..25c6b827146 100644 --- a/Misc/NEWS.d/3.10.0b1.rst +++ b/Misc/NEWS.d/3.10.0b1.rst @@ -182,7 +182,7 @@ normally be possible, but might occur in some unusual circumstances. .. nonce: u5Y6bS .. section: Core and Builtins -Importing the :mod:`_signal` module in a subinterpreter has no longer side +Importing the :mod:`!_signal` module in a subinterpreter has no longer side effects. .. @@ -277,7 +277,7 @@ cause any runtime effects with ``from __future__ import annotations``. .. section: Core and Builtins :exc:`SyntaxError` exceptions raised by the interpreter will highlight the -full error range of the expression that consistutes the syntax error itself, +full error range of the expression that constitutes the syntax error itself, instead of just where the problem is detected. Patch by Pablo Galindo. .. @@ -776,11 +776,11 @@ builtins.open() is now io.open(). .. nonce: o1zEk_ .. section: Library -The Python :func:`_pyio.open` function becomes a static method to behave as +The Python :func:`!_pyio.open` function becomes a static method to behave as :func:`io.open` built-in function: don't become a bound method when stored as a class variable. It becomes possible since static methods are now -callable in Python 3.10. 
Moreover, :func:`_pyio.OpenWrapper` becomes a -simple alias to :func:`_pyio.open`. Patch by Victor Stinner. +callable in Python 3.10. Moreover, :func:`!_pyio.OpenWrapper` becomes a +simple alias to :func:`!_pyio.open`. Patch by Victor Stinner. .. @@ -958,7 +958,7 @@ Patch by Jelle Zijlstra. .. nonce: nnVd3h .. section: Library -Add an ``encoding`` parameter :func:`logging.fileConfig()`. +Add an ``encoding`` parameter :func:`logging.fileConfig`. .. @@ -1270,7 +1270,7 @@ Fix thread locks in zlib module may go wrong in rare case. Patch by Ma Lin. .. nonce: oi6Kdb .. section: Library -Fix dataclasses with ``InitVar``\s and :func:`~dataclasses.replace()`. Patch +Fix dataclasses with ``InitVar``\s and :func:`~dataclasses.replace`. Patch by Claudiu Popa. .. @@ -1310,11 +1310,11 @@ functions in the :mod:`os` module. .. nonce: 9adF3E .. section: Library -:func:`os.path.expanduser()` now refuses to guess Windows home directories +:func:`os.path.expanduser` now refuses to guess Windows home directories if the basename of current user's home directory does not match their username. -:meth:`pathlib.Path.expanduser()` and :meth:`~pathlib.Path.home()` now +:meth:`pathlib.Path.expanduser` and :meth:`~pathlib.Path.home` now consistently raise :exc:`RuntimeError` exception when a home directory cannot be resolved. Previously a :exc:`KeyError` exception could be raised on Windows when the ``"USERNAME"`` environment variable was unset. @@ -1516,7 +1516,7 @@ Adds additional arguments to :func:`os.startfile` function. .. nonce: F0Cg6X .. section: Windows -Avoid raising errors from :meth:`pathlib.Path.exists()` when passed an +Avoid raising errors from :meth:`pathlib.Path.exists` when passed an invalid filename. .. diff --git a/Misc/NEWS.d/3.11.0a1.rst b/Misc/NEWS.d/3.11.0a1.rst index 40fbb9d42b7..0b49c2a7877 100644 --- a/Misc/NEWS.d/3.11.0a1.rst +++ b/Misc/NEWS.d/3.11.0a1.rst @@ -613,7 +613,7 @@ Rename ``types.Union`` to ``types.UnionType``. .. section: Core and Builtins Expose specialization stats in python via -:func:`_opcode.get_specialization_stats`. +:func:`!_opcode.get_specialization_stats`. .. @@ -1701,7 +1701,7 @@ Remove many old deprecated :mod:`unittest` features: .. nonce: y1kEfP .. section: Library -Remove the deprecated ``split()`` method of :class:`_tkinter.TkappType`. +Remove the deprecated ``split()`` method of :class:`!_tkinter.TkappType`. Patch by Erlend E. Aasland. .. @@ -2000,7 +2000,7 @@ during file extraction. .. nonce: roUl0G .. section: Library -:mod:`subprocess` on Solaris now also uses :func:`os.posix_spawn()` for +:mod:`subprocess` on Solaris now also uses :func:`os.posix_spawn` for better performance. .. @@ -2298,9 +2298,9 @@ Adopt *binacii.a2b_base64*'s strict mode in *base64.b64decode*. .. nonce: ThuDMI .. section: Library -Fixed a bug in the :mod:`_ssl` module that was throwing :exc:`OverflowError` -when using :meth:`_ssl._SSLSocket.write` and :meth:`_ssl._SSLSocket.read` -for a big value of the ``len`` parameter. Patch by Pablo Galindo +Fixed a bug in the :mod:`!_ssl` module that was throwing :exc:`OverflowError` +when using :meth:`!_ssl._SSLSocket.write` and :meth:`!_ssl._SSLSocket.read` +for a big value of the ``len`` parameter. Patch by Pablo Galindo. .. @@ -2398,7 +2398,7 @@ class in the interactive session. Instead of :exc:`TypeError`, it should be .. nonce: R3IcM1 .. section: Library -Fix memory leak in :func:`_tkinter._flatten` if it is called with a sequence +Fix memory leak in :func:`!_tkinter._flatten` if it is called with a sequence or set, but not list or tuple. .. 
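The expanduser entry quoted above documents a RuntimeError when no home directory can be resolved (for example, on Windows with USERNAME unset); a brief illustration of handling that case, assuming nothing beyond what the changelog entry states:

    from pathlib import Path

    try:
        docs = Path("~/Documents").expanduser()
    except RuntimeError:
        # Raised when the home directory cannot be resolved, per the
        # changelog entry above; fall back to the current directory.
        docs = Path.cwd()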
@@ -4187,7 +4187,7 @@ Add calls of :func:`gc.collect` in tests to support PyPy. .. nonce: mQZdXU .. section: Tests -Made tests relying on the :mod:`_asyncio` C extension module optional to +Made tests relying on the :mod:`!_asyncio` C extension module optional to allow running on alternative Python implementations. Patch by Serhiy Storchaka. @@ -4238,7 +4238,7 @@ harmless "malloc can't allocate region" messages spewed by test_decimal. .. nonce: KKsNOV .. section: Tests -Fixed floating point precision issue in turtle tests. +Fixed floating-point precision issue in turtle tests. .. diff --git a/Misc/NEWS.d/3.11.0a2.rst b/Misc/NEWS.d/3.11.0a2.rst index 05644d0a463..48cf2c1e428 100644 --- a/Misc/NEWS.d/3.11.0a2.rst +++ b/Misc/NEWS.d/3.11.0a2.rst @@ -15,7 +15,7 @@ Improve the :exc:`SyntaxError` message when using ``True``, ``None`` or .. section: Core and Builtins :data:`sys.stdlib_module_names` now contains the macOS-specific module -:mod:`_scproxy`. +:mod:`!_scproxy`. .. @@ -1023,7 +1023,7 @@ compile shared modules. .. nonce: 61gM2A .. section: Build -:mod:`pyexpat` and :mod:`_elementtree` no longer define obsolete macros +:mod:`pyexpat` and :mod:`!_elementtree` no longer define obsolete macros ``HAVE_EXPAT_CONFIG_H`` and ``USE_PYEXPAT_CAPI``. ``XML_POOR_ENTROPY`` is now defined in ``expat_config.h``. diff --git a/Misc/NEWS.d/3.11.0a3.rst b/Misc/NEWS.d/3.11.0a3.rst index 2842aad0e16..6a0ae20d1fb 100644 --- a/Misc/NEWS.d/3.11.0a3.rst +++ b/Misc/NEWS.d/3.11.0a3.rst @@ -27,7 +27,7 @@ invalid targets. Patch by Pablo Galindo .. nonce: 3TmTSw .. section: Core and Builtins -:c:func:`_PyErr_ChainStackItem` no longer normalizes ``exc_info`` (including +:c:func:`!_PyErr_ChainStackItem` no longer normalizes ``exc_info`` (including setting the traceback on the exception instance) because ``exc_info`` is always normalized. diff --git a/Misc/NEWS.d/3.11.0a4.rst b/Misc/NEWS.d/3.11.0a4.rst index a5ce7620016..64e2f39ad9d 100644 --- a/Misc/NEWS.d/3.11.0a4.rst +++ b/Misc/NEWS.d/3.11.0a4.rst @@ -258,7 +258,7 @@ instruction which performs the same operation, but without the loop. .. nonce: ADVaPT .. section: Core and Builtins -The code called from :c:func:`_PyErr_Display` was refactored to improve +The code called from :c:func:`!_PyErr_Display` was refactored to improve error handling. It now exits immediately upon an unrecoverable error. .. diff --git a/Misc/NEWS.d/3.11.0a6.rst b/Misc/NEWS.d/3.11.0a6.rst index 66ffa4ffba5..e88142e641f 100644 --- a/Misc/NEWS.d/3.11.0a6.rst +++ b/Misc/NEWS.d/3.11.0a6.rst @@ -1054,7 +1054,7 @@ Patch by Victor Stinner. .. nonce: ajJjkh .. section: Build -Building Python now requires support for floating point Not-a-Number (NaN): +Building Python now requires support for floating-point Not-a-Number (NaN): remove the ``Py_NO_NAN`` macro. Patch by Victor Stinner. .. diff --git a/Misc/NEWS.d/3.11.0a7.rst b/Misc/NEWS.d/3.11.0a7.rst index a376c8becea..eff2ea2dac1 100644 --- a/Misc/NEWS.d/3.11.0a7.rst +++ b/Misc/NEWS.d/3.11.0a7.rst @@ -224,7 +224,7 @@ Kumar Aditya. .. nonce: ZI05b5 .. section: Core and Builtins -Improved the performance of :meth:`list.append()` and list comprehensions by +Improved the performance of :meth:`list.append` and list comprehensions by optimizing for the common case, where no resize is needed. Patch by Dennis Sweeney. @@ -1127,7 +1127,7 @@ Raise more accurate and :pep:`249` compatible exceptions in :mod:`sqlite3`. * Don't overwrite :exc:`BufferError` with :exc:`ValueError` when conversion to BLOB fails. 
* Raise :exc:`~sqlite3.ProgrammingError` instead of :exc:`~sqlite3.Warning` if - user tries to :meth:`~sqlite3.Cursor.execute()` more than one SQL statement. + user tries to :meth:`~sqlite3.Cursor.execute` more than one SQL statement. * Raise :exc:`~sqlite3.ProgrammingError` instead of :exc:`ValueError` if an SQL query contains null characters. @@ -1401,7 +1401,7 @@ Christian's container image ``quay.io/tiran/cpython_autoconf:269``. .. nonce: fry4aK .. section: Build -Building Python now requires support of IEEE 754 floating point numbers. +Building Python now requires support of IEEE 754 floating-point numbers. Patch by Victor Stinner. .. diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index c35e8e2c1ca..85cb0f1b5cf 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -58,7 +58,7 @@ may have prevented Python-to-Python calls respecting PEP 523. .. nonce: -igcjS .. section: Core and Builtins -Add a closure keyword-only parameter to :func:`exec()`. It can only be specified +Add a closure keyword-only parameter to :func:`exec`. It can only be specified when exec-ing a code object that uses free variables. When specified, it must be a tuple, with exactly the number of cell variables referenced by the code object. closure has a default value of ``None``, and it must be ``None`` if the @@ -285,7 +285,7 @@ macros. .. nonce: 11YXHQ .. section: Core and Builtins -Add a new :c:func:`_PyFrame_IsEntryFrame` API function, to check if a +Add a new :c:func:`!_PyFrame_IsEntryFrame` API function, to check if a :c:type:`PyFrameObject` is an entry frame. Patch by Pablo Galindo. .. @@ -1801,8 +1801,8 @@ The documentation now lists which members of C structs are part of the .. section: Documentation All docstrings in code snippets are now wrapped into :c:macro:`PyDoc_STR` to -follow the guideline of `PEP 7's Documentation Strings paragraph -`_. Patch +follow the guideline of :pep:`PEP 7's Documentation Strings paragraph +<0007#documentation-strings>`. Patch by Oleg Iarygin. .. diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst index fc654d60a4a..3074ead0d02 100644 --- a/Misc/NEWS.d/3.12.0a1.rst +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -102,7 +102,7 @@ well as generator expressions. .. section: Core and Builtins Added unicode check for ``name`` attribute of ``spec`` argument passed in -:func:`_imp.create_builtin` function. +:func:`!_imp.create_builtin` function. .. @@ -483,7 +483,7 @@ Fix case of undefined behavior in ceval.c .. nonce: AfCi36 .. section: Core and Builtins -Convert :mod:`_functools` to argument clinic. +Convert :mod:`!_functools` to argument clinic. .. @@ -492,7 +492,7 @@ Convert :mod:`_functools` to argument clinic. .. nonce: wky0Fc .. section: Core and Builtins -Do not expose ``KeyWrapper`` in :mod:`_functools`. +Do not expose ``KeyWrapper`` in :mod:`!_functools`. .. @@ -1731,7 +1731,7 @@ tracing functions implemented in C. .. nonce: lenv9h .. section: Core and Builtins -:meth:`_warnings.warn_explicit` is ported to Argument Clinic. +:meth:`!_warnings.warn_explicit` is ported to Argument Clinic. .. @@ -3142,8 +3142,8 @@ test.test_codecs.EncodedFileTest`` instead. .. nonce: VhS1eS .. section: Library -Made :class:`_struct.Struct` GC-tracked in order to fix a reference leak in -the :mod:`_struct` module. +Made :class:`!_struct.Struct` GC-tracked in order to fix a reference leak in +the :mod:`!_struct` module. .. @@ -3221,9 +3221,9 @@ Stinner. .. 
section: Library :mod:`hashlib`: Remove the pure Python implementation of -:func:`hashlib.pbkdf2_hmac()`, deprecated in Python 3.10. Python 3.10 and +:func:`hashlib.pbkdf2_hmac`, deprecated in Python 3.10. Python 3.10 and newer requires OpenSSL 1.1.1 (:pep:`644`): this OpenSSL version provides a C -implementation of :func:`~hashlib.pbkdf2_hmac()` which is faster. Patch by +implementation of :func:`~hashlib.pbkdf2_hmac` which is faster. Patch by Victor Stinner. .. @@ -3258,7 +3258,7 @@ on the main thread Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) -function is a built-in function. Since Python 3.10, :func:`_pyio.open` is +function is a built-in function. Since Python 3.10, :func:`!_pyio.open` is also a static method. Patch by Victor Stinner. .. @@ -5610,7 +5610,7 @@ Accept os.PathLike for the argument to winsound.PlaySound Support native Windows case-insensitive path comparisons by using ``LCMapStringEx`` instead of :func:`str.lower` in :func:`ntpath.normcase`. -Add ``LCMapStringEx`` to the :mod:`_winapi` module. +Add ``LCMapStringEx`` to the :mod:`!_winapi` module. .. diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst index 88d84ad93b3..bc028f30636 100644 --- a/Misc/NEWS.d/3.12.0a2.rst +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -527,7 +527,7 @@ Stinner. .. nonce: Ai2KDh .. section: Library -Now :mod:`_pyio` is consistent with :mod:`_io` in raising ``ValueError`` +Now :mod:`!_pyio` is consistent with :mod:`!_io` in raising ``ValueError`` when executing methods over closed buffers. .. @@ -537,7 +537,7 @@ when executing methods over closed buffers. .. nonce: 0v8iyw .. section: Library -Clean up refleak on failed module initialisation in :mod:`_zoneinfo` +Clean up refleak on failed module initialisation in :mod:`!_zoneinfo` .. @@ -546,7 +546,7 @@ Clean up refleak on failed module initialisation in :mod:`_zoneinfo` .. nonce: qc_KHr .. section: Library -Clean up refleaks on failed module initialisation in :mod:`_pickle` +Clean up refleaks on failed module initialisation in :mod:`!_pickle` .. @@ -555,7 +555,7 @@ Clean up refleaks on failed module initialisation in :mod:`_pickle` .. nonce: LBl79O .. section: Library -Clean up refleak on failed module initialisation in :mod:`_io`. +Clean up refleak on failed module initialisation in :mod:`!_io`. .. @@ -706,7 +706,7 @@ Remove modules :mod:`!asyncore` and :mod:`!asynchat`, which were deprecated by .. section: Library Fix handling of ``bytes`` :term:`path-like objects ` in -:func:`os.ismount()`. +:func:`os.ismount`. .. diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst index 07593998d80..04a2bf9fb91 100644 --- a/Misc/NEWS.d/3.12.0a3.rst +++ b/Misc/NEWS.d/3.12.0a3.rst @@ -70,7 +70,7 @@ Fix bug where compiler crashes on an if expression with an empty body block. .. nonce: DcKoBJ .. section: Core and Builtins -Fix a reference bug in :func:`_imp.create_builtin()` after the creation of +Fix a reference bug in :func:`!_imp.create_builtin` after the creation of the first sub-interpreter for modules ``builtins`` and ``sys``. Patch by Victor Stinner. @@ -221,7 +221,7 @@ returns an invalid file descriptor. Also \ escape \s in the http.server BaseHTTPRequestHandler.log_message so that it is technically possible to parse the line and reconstruct what the -original data was. Without this a \xHH is ambiguious as to if it is a hex +original data was. 
Without this a \xHH is ambiguous as to if it is a hex replacement we put in or the characters r"\x" came through in the original request line. @@ -399,7 +399,7 @@ Fix refcount error when arguments are packed to tuple in Argument Clinic. .. nonce: 7uCiIB .. section: Library -:meth:`pathlib.PurePath.relative_to()` now treats naked Windows drive paths +:meth:`pathlib.PurePath.relative_to` now treats naked Windows drive paths as relative. This brings its behaviour in line with other parts of pathlib. .. diff --git a/Misc/NEWS.d/3.12.0a4.rst b/Misc/NEWS.d/3.12.0a4.rst index d7af30f6c09..57fb2052764 100644 --- a/Misc/NEWS.d/3.12.0a4.rst +++ b/Misc/NEWS.d/3.12.0a4.rst @@ -241,7 +241,7 @@ are now always dumped, even if switched off. Improve ``BUILD_LIST`` opcode so that it works similarly to the ``BUILD_TUPLE`` opcode, by stealing references from the stack rather than repeatedly using stack operations to set list elements. Implementation -details are in a new private API :c:func:`_PyList_FromArraySteal`. +details are in a new private API :c:func:`!_PyList_FromArraySteal`. .. @@ -611,8 +611,8 @@ random.expovariate(). .. nonce: bgtzMV .. section: Library -A :exc:`DeprecationWarning` may be raised when :func:`os.fork()` or -:func:`os.forkpty()` is called from multi-threaded processes. Forking with +A :exc:`DeprecationWarning` may be raised when :func:`os.fork` or +:func:`os.forkpty` is called from multi-threaded processes. Forking with threads is unsafe and can cause deadlocks, crashes and subtle problems. Lack of a warning does not indicate that the fork call was actually safe, as Python may not be aware of all threads. diff --git a/Misc/NEWS.d/3.12.0a5.rst b/Misc/NEWS.d/3.12.0a5.rst index effda2be6fd..5dc443bb55b 100644 --- a/Misc/NEWS.d/3.12.0a5.rst +++ b/Misc/NEWS.d/3.12.0a5.rst @@ -287,7 +287,7 @@ a positional argument would lead to a :exc:`TypeError`. .. section: Library Group-related variables of ``_posixsubprocess`` module are renamed to stress -that supplimentary group affinity is added to a fork, not replace the +that supplementary group affinity is added to a fork, not replace the inherited ones. Patch by Oleg Iarygin. .. @@ -307,7 +307,7 @@ It must not drop the ``Unpack`` part. .. nonce: wz4Xgc .. section: Library -Add :func:`os.path.splitroot()`, which splits a path into a 3-item tuple +Add :func:`os.path.splitroot`, which splits a path into a 3-item tuple ``(drive, root, tail)``. This new function is used by :mod:`pathlib` to improve the performance of path construction by up to a third. diff --git a/Misc/NEWS.d/3.12.0a6.rst b/Misc/NEWS.d/3.12.0a6.rst index 382dae33fca..bc708d163ce 100644 --- a/Misc/NEWS.d/3.12.0a6.rst +++ b/Misc/NEWS.d/3.12.0a6.rst @@ -17,7 +17,7 @@ from the HACL* project. Updated the OpenSSL version used in Windows and macOS binary release builds to 1.1.1t to address :cve:`2023-0286`, :cve:`2022-4303`, and :cve:`2022-4303` per `the OpenSSL 2023-02-07 security advisory -`_. +`_. .. diff --git a/Misc/NEWS.d/3.12.0a7.rst b/Misc/NEWS.d/3.12.0a7.rst index a859be8a047..f48b9ce0550 100644 --- a/Misc/NEWS.d/3.12.0a7.rst +++ b/Misc/NEWS.d/3.12.0a7.rst @@ -219,7 +219,7 @@ Aasland. .. nonce: DqNehf .. section: Library -Pure python :func:`locale.getencoding()` will not warn deprecation. +Pure python :func:`locale.getencoding` will not warn deprecation. .. diff --git a/Misc/NEWS.d/3.12.0b1.rst b/Misc/NEWS.d/3.12.0b1.rst index 9f3095b2242..7126e08a20c 100644 --- a/Misc/NEWS.d/3.12.0b1.rst +++ b/Misc/NEWS.d/3.12.0b1.rst @@ -1828,7 +1828,7 @@ is relative. .. nonce: 511Tbh .. 
section: Library -Convert private :meth:`_posixsubprocess.fork_exec` to use Argument Clinic. +Convert private :meth:`!_posixsubprocess.fork_exec` to use Argument Clinic. .. diff --git a/Misc/NEWS.d/3.13.0.rst b/Misc/NEWS.d/3.13.0.rst new file mode 100644 index 00000000000..b0f43d8f8ab --- /dev/null +++ b/Misc/NEWS.d/3.13.0.rst @@ -0,0 +1,18 @@ +.. date: 2024-10-05-23-53-06 +.. gh-issue: 125008 +.. nonce: ETANpd +.. release date: 2024-10-07 +.. section: Core and Builtins + +Fix :func:`tokenize.untokenize` producing invalid syntax for double braces +preceded by certain escape characters. + +.. + +.. date: 2024-10-03-22-26-39 +.. gh-issue: 124871 +.. nonce: tAMF47 +.. section: Core and Builtins + +Fix compiler bug (in some versions of 3.13) where an assertion fails during +reachability analysis. diff --git a/Misc/NEWS.d/3.13.0a1.rst b/Misc/NEWS.d/3.13.0a1.rst index 9a321f779c2..c32c9a537d7 100644 --- a/Misc/NEWS.d/3.13.0a1.rst +++ b/Misc/NEWS.d/3.13.0a1.rst @@ -2888,9 +2888,9 @@ documented and were not intended to be used externally. .. nonce: vMbmj_ .. section: Library -:data:`opcode.ENABLE_SPECIALIZATION` (which was added in 3.12 but never +:data:`!opcode.ENABLE_SPECIALIZATION` (which was added in 3.12 but never documented or intended for external usage) is moved to -:data:`_opcode.ENABLE_SPECIALIZATION` where tests can access it. +:data:`!_opcode.ENABLE_SPECIALIZATION` where tests can access it. .. @@ -3053,7 +3053,7 @@ Donghee Na. .. nonce: U9nD_B .. section: Library -Optimize :meth:`_PollLikeSelector.select` for many iteration case. +Optimize :meth:`!_PollLikeSelector.select` for many iteration case. .. @@ -3173,7 +3173,7 @@ Disable tab completion in multiline mode of :mod:`pdb` .. nonce: pYSwMj .. section: Library -Expose opcode metadata through :mod:`_opcode`. +Expose opcode metadata through :mod:`!_opcode`. .. @@ -3735,7 +3735,7 @@ overwritten. .. nonce: _sZilh .. section: Library -Fix bugs in :mod:`_ctypes` where exceptions could end up being overwritten. +Fix bugs in :mod:`!_ctypes` where exceptions could end up being overwritten. .. @@ -5148,7 +5148,7 @@ frame did not save the PC`` is found. Patch by Victor Stinner. .. nonce: qxI4OG .. section: Tests -libregrtest now calls :func:`random.seed()` before running each test file +libregrtest now calls :func:`random.seed` before running each test file when ``-r/--randomize`` command line option is used. Moreover, it's also called in worker processes. It should help to make tests more deterministic. Previously, it was only called once in the main process diff --git a/Misc/NEWS.d/3.13.0a2.rst b/Misc/NEWS.d/3.13.0a2.rst index c6b2b1b263f..a1a2d8a42ec 100644 --- a/Misc/NEWS.d/3.13.0a2.rst +++ b/Misc/NEWS.d/3.13.0a2.rst @@ -632,8 +632,8 @@ Add extra argument validation for ``alias`` command in :mod:`pdb` .. nonce: FWqZIU .. section: Library -:mod:`time`: Make :func:`time.clock_gettime()` and -:func:`time.clock_gettime_ns()` functions up to 2x faster by faster calling +:mod:`time`: Make :func:`time.clock_gettime` and +:func:`time.clock_gettime_ns` functions up to 2x faster by faster calling convention. Patch by Victor Stinner. .. @@ -674,7 +674,7 @@ Unix socket when the server is closed. .. nonce: Bc8LvA .. section: Library -Added :func:`io.text_encoding()`, :data:`io.DEFAULT_BUFFER_SIZE`, and +Added :func:`io.text_encoding`, :data:`io.DEFAULT_BUFFER_SIZE`, and :class:`io.IncrementalNewlineDecoder` to ``io.__all__``. .. @@ -736,7 +736,7 @@ which allows to format :exc:`ExceptionGroup` instances. .. nonce: 5ePgFl .. 
section: Library -Another attempt at fixing :func:`asyncio.Server.wait_closed()`. It now +Another attempt at fixing :func:`asyncio.Server.wait_closed`. It now blocks until both conditions are true: the server is closed, *and* there are no more active connections. (This means that in some cases where in 3.12.0 this function would *incorrectly* have returned immediately, it will now @@ -777,7 +777,7 @@ Add error checking during :mod:`!_socket` module init. .. nonce: urFYtn .. section: Library -Fix :mod:`_blake2` not checking for errors when initializing. +Fix :mod:`!_blake2` not checking for errors when initializing. .. @@ -880,7 +880,7 @@ Make :mod:`pdb` enter post-mortem mode even for :exc:`SyntaxError` .. nonce: _M-cQC .. section: Library -Set ``f_trace_lines = True`` on all frames upon :func:`pdb.set_trace()` +Set ``f_trace_lines = True`` on all frames upon :func:`pdb.set_trace` .. diff --git a/Misc/NEWS.d/3.13.0a3.rst b/Misc/NEWS.d/3.13.0a3.rst index 2c660192dcd..0f8dee261c6 100644 --- a/Misc/NEWS.d/3.13.0a3.rst +++ b/Misc/NEWS.d/3.13.0a3.rst @@ -449,8 +449,8 @@ well-formed for surrogateescape encoding. Patch by Sidney Markowitz. .. nonce: N8E1zw .. section: Core and Builtins -Use the object's actual class name in :meth:`_io.FileIO.__repr__`, -:meth:`_io._WindowsConsoleIO` and :meth:`_io.TextIOWrapper.__repr__`, to +Use the object's actual class name in :meth:`!_io.FileIO.__repr__`, +:meth:`!_io._WindowsConsoleIO` and :meth:`!_io.TextIOWrapper.__repr__`, to make these methods subclass friendly. .. @@ -555,7 +555,7 @@ Added :data:`mmap.MAP_NORESERVE`, :data:`mmap.MAP_NOEXTEND`, .. nonce: kXoCy0 .. section: Library -:func:`asyncio.TaskGroup()` and :func:`asyncio.timeout()` context managers +:func:`asyncio.TaskGroup` and :func:`asyncio.timeout` context managers now handle :exc:`~asyncio.CancelledError` subclasses as well as exact :exc:`!CancelledError`. @@ -607,7 +607,7 @@ with the documentation) .. nonce: xN2LuL .. section: Library -:func:`asyncio.Condition.wait()` now re-raises the same +:func:`asyncio.Condition.wait` now re-raises the same :exc:`CancelledError` instance that may have caused it to be interrupted. Fixed race condition in :func:`asyncio.Semaphore.acquire` when interrupted with a :exc:`CancelledError`. @@ -863,7 +863,7 @@ Fixed tarfile list() method to show file type. .. nonce: jLWGlr .. section: Library -:meth:`asyncio.futures.Future.set_exception()` now transforms +:meth:`asyncio.futures.Future.set_exception` now transforms :exc:`StopIteration` into :exc:`RuntimeError` instead of hanging or other misbehavior. Patch contributed by Jamie Phan. @@ -1805,7 +1805,7 @@ size. .. nonce: xPOBBY .. section: Library -:func:`warnings.filterwarnings()` and :func:`warnings.simplefilter()` now +:func:`warnings.filterwarnings` and :func:`warnings.simplefilter` now raise appropriate exceptions instead of ``AssertionError``. Patch contributed by Rémi Lapeyre. diff --git a/Misc/NEWS.d/3.13.0a4.rst b/Misc/NEWS.d/3.13.0a4.rst index 5efc244c608..1b971113173 100644 --- a/Misc/NEWS.d/3.13.0a4.rst +++ b/Misc/NEWS.d/3.13.0a4.rst @@ -336,7 +336,7 @@ for decorated functions. .. nonce: RzxNYT .. 
section: Library -Fix several :func:`format()` bugs when using the C implementation of +Fix several :func:`format` bugs when using the C implementation of :class:`~decimal.Decimal`: * memory leak in some rare cases when using the ``z`` format option (coerce negative 0) * incorrect output when applying the ``z`` format option to type ``F`` (fixed-point with capital ``NAN`` / @@ -1283,7 +1283,7 @@ Update macOS installer to use OpenSSL 3.0.13. .. nonce: FrQOQ0 .. section: macOS -Add Mach-O linkage support for :func:`platform.architecture()`. +Add Mach-O linkage support for :func:`platform.architecture`. .. diff --git a/Misc/NEWS.d/3.13.0a5.rst b/Misc/NEWS.d/3.13.0a5.rst index 6d74c6bc5c4..d56b1542b01 100644 --- a/Misc/NEWS.d/3.13.0a5.rst +++ b/Misc/NEWS.d/3.13.0a5.rst @@ -446,7 +446,7 @@ regardless of *lineterminator* value. .. section: Library Restore support of space delimiter with ``skipinitialspace=True`` in -:mod:`csv`. :func:`csv.writer()` now quotes empty fields if delimiter is a +:mod:`csv`. :func:`csv.writer` now quotes empty fields if delimiter is a space and skipinitialspace is true and raises exception if quoting is not possible. @@ -541,7 +541,7 @@ descriptors in :meth:`inspect.Signature.from_callable`. .. nonce: sGMKr0 .. section: Library -Isolate :mod:`_lsprof` (apply :pep:`687`). +Isolate :mod:`!_lsprof` (apply :pep:`687`). .. @@ -606,10 +606,10 @@ is complete. .. nonce: SQ998l .. section: Library -:func:`posixpath.commonpath()` now raises a :exc:`ValueError` exception when +:func:`posixpath.commonpath` now raises a :exc:`ValueError` exception when passed an empty iterable. Previously, :exc:`IndexError` was raised. -:func:`posixpath.commonpath()` now raises a :exc:`TypeError` exception when +:func:`posixpath.commonpath` now raises a :exc:`TypeError` exception when passed ``None``. Previously, :exc:`ValueError` was raised. .. @@ -773,8 +773,8 @@ combination with unicode encoding. .. section: Library Fix :func:`io.BufferedReader.tell`, :func:`io.BufferedReader.seek`, -:func:`_pyio.BufferedReader.tell`, :func:`io.BufferedRandom.tell`, -:func:`io.BufferedRandom.seek` and :func:`_pyio.BufferedRandom.tell` being +:func:`!_pyio.BufferedReader.tell`, :func:`io.BufferedRandom.tell`, +:func:`io.BufferedRandom.seek` and :func:`!_pyio.BufferedRandom.tell` being able to return negative offsets. .. @@ -823,7 +823,7 @@ Add missing call to localization function in :mod:`argparse`. .. nonce: Me7fJe .. section: Library -Fix :meth:`multiprocessing.connection.Listener.accept()` to accept empty +Fix :meth:`multiprocessing.connection.Listener.accept` to accept empty bytes as authkey. Not accepting empty bytes as key causes it to hang indefinitely. diff --git a/Misc/NEWS.d/3.13.0a6.rst b/Misc/NEWS.d/3.13.0a6.rst index fff29083e0d..b9cdbc4e146 100644 --- a/Misc/NEWS.d/3.13.0a6.rst +++ b/Misc/NEWS.d/3.13.0a6.rst @@ -264,7 +264,7 @@ Improve performance of :func:`os.path.join` and :func:`os.path.expanduser`. .. nonce: hqk9Hn .. section: Library -Raise :exc:`TypeError` for non-paths in :func:`posixpath.relpath()`. +Raise :exc:`TypeError` for non-paths in :func:`posixpath.relpath`. .. @@ -550,7 +550,7 @@ or DuplicateOptionError. .. nonce: PBiRQB .. section: Library -:class:`_io.WindowsConsoleIO` now emit a warning if a boolean value is +:class:`!_io.WindowsConsoleIO` now emit a warning if a boolean value is passed as a filedescriptor argument. .. @@ -582,7 +582,7 @@ in other tests (like importlib.resources). .. nonce: LV16SL .. 
section: Library -On Windows, :func:`time.time()` now uses the +On Windows, :func:`time.time` now uses the ``GetSystemTimePreciseAsFileTime()`` clock to have a resolution better than 1 us, instead of the ``GetSystemTimeAsFileTime()`` clock which has a resolution of 15.6 ms. Patch by Victor Stinner. @@ -673,14 +673,14 @@ fused multiply-add function. Patch by Mark Dickinson and Victor Stinner. .. section: Library The :mod:`importlib.resources` functions -:func:`~importlib.resources.is_resource()`, -:func:`~importlib.resources.open_binary()`, -:func:`~importlib.resources.open_text()`, -:func:`~importlib.resources.path()`, -:func:`~importlib.resources.read_binary()`, and -:func:`~importlib.resources.read_text()` are un-deprecated, and support +:func:`~importlib.resources.is_resource`, +:func:`~importlib.resources.open_binary`, +:func:`~importlib.resources.open_text`, +:func:`~importlib.resources.path`, +:func:`~importlib.resources.read_binary`, and +:func:`~importlib.resources.read_text` are un-deprecated, and support subdirectories via multiple positional arguments. The -:func:`~importlib.resources.contents()` function also allows subdirectories, +:func:`~importlib.resources.contents` function also allows subdirectories, but remains deprecated. .. @@ -1061,7 +1061,7 @@ when available. Contributed by vxiiduu. .. nonce: Bwfmp7 .. section: Windows -On Windows, :func:`time.monotonic()` now uses the +On Windows, :func:`time.monotonic` now uses the ``QueryPerformanceCounter()`` clock to have a resolution better than 1 us, instead of the ``GetTickCount64()`` clock which has a resolution of 15.6 ms. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/3.13.0b1.rst b/Misc/NEWS.d/3.13.0b1.rst index ab5f24fe345..97731276679 100644 --- a/Misc/NEWS.d/3.13.0b1.rst +++ b/Misc/NEWS.d/3.13.0b1.rst @@ -354,16 +354,6 @@ asend().throw() .. -.. date: 2024-04-13-18-59-25 -.. gh-issue: 115874 -.. nonce: c3xG-E -.. section: Core and Builtins - -Fixed a possible segfault during garbage collection of -``_asyncio.FutureIter`` objects - -.. - .. date: 2024-04-13-16-55-53 .. gh-issue: 117536 .. nonce: xkVbfv @@ -666,7 +656,7 @@ by :pep:`738`. .. section: Library Allow to specify the signature of custom callable instances of extension -type by the :attr:`__text_signature__` attribute. Specify signatures of +type by the ``__text_signature__`` attribute. Specify signatures of :class:`operator.attrgetter`, :class:`operator.itemgetter`, and :class:`operator.methodcaller` instances. @@ -687,10 +677,10 @@ padding is not detected when no padding is necessary. .. nonce: 5N2Xcy .. section: Library -Add the :class:`!PhotoImage` methods :meth:`~tkinter.PhotoImage.read` to -read an image from a file and :meth:`~tkinter.PhotoImage.data` to get the +Add the :class:`!PhotoImage` methods :meth:`!read` to +read an image from a file and :meth:`!data` to get the image data. Add *background* and *grayscale* parameters to -:class:`!PhotoImage` method :meth:`~tkinter.PhotoImage.write`. +:class:`!PhotoImage` method :meth:`!write`. .. @@ -702,8 +692,8 @@ image data. Add *background* and *grayscale* parameters to Add the :class:`!PhotoImage` method :meth:`!copy_replace` to copy a region from one image to other image, possibly with pixel zooming and/or subsampling. Add *from_coords* parameter to :class:`!PhotoImage` methods -:meth:`!copy()`, :meth:`!zoom()` and :meth:`!subsample()`. Add *zoom* and -*subsample* parameters to :class:`!PhotoImage` method :meth:`!copy()`. +:meth:`!copy`, :meth:`!zoom` and :meth:`!subsample`. 
Add *zoom* and +*subsample* parameters to :class:`!PhotoImage` method :meth:`!copy`. .. @@ -855,7 +845,7 @@ is used to bind indexed, nameless placeholders. See also :gh:`100668`. .. nonce: RstWg- .. section: Library -Fix TypeError in :func:`email.Message.get_payload` when the charset is +Fix TypeError in :func:`email.message.Message.get_payload` when the charset is :rfc:`2231` encoded. .. @@ -883,6 +873,16 @@ Alex Waygood. .. +.. date: 2024-04-13-18-59-25 +.. gh-issue: 115874 +.. nonce: c3xG-E +.. section: Library + +Fixed a possible segfault during garbage collection of +``_asyncio.FutureIter`` objects. Patch by Savannah Ostrowski. + +.. + .. date: 2024-04-13-01-45-15 .. gh-issue: 115060 .. nonce: IxoM03 @@ -953,7 +953,7 @@ Speed up :meth:`pathlib.Path.walk` by working with strings internally. .. nonce: oxIUEI .. section: Library -Change the new multi-separator support in :meth:`asyncio.Stream.readuntil` +Change the new multi-separator support in :meth:`asyncio.StreamReader.readuntil` to only accept tuples of separators rather than arbitrary iterables. .. @@ -1213,7 +1213,7 @@ changed from ``'r'`` to ``'rb'``. .. nonce: eeS6w7 .. section: Library -Fix :func:`inspect.signature()` to correctly handle parameter defaults on +Fix :func:`inspect.signature` to correctly handle parameter defaults on methods in extension modules that use names defined in the module namespace. .. @@ -1260,7 +1260,7 @@ Support opcode events in :mod:`bdb` .. nonce: YoI8TV .. section: Library -:mod:`ncurses`: fixed a crash that could occur on macOS 13 or earlier when +:mod:`!ncurses`: fixed a crash that could occur on macOS 13 or earlier when Python was built with Apple Xcode 15's SDK. .. @@ -1315,7 +1315,7 @@ Hamdan. .. section: Library Adjust ``logging.LogRecord`` to use ``time.time_ns()`` and fix minor bug -related to floating point math. +related to floating-point math. .. @@ -1347,13 +1347,13 @@ urllib. .. nonce: du4UKW .. section: Library -Setting the :mod:`!tkinter` module global :data:`~tkinter.wantobject` to ``2`` +Setting the :mod:`!tkinter` module global :data:`!wantobjects` to ``2`` before creating the :class:`~tkinter.Tk` object or call the -:meth:`~tkinter.Tk.wantobject` method of the :class:`!Tk` object with argument +:meth:`!wantobjects` method of the :class:`!Tk` object with argument ``2`` makes now arguments to callbacks registered in the :mod:`tkinter` module to be passed as various Python objects (``int``, ``float``, ``bytes``, ``tuple``), depending on their internal representation in Tcl, instead of always ``str``. -:data:`!tkinter.wantobject` is now set to ``2`` by default. +:data:`!tkinter.wantobjects` is now set to ``2`` by default. .. diff --git a/Misc/NEWS.d/3.13.0b2.rst b/Misc/NEWS.d/3.13.0b2.rst index ec9a6ca4463..d5559fbaa33 100644 --- a/Misc/NEWS.d/3.13.0b2.rst +++ b/Misc/NEWS.d/3.13.0b2.rst @@ -477,6 +477,7 @@ Fix errors in calling Tkinter bindings on Windows. .. nonce: OCQsAY .. section: Library +Fix :func:`os.path.isfile` on Windows for pipes. Speedup :func:`os.path.isjunction` and :func:`os.path.lexists` on Windows with a native implementation. diff --git a/Misc/NEWS.d/3.13.0b3.rst b/Misc/NEWS.d/3.13.0b3.rst index e9b55d6e52b..87a178712d0 100644 --- a/Misc/NEWS.d/3.13.0b3.rst +++ b/Misc/NEWS.d/3.13.0b3.rst @@ -423,7 +423,7 @@ to achieve consistency with C-extension implementation. .. nonce: 8o9Dzr .. section: Library -Fix memory leak in :func:`re.sub()` when the replacement string contains +Fix memory leak in :func:`re.sub` when the replacement string contains backreferences. .. 
diff --git a/Misc/NEWS.d/3.13.0b4.rst b/Misc/NEWS.d/3.13.0b4.rst new file mode 100644 index 00000000000..69b64f3f860 --- /dev/null +++ b/Misc/NEWS.d/3.13.0b4.rst @@ -0,0 +1,476 @@ +.. date: 2024-07-04-15-10-29 +.. gh-issue: 121084 +.. nonce: qxcd5d +.. release date: 2024-07-18 +.. section: Tests + +Fix test_typing random leaks. Clear typing ABC caches when running tests for +refleaks (``-R`` option): call ``_abc_caches_clear()`` on typing abstract +classes and their subclasses. Patch by Victor Stinner. + +.. + +.. date: 2024-07-03-14-41-00 +.. gh-issue: 121160 +.. nonce: LEtiTd +.. section: Tests + +Add a test for :func:`readline.set_history_length`. Note that this test may +fail on readline libraries. + +.. + +.. date: 2024-07-01-16-15-06 +.. gh-issue: 121200 +.. nonce: 4Pc-gc +.. section: Tests + +Fix ``test_expanduser_pwd2()`` of ``test_posixpath``. Call ``getpwnam()`` +to get ``pw_dir``, since it can be different than ``getpwall()`` ``pw_dir``. +Patch by Victor Stinner. + +.. + +.. date: 2024-07-01-09-04-32 +.. gh-issue: 121188 +.. nonce: XbuTVa +.. section: Tests + +When creating the JUnit XML file, regrtest now escapes characters which are +invalid in XML, such as the chr(27) control character used in ANSI escape +sequences. Patch by Victor Stinner. + +.. + +.. date: 2024-07-14-06-24-02 +.. gh-issue: 57141 +.. nonce: C3jhDh +.. section: Library + +The *shallow* argument to :class:`filecmp.dircmp` (new in Python 3.13) is +now keyword-only. + +.. + +.. date: 2024-07-13-06-23-24 +.. gh-issue: 121245 +.. nonce: RfOgf4 +.. section: Library + +Simplify handling of the history file in ``site.register_readline()`` +helper. The ``CAN_USE_PYREPL`` variable now will be initialized, when +imported. Patch by Sergey B Kirpichev. + +.. + +.. date: 2024-07-03-07-25-21 +.. gh-issue: 121332 +.. nonce: Iz6FEq +.. section: Library + +Fix constructor of :mod:`ast` nodes with custom ``_attributes``. Previously, +passing custom attributes would raise a :py:exc:`DeprecationWarning`. +Passing arguments to the constructor that are not in ``_fields`` or +``_attributes`` remains deprecated. Patch by Jelle Zijlstra. + +.. + +.. date: 2024-07-02-19-36-54 +.. gh-issue: 121279 +.. nonce: BltDo9 +.. section: Library + +Avoid :exc:`NameError` for the :mod:`warnings` module when accessing the +deprecated attributes of the :mod:`importlib.abc` module. + +.. + +.. date: 2024-07-02-11-34-06 +.. gh-issue: 121245 +.. nonce: sSkDAr +.. section: Library + +Fix a bug in the handling of the command history of the new :term:`REPL` +that caused the history file to be wiped at REPL exit. + +.. + +.. date: 2024-06-29-05-08-59 +.. gh-issue: 87744 +.. nonce: rpF6Jw +.. section: Library + +Fix waitpid race while calling +:meth:`~asyncio.subprocess.Process.send_signal` in asyncio. Patch by Kumar +Aditya. + +.. + +.. date: 2024-06-26-03-04-24 +.. gh-issue: 121018 +.. nonce: clVSc4 +.. section: Library + +Fixed other issues where :class:`argparse.ArgumentParser` did not honor +``exit_on_error=False``. + +.. + +.. date: 2024-06-22-17-01-56 +.. gh-issue: 120678 +.. nonce: Ik8dCg +.. section: Library + +Fix regression in the new REPL that meant that globals from files passed +using the ``-i`` argument would not be included in the REPL's global +namespace. Patch by Alex Waygood. + +.. + +.. date: 2024-06-21-12-00-16 +.. gh-issue: 120782 +.. nonce: LOE8tj +.. section: Library + +Fix wrong references of the :mod:`datetime` types after reloading the +module. + +.. + +.. date: 2024-06-21-06-37-46 +.. gh-issue: 120713 +.. nonce: WBbQx4 +..
section: Library + +:meth:`datetime.datetime.strftime` now 0-pads years with less than four +digits for the format specifiers ``%Y`` and ``%G`` on Linux. Patch by Ben +Hsing + +.. + +.. date: 2024-06-07-10-10-32 +.. gh-issue: 117983 +.. nonce: NeMR9n +.. section: Library + +Defer the ``threading`` import in ``importlib.util`` until lazy loading is +used. + +.. + +.. date: 2024-05-20-13-48-37 +.. gh-issue: 119189 +.. nonce: dhJVs5 +.. section: Library + +When using the ``**`` operator or :func:`pow` with +:class:`~fractions.Fraction` as the base and an exponent that is not +rational, a float, or a complex, the fraction is no longer converted to a +float. + +.. + +.. date: 2024-05-07-17-38-53 +.. gh-issue: 118714 +.. nonce: XXKpVZ +.. section: Library + +Allow ``restart`` in post-mortem debugging of :mod:`pdb`. Removed restart +message when the user quits pdb from post-mortem mode. + +.. + +.. date: 2023-06-17-09-07-06 +.. gh-issue: 105623 +.. nonce: 5G06od +.. section: Library + +Fix performance degradation in +:class:`logging.handlers.RotatingFileHandler`. Patch by Craig Robson. + +.. + +.. date: 2024-07-16-16-57-03 +.. gh-issue: 78889 +.. nonce: U7ghFD +.. section: IDLE + +Stop Shell freezes by blocking user access to non-method sys.stdout.shell +attributes, which are all private. + +.. + +.. date: 2024-07-14-11-48-10 +.. gh-issue: 121749 +.. nonce: nxHoTk +.. section: Documentation + +Fix documentation for :c:func:`PyModule_AddObjectRef`. + +.. + +.. date: 2024-06-05-12-36-18 +.. gh-issue: 120012 +.. nonce: f14DbQ +.. section: Documentation + +Clarify the behaviours of :meth:`multiprocessing.Queue.empty` and +:meth:`multiprocessing.SimpleQueue.empty` on closed queues. Patch by +Bénédikt Tran. + +.. + +.. date: 2024-07-16-18-23-22 +.. gh-issue: 121860 +.. nonce: -FTauD +.. section: Core and Builtins + +Fix crash when rematerializing a managed dictionary after it was deleted. + +.. + +.. date: 2024-07-15-20-41-06 +.. gh-issue: 121814 +.. nonce: oR2ixR +.. section: Core and Builtins + +Fixed the SegFault when :c:func:`PyEval_SetTrace` is used with no Python +frame on stack. + +.. + +.. date: 2024-07-15-20-03-29 +.. gh-issue: 121295 +.. nonce: w53ucI +.. section: Core and Builtins + +Fix PyREPL console getting into a blocked state after interrupting a long +paste + +.. + +.. date: 2024-07-15-16-26-32 +.. gh-issue: 121794 +.. nonce: fhBtiQ +.. section: Core and Builtins + +Fix bug in free-threaded Python where a resurrected object could lead to a +negative ref count assertion failure. + +.. + +.. date: 2024-07-13-12-27-31 +.. gh-issue: 121657 +.. nonce: wgOYLw +.. section: Core and Builtins + +Improve the :exc:`SyntaxError` message if the user tries to use +:keyword:`yield from ` outside a function. + +.. + +.. date: 2024-07-13-09-51-44 +.. gh-issue: 121609 +.. nonce: jWsE5t +.. section: Core and Builtins + +Fix pasting of characters containing unicode character joiners in the new +REPL. Patch by Marta Gomez Macias + +.. + +.. date: 2024-07-10-15-43-54 +.. gh-issue: 117482 +.. nonce: 5WYaXR +.. section: Core and Builtins + +Unexpected slot wrappers are no longer created for builtin static types in +subinterpreters. + +.. + +.. date: 2024-07-09-13-53-18 +.. gh-issue: 121499 +.. nonce: rpp7il +.. section: Core and Builtins + +Fix a bug affecting how multi-line history was being rendered in the new +REPL after interacting with the new screen cache. Patch by Pablo Galindo + +.. + +.. date: 2024-07-08-17-15-14 +.. gh-issue: 121497 +.. nonce: I8hMDC +.. 
section: Core and Builtins + +Fix a bug that was preventing the REPL to correctly respect the history when +an input hook was set. Patch by Pablo Galindo + +.. + +.. date: 2024-07-08-10-31-08 +.. gh-issue: 121012 +.. nonce: M5hHk- +.. section: Core and Builtins + +Tier 2 execution now ensures that list iterators remain exhausted, once they +become exhausted. + +.. + +.. date: 2024-07-08-02-24-55 +.. gh-issue: 121439 +.. nonce: jDHod3 +.. section: Core and Builtins + +Allow tuples of length 20 in the freelist to be reused. + +.. + +.. date: 2024-07-04-23-38-30 +.. gh-issue: 121368 +.. nonce: m3EF9E +.. section: Core and Builtins + +Fix race condition in ``_PyType_Lookup`` in the free-threaded build due to a +missing memory fence. This could lead to ``_PyType_Lookup`` returning +incorrect results on arm64. + +.. + +.. date: 2024-06-29-10-46-14 +.. gh-issue: 121130 +.. nonce: Rj66Xs +.. section: Core and Builtins + +Fix f-strings with debug expressions in format specifiers. Patch by Pablo +Galindo + +.. + +.. date: 2024-06-28-10-02-58 +.. gh-issue: 121115 +.. nonce: EeSLfc +.. section: Core and Builtins + +:c:func:`PyLong_AsNativeBytes` no longer uses :meth:`~object.__index__` +methods by default. The ``Py_ASNATIVEBYTES_ALLOW_INDEX`` flag has been added +to allow it. + +.. + +.. date: 2024-07-09-15-55-20 +.. gh-issue: 89364 +.. nonce: yYYroI +.. section: C API + +Export the :c:func:`PySignal_SetWakeupFd` function. Previously, the function +was documented but it couldn't be used in 3rd party code. Patch by Victor +Stinner. + +.. + +.. date: 2024-07-04-15-41-10 +.. gh-issue: 113993 +.. nonce: cLSiWV +.. section: C API + +:c:func:`PyUnicode_InternInPlace` no longer prevents its argument from being +garbage collected. + +Several functions that take ``char *`` are now documented as possibly +preventing string objects from being garbage collected; refer to their +documentation for details: :c:func:`PyUnicode_InternFromString`, +:c:func:`PyDict_SetItemString`, :c:func:`PyObject_SetAttrString`, +:c:func:`PyObject_DelAttrString`, :c:func:`PyUnicode_InternFromString`, and +``PyModule_Add*`` convenience functions. + +.. + +.. date: 2024-07-04-13-23-27 +.. gh-issue: 113601 +.. nonce: K3RLqp +.. section: C API + +Removed debug build assertions related to interning strings, which were +falsely triggered by stable ABI extensions. + +.. + +.. date: 2024-07-02-11-03-40 +.. gh-issue: 112136 +.. nonce: f3fiY8 +.. section: C API + +Restore the private ``_PyArg_Parser`` structure and the private +``_PyArg_ParseTupleAndKeywordsFast()`` function, previously removed in +Python 3.13 alpha 1. Patch by Victor Stinner. + +.. + +.. date: 2024-07-16-12-29-54 +.. gh-issue: 120371 +.. nonce: E7x858 +.. section: Build + +Support WASI SDK 22 by explicitly skipping functions that are just stubs in +wasi-libc. + +.. + +.. date: 2024-07-14-01-29-47 +.. gh-issue: 121731 +.. nonce: RMPGP3 +.. section: Build + +Fix mimalloc compile error on GNU/Hurd + +.. + +.. date: 2024-07-08-14-01-17 +.. gh-issue: 121487 +.. nonce: ekHmpR +.. section: Build + +Fix deprecation warning for ATOMIC_VAR_INIT in mimalloc. + +.. + +.. date: 2024-07-08-01-11-54 +.. gh-issue: 121467 +.. nonce: 3qWRQj +.. section: Build + +Fix a Makefile bug that prevented mimalloc header files from being +installed. + +.. + +.. date: 2024-07-02-20-16-09 +.. gh-issue: 121103 +.. nonce: TMef9j +.. 
section: Build + +On POSIX systems, excluding macOS framework installs, the lib directory for +the free-threaded build now includes a "t" suffix to avoid conflicts with a +co-located default build installation. + +.. + +.. date: 2024-07-02-12-42-25 +.. gh-issue: 120831 +.. nonce: i3eIjg +.. section: Build + +The default minimum iOS version was increased to 13.0. + +.. + +.. date: 2024-06-02-13-23-26 +.. gh-issue: 113565 +.. nonce: 8xBlId +.. section: Build + +Improve :mod:`curses` and :mod:`curses.panel` dependency checks in +:program:`configure`. diff --git a/Misc/NEWS.d/3.13.0rc1.rst b/Misc/NEWS.d/3.13.0rc1.rst new file mode 100644 index 00000000000..84e693e0616 --- /dev/null +++ b/Misc/NEWS.d/3.13.0rc1.rst @@ -0,0 +1,273 @@ +.. date: 2024-07-13-11-48-20 +.. gh-issue: 59022 +.. nonce: fYNbQ8 +.. release date: 2024-07-31 +.. section: Tests + +Add tests for :func:`pkgutil.extend_path`. Patch by Andreas Stocker. + +.. + +.. date: 2024-07-13-11-04-44 +.. gh-issue: 99242 +.. nonce: aGxnwz +.. section: Tests + +:func:`os.getloadavg` may throw :exc:`OSError` when running regression tests +under certain conditions (e.g. chroot). This error is now caught and +ignored, since reporting load average is optional. + +.. + +.. date: 2024-07-22-13-11-28 +.. gh-issue: 122133 +.. nonce: 0mPeta +.. section: Security + +Authenticate the socket connection for the ``socket.socketpair()`` fallback +on platforms where ``AF_UNIX`` is not available like Windows. + +Patch by Gregory P. Smith and Seth Larson +. Reported by Ellie + +.. + +.. date: 2024-07-18-13-17-47 +.. gh-issue: 121957 +.. nonce: QemKLU +.. section: Security + +Fixed missing audit events around interactive use of Python, now also +properly firing for ``python -i``, as well as for ``python -m asyncio``. The +events in question are ``cpython.run_stdin`` and ``cpython.run_startup``. + +.. + +.. date: 2024-07-29-16-47-08 +.. gh-issue: 122400 +.. nonce: fM0YSv +.. section: Library + +Handle :exc:`ValueError`\s raised by :func:`os.stat` in +:class:`filecmp.dircmp` and :func:`filecmp.cmpfiles`. Patch by Bénédikt +Tran. + +.. + +.. date: 2024-07-29-10-24-48 +.. gh-issue: 122311 +.. nonce: xChV1b +.. section: Library + +Fix some error messages in :mod:`pickle`. + +.. + +.. date: 2024-07-26-21-21-13 +.. gh-issue: 122332 +.. nonce: fvw88r +.. section: Library + +Fixed segfault with :meth:`asyncio.Task.get_coro` when using an eager task +factory. + +.. + +.. date: 2024-07-25-15-41-14 +.. gh-issue: 105733 +.. nonce: o3koJA +.. section: Library + +:func:`ctypes.ARRAY` is now :term:`soft deprecated`: it no longer emits +deprecation warnings and is not scheduled for removal. + +.. + +.. date: 2024-07-24-09-29-55 +.. gh-issue: 122087 +.. nonce: FdBrWo +.. section: Library + +Restore :func:`inspect.ismethoddescriptor` and :func:`inspect.isroutine` +returning ``False`` for :class:`functools.partial` objects. + +.. + +.. date: 2024-07-23-15-30-23 +.. gh-issue: 122170 +.. nonce: Z9gi3Y +.. section: Library + +Handle :exc:`ValueError`\s raised by :func:`os.stat` in :mod:`linecache`. +Patch by Bénédikt Tran. + +.. + +.. date: 2024-07-23-09-14-44 +.. gh-issue: 82951 +.. nonce: -F5p5A +.. section: Library + +Serializing objects with complex ``__qualname__`` (such as unbound methods +and nested classes) by name no longer involves serializing parent objects by +value in pickle protocols < 4. + +.. + +.. date: 2024-07-22-08-14-04 +.. gh-issue: 113785 +.. nonce: 6B_KNB +.. 
section: Library + +:mod:`csv` now correctly parses numeric fields (when used with +:const:`csv.QUOTE_NONNUMERIC` or :const:`csv.QUOTE_STRINGS`) which start +with an escape character. + +.. + +.. date: 2024-07-21-18-03-30 +.. gh-issue: 122088 +.. nonce: vi2bP- +.. section: Library + +:func:`@warnings.deprecated ` now copies the coroutine +status of functions and methods so that :func:`inspect.iscoroutinefunction` +returns the correct result. + +.. + +.. date: 2024-07-14-11-18-28 +.. gh-issue: 120930 +.. nonce: Kuo4L0 +.. section: Library + +Fixed a bug introduced by gh-92081 that added an incorrect extra blank to +encoded words occurring in wrapped headers. + +.. + +.. date: 2024-07-08-03-45-34 +.. gh-issue: 121474 +.. nonce: NsvrUN +.. section: Library + +Fix missing sanity check for ``parties`` arg in :class:`threading.Barrier` +constructor. Patch by Clinton Christian (pygeek). + +.. + +.. date: 2024-06-09-19-53-11 +.. gh-issue: 120289 +.. nonce: s4HXR0 +.. section: Library + +Fixed the use-after-free issue in :mod:`cProfile` by disallowing +``disable()`` and ``clear()`` in external timers. + +.. + +.. date: 2024-07-30-18-02-55 +.. gh-issue: 122482 +.. nonce: TerE0g +.. section: IDLE + +Change About IDLE to direct users to discuss.python.org instead of the now +unused idle-dev email and mailing list. + +.. + +.. date: 2024-07-29-10-55-46 +.. gh-issue: 116090 +.. nonce: p1MhU0 +.. section: Core and Builtins + +Fix an issue in JIT builds that prevented some :keyword:`for` loops from +correctly firing :monitoring-event:`RAISE` monitoring events. + +.. + +.. date: 2024-07-26-21-26-33 +.. gh-issue: 122208 +.. nonce: z8KHsY +.. section: Core and Builtins + +Dictionary watchers now only deliver the PyDict_EVENT_ADDED event when the +insertion is in a known good state to succeed. + +.. + +.. date: 2024-07-26-14-05-51 +.. gh-issue: 122300 +.. nonce: SVIF-l +.. section: Core and Builtins + +Preserve AST nodes for f-string with single-element format specifiers. Patch +by Pablo Galindo + +.. + +.. date: 2024-07-26-13-56-32 +.. gh-issue: 120906 +.. nonce: qBh2I9 +.. section: Core and Builtins + +:attr:`frame.f_locals` now supports arbitrary hashable objects as keys. + +.. + +.. date: 2024-07-21-01-23-54 +.. gh-issue: 122029 +.. nonce: gKv-e2 +.. section: Core and Builtins + +Emit ``c_call`` events in :func:`sys.setprofile` when a ``PyMethodObject`` +pointing to a ``PyCFunction`` is called. + +.. + +.. date: 2024-07-19-15-28-05 +.. gh-issue: 122026 +.. nonce: sta2Ca +.. section: Core and Builtins + +Fix a bug that caused the tokenizer to not correctly identify mismatched +parentheses inside f-strings in some situations. Patch by Pablo Galindo + +.. + +.. date: 2024-05-30-04-11-36 +.. gh-issue: 118934 +.. nonce: fbDqve +.. section: Core and Builtins + +Make ``PyEval_GetLocals`` return borrowed reference + +.. + +.. date: 2024-07-30-23-48-26 +.. gh-issue: 116622 +.. nonce: yTTtil +.. section: C API + +Make :any:`PyObject_Print` work around a bug in Android and OpenBSD which +prevented it from throwing an exception when trying to write to a read-only +stream. + +.. + +.. date: 2024-07-21-17-40-07 +.. gh-issue: 121489 +.. nonce: SUMFCr +.. section: C API + +Export private :c:func:`!_PyBytes_Join` again. + +.. + +.. date: 2024-07-18-07-53-07 +.. gh-issue: 120522 +.. nonce: dg3o5A +.. section: Build + +Added a :option:`--with-app-store-compliance` option to patch out known +issues with macOS/iOS App Store review processes. 
diff --git a/Misc/NEWS.d/3.13.0rc2.rst b/Misc/NEWS.d/3.13.0rc2.rst new file mode 100644 index 00000000000..efd66156685 --- /dev/null +++ b/Misc/NEWS.d/3.13.0rc2.rst @@ -0,0 +1,753 @@ +.. date: 2024-09-04-11-55-29 +.. gh-issue: 123418 +.. nonce: 8P4bmN +.. release date: 2024-09-06 +.. section: macOS + +Updated macOS installer build to use OpenSSL 3.0.15. + +.. + +.. date: 2024-09-04-09-59-18 +.. gh-issue: 123418 +.. nonce: QaMC12 +.. section: Windows + +Updated Windows build to use OpenSSL 3.0.15. + +.. + +.. date: 2024-08-01-10-55-15 +.. gh-issue: 122573 +.. nonce: 4-UCFY +.. section: Windows + +The Windows build of CPython now requires 3.10 or newer. + +.. + +.. date: 2024-07-19-21-50-54 +.. gh-issue: 100256 +.. nonce: GDrKba +.. section: Windows + +:mod:`mimetypes` no longer fails when it encounters an inaccessible registry +key. + +.. + +.. date: 2022-04-20-18-32-30 +.. gh-issue: 79846 +.. nonce: Vggv3f +.. section: Windows + +Makes :code:`ssl.create_default_context()` ignore invalid certificates in +the Windows certificate store + +.. + +.. date: 2024-09-04-10-07-51 +.. gh-issue: 123418 +.. nonce: 1eIFZb +.. section: Tools/Demos + +Update GitHub CI workflows to use OpenSSL 3.0.15 and multissltests to use +3.0.15, 3.1.7, and 3.2.3. + +.. + +.. date: 2024-05-29-15-28-08 +.. gh-issue: 119727 +.. nonce: dVkaZM +.. section: Tests + +Add ``--single-process`` command line option to Python test runner +(regrtest). Patch by Victor Stinner. + +.. + +.. date: 2024-05-04-22-56-41 +.. gh-issue: 101525 +.. nonce: LHK166 +.. section: Tests + +Skip ``test_gdb`` if the binary is relocated by BOLT. Patch by Donghee Na. + +.. + +.. date: 2024-09-04-12-41-35 +.. gh-issue: 123678 +.. nonce: N41y9n +.. section: Security + +Upgrade libexpat to 2.6.3 + +.. + +.. date: 2024-07-02-13-39-20 +.. gh-issue: 121285 +.. nonce: hrl-yI +.. section: Security + +Remove backtracking from tarfile header parsing for ``hdrcharset``, PAX, and +GNU sparse headers. + +.. + +.. date: 2024-09-04-18-23-43 +.. gh-issue: 123657 +.. nonce: Oks4So +.. section: Library + +Fix crash and memory leak in :func:`decimal.getcontext`. It crashed when +using a thread-local context by ``--with-decimal-contextvar=no``. + +.. + +.. date: 2024-08-28-20-08-19 +.. gh-issue: 123448 +.. nonce: tItJlp +.. section: Library + +Fixed memory leak of :class:`typing.NoDefault` by moving it to the static +types array. + +.. + +.. date: 2024-08-28-13-03-36 +.. gh-issue: 123409 +.. nonce: lW0YF- +.. section: Library + +Fix :attr:`ipaddress.IPv6Address.reverse_pointer` output according to +:rfc:`RFC 3596, §2.5 <3596#section-2.5>`. Patch by Bénédikt Tran. + +.. + +.. date: 2024-08-26-13-45-20 +.. gh-issue: 123270 +.. nonce: gXHvNJ +.. section: Library + +Applied a more surgical fix for malformed payloads in :class:`zipfile.Path` +causing infinite loops (gh-122905) without breaking contents using +legitimate characters. + +.. + +.. date: 2024-08-24-06-05-41 +.. gh-issue: 123228 +.. nonce: jR_5O5 +.. section: Library + +Fix return type for +:func:`!_pyrepl.readline._ReadlineWrapper.get_line_buffer` to be +:func:`str`. Patch by Sergey B Kirpichev. + +.. + +.. date: 2024-08-24-00-03-01 +.. gh-issue: 123240 +.. nonce: uFPG3l +.. section: Library + +Raise audit events for the :func:`input` in the new REPL. + +.. + +.. date: 2024-08-22-20-10-13 +.. gh-issue: 123243 +.. nonce: Kifj1L +.. section: Library + +Fix memory leak in :mod:`!_decimal`. + +.. + +.. date: 2024-08-22-11-25-19 +.. gh-issue: 122546 +.. nonce: BSmeE7 +.. 
section: Library + +Consistently use the same file name for different exceptions in the new REPL. +Patch by Sergey B Kirpichev. + +.. + +.. date: 2024-08-22-09-37-48 +.. gh-issue: 123213 +.. nonce: owmXnP +.. section: Library + +:meth:`xml.etree.ElementTree.Element.extend` and +:class:`~xml.etree.ElementTree.Element` assignment no longer hide the +internal exception if an erroneous generator is passed. Patch by Bar Harel. + +.. + +.. date: 2024-08-20-18-02-27 +.. gh-issue: 85110 +.. nonce: 8_iDQy +.. section: Library + +Preserve relative path in URL without netloc in +:func:`urllib.parse.urlunsplit` and :func:`urllib.parse.urlunparse`. + +.. + +.. date: 2024-08-16-19-13-21 +.. gh-issue: 123067 +.. nonce: Nx9O4R +.. section: Library + +Fix quadratic complexity in parsing ``"``-quoted cookie values with +backslashes by :mod:`http.cookies`. + +.. + +.. date: 2024-08-14-10-41-11 +.. gh-issue: 122981 +.. nonce: BHV0Z9 +.. section: Library + +Fix :func:`inspect.getsource` for generated classes with Python base classes +(e.g. enums). + +.. + +.. date: 2024-08-11-14-23-07 +.. gh-issue: 122903 +.. nonce: xktZta +.. section: Library + +``zipfile.Path.glob`` now correctly matches directories instead of silently +omitting them. + +.. + +.. date: 2024-08-11-14-08-04 +.. gh-issue: 122905 +.. nonce: 7tDsxA +.. section: Library + +:class:`zipfile.Path` objects now sanitize names from the zipfile. + +.. + +.. date: 2024-08-08-15-05-58 +.. gh-issue: 122695 +.. nonce: f7pwBv +.. section: Library + +Fixed double-free when using :func:`gc.get_referents` with a freed +:class:`asyncio.Future` iterator. + +.. + +.. date: 2024-08-07-17-41-16 +.. gh-issue: 116263 +.. nonce: EcXir0 +.. section: Library + +:class:`logging.handlers.RotatingFileHandler` no longer rolls over empty log +files. + +.. + +.. date: 2024-08-07-14-12-19 +.. gh-issue: 105376 +.. nonce: QbGPdE +.. section: Library + +Restore the deprecated :mod:`logging` ``warn()`` method. It was removed in +Python 3.13 alpha 1. Keep the deprecated ``warn()`` method in Python 3.13. +Patch by Victor Stinner. + +.. + +.. date: 2024-08-06-18-07-19 +.. gh-issue: 122744 +.. nonce: kCzNDI +.. section: Library + +Bump the version of pip bundled in ensurepip to version 24.2. + +.. + +.. date: 2024-08-04-14-07-18 +.. gh-issue: 118814 +.. nonce: uiyks1 +.. section: Library + +Fix the :class:`typing.TypeVar` constructor when name is passed by keyword. + +.. + +.. date: 2024-07-31-20-43-21 +.. gh-issue: 122478 +.. nonce: sCU2Le +.. section: Library + +Remove internal frames from tracebacks shown in +:class:`code.InteractiveInterpreter` with non-default +:func:`sys.excepthook`. Save correct tracebacks in +:attr:`sys.last_traceback` and update ``__traceback__`` attribute of +:attr:`sys.last_value` and :attr:`sys.last_exc`. + +.. + +.. date: 2024-07-31-15-08-42 +.. gh-issue: 116622 +.. nonce: aKxIQA +.. section: Library + +On Android, the ``FICLONE`` and ``FICLONERANGE`` constants are no longer +exposed by :mod:`fcntl`, as these ioctls are blocked by SELinux. + +.. + +.. date: 2024-07-31-14-55-41 +.. gh-issue: 82378 +.. nonce: eZvYmR +.. section: Library + +Make sure that the new :term:`REPL` interprets :data:`sys.tracebacklimit` in +the same way that the classic REPL did. + +.. + +.. date: 2024-07-30-21-29-30 +.. gh-issue: 122334 +.. nonce: LeoE1x +.. section: Library + +Fix crash when importing :mod:`ssl` after the main interpreter restarts. + +.. + +.. date: 2024-07-30-14-46-16 +.. gh-issue: 87320 +.. nonce: -Yk1wb +..
section: Library + +In :class:`code.InteractiveInterpreter`, handle exceptions caused by calling +a non-default :func:`sys.excepthook`. Before, the exception bubbled up to +the caller, ending the :term:`REPL`. + +.. + +.. date: 2024-07-27-16-10-41 +.. gh-issue: 121650 +.. nonce: nf6oc9 +.. section: Library + +:mod:`email` headers with embedded newlines are now quoted on output. The +:mod:`~email.generator` will now refuse to serialize (write) headers that +are unsafely folded or delimited; see +:attr:`~email.policy.Policy.verify_generated_headers`. (Contributed by Bas +Bloemsaat and Petr Viktorin in :gh:`121650`.) + +.. + +.. date: 2024-07-23-10-59-38 +.. gh-issue: 121723 +.. nonce: iJEf7e +.. section: Library + +Make :func:`logging.config.dictConfig` accept any object implementing the +Queue public API. See the :ref:`queue configuration ` +section for details. Patch by Bénédikt Tran. + +.. + +.. date: 2024-07-21-10-45-24 +.. gh-issue: 122081 +.. nonce: dNrYMq +.. section: Library + +Fix a crash in the :func:`!decimal.IEEEContext` optional function available +via the ``EXTRA_FUNCTIONALITY`` configuration flag. + +.. + +.. date: 2024-07-16-20-49-07 +.. gh-issue: 121804 +.. nonce: gYN-In +.. section: Library + +Correctly show error locations, when :exc:`SyntaxError` raised in new repl. +Patch by Sergey B Kirpichev. + +.. + +.. date: 2024-06-29-15-23-26 +.. gh-issue: 121151 +.. nonce: HeLEvq +.. section: Library + +Fix wrapping of long usage text of arguments inside a mutually exclusive +group in :mod:`argparse`. + +.. + +.. date: 2023-12-12-15-19-58 +.. gh-issue: 108172 +.. nonce: KyDPuG +.. section: Library + +``webbrowser`` honors OS preferred browser on Linux when its desktop entry +name contains the text of a known browser name. + +.. + +.. date: 2023-09-19-17-56-24 +.. gh-issue: 109109 +.. nonce: WJvvX2 +.. section: Library + +You can now get the raw TLS certificate chains from TLS connections via +:meth:`ssl.SSLSocket.get_verified_chain` and +:meth:`ssl.SSLSocket.get_unverified_chain` methods. + +Contributed by Mateusz Nowak. + +.. + +.. date: 2024-06-16-21-42-45 +.. gh-issue: 120083 +.. nonce: nczuyv +.. section: IDLE + +Add explicit black IDLE Hovertip foreground color needed for recent macOS. +Fixes Sonoma showing unreadable white on pale yellow. Patch by John +Riggles. + +.. + +.. date: 2024-09-06-19-23-44 +.. gh-issue: 120221 +.. nonce: giJEDT +.. section: Core and Builtins + +asyncio REPL is now again properly recognizing KeyboardInterrupts. Display +of exceptions raised in secondary threads is fixed. + +.. + +.. date: 2024-09-06-14-13-01 +.. gh-issue: 119310 +.. nonce: WQxyDF +.. section: Core and Builtins + +Allow the new interactive shell to read history files written with the +editline library that use unicode-escaped entries. Patch by aorcajo and +Łukasz Langa. + +.. + +.. date: 2024-09-03-13-34-35 +.. gh-issue: 123572 +.. nonce: uuqoYV +.. section: Core and Builtins + +Fix key mappings for various F-keys in Windows for the new REPL. Patch by +devdanzin + +.. + +.. date: 2024-09-02-17-32-15 +.. gh-issue: 119034 +.. nonce: HYh5Vj +.. section: Core and Builtins + +Change ```` and ```` keys of the Python REPL to history +search forward/backward. Patch by Victor Stinner. + +.. + +.. date: 2024-09-01-00-02-05 +.. gh-issue: 123545 +.. nonce: 8nQNbL +.. section: Core and Builtins + +Fix a double decref in rare cases on experimental JIT builds. + +.. + +.. date: 2024-08-29-19-46-07 +.. gh-issue: 123484 +.. nonce: rjUn_F +.. 
section: Core and Builtins + +Fix ``_Py_DebugOffsets`` for long objects to be relative to the start of the +object rather than the start of a subobject. + +.. + +.. date: 2024-08-27-13-16-40 +.. gh-issue: 123344 +.. nonce: 56Or78 +.. section: Core and Builtins + +Add AST optimizations for type parameter defaults. + +.. + +.. date: 2024-08-26-00-58-26 +.. gh-issue: 123321 +.. nonce: ApxcnE +.. section: Core and Builtins + +Prevent Parser/myreadline race condition from segfaulting on multi-threaded +use. Patch by Bar Harel and Amit Wienner. + +.. + +.. date: 2024-08-25-18-27-49 +.. gh-issue: 123177 +.. nonce: yLuyqE +.. section: Core and Builtins + +Fix a bug causing stray prompts to appear in the middle of wrapped lines in +the new REPL. + +.. + +.. date: 2024-08-25-10-54-22 +.. gh-issue: 122982 +.. nonce: KLD91q +.. section: Core and Builtins + +Extend the deprecation period for bool inversion (``~``) by two years. + +.. + +.. date: 2024-08-23-18-31-10 +.. gh-issue: 123275 +.. nonce: DprIrj +.. section: Core and Builtins + +Support :option:`-X gil=1 <-X>` and :envvar:`PYTHON_GIL=1 ` on +non-free-threaded builds. + +.. + +.. date: 2024-08-23-15-59-54 +.. gh-issue: 123177 +.. nonce: OLcaC5 +.. section: Core and Builtins + +Deactivate line wrap in the Apple Terminal via an ANSI escape code. Patch by +Pablo Galindo + +.. + +.. date: 2024-08-23-13-08-27 +.. gh-issue: 123229 +.. nonce: aHm-dw +.. section: Core and Builtins + +Fix valgrind warning by initializing the f-string buffers to 0 in the +tokenizer. Patch by Pablo Galindo + +.. + +.. date: 2024-08-23-11-26-54 +.. gh-issue: 122298 +.. nonce: ZMyln4 +.. section: Core and Builtins + +Restore printout of GC stats when ``gc.set_debug(gc.DEBUG_STATS)`` is +called. This feature was accidentally removed when implementing incremental +GC. + +.. + +.. date: 2024-08-21-15-22-53 +.. gh-issue: 121804 +.. nonce: r5K3PS +.. section: Core and Builtins + +Correctly show error locations when a :exc:`SyntaxError` is raised in the +basic REPL. Patch by Sergey B Kirpichev. + +.. + +.. date: 2024-08-20-12-29-52 +.. gh-issue: 123142 +.. nonce: 3PXiNb +.. section: Core and Builtins + +Fix too-wide source location in exception tracebacks coming from broken +iterables in comprehensions. + +.. + +.. date: 2024-08-20-11-09-16 +.. gh-issue: 123048 +.. nonce: 2TISpv +.. section: Core and Builtins + +Fix a bug where pattern matching code could emit a :opcode:`JUMP_FORWARD` +with no source location. + +.. + +.. date: 2024-08-18-18-25-54 +.. gh-issue: 123123 +.. nonce: 0ZcaEB +.. section: Core and Builtins + +Fix displaying :exc:`SyntaxError` exceptions covering multiple lines. Patch +by Pablo Galindo + +.. + +.. date: 2024-08-17-17-26-25 +.. gh-issue: 123083 +.. nonce: 9xWLJ- +.. section: Core and Builtins + +Fix a potential use-after-free in ``STORE_ATTR_WITH_HINT``. + +.. + +.. date: 2024-08-15-19-28-43 +.. gh-issue: 123022 +.. nonce: m3EF9E +.. section: Core and Builtins + +Fix crash in free-threaded build when calling :c:func:`Py_Initialize` from a +non-main thread. + +.. + +.. date: 2024-08-10-12-44-03 +.. gh-issue: 122888 +.. nonce: TUyu9r +.. section: Core and Builtins + +Fix crash on certain calls to ``str()`` with positional arguments of the +wrong type. Patch by Jelle Zijlstra. + +.. + +.. date: 2024-08-05-19-04-06 +.. gh-issue: 116622 +.. nonce: 3LWUzE +.. section: Core and Builtins + +Fix Android stdout and stderr messages being truncated or lost. + +.. + +.. date: 2024-08-01-19-13-58 +.. gh-issue: 122527 +.. nonce: eztso6 +..
section: Core and Builtins
+
+Fix a crash that occurred when a ``PyStructSequence`` was deallocated after
+its type's dictionary was cleared by the GC. The type's
+:c:member:`~PyTypeObject.tp_basicsize` now accounts for non-sequence fields
+that aren't included in the :c:macro:`Py_SIZE` of the sequence.
+
+..
+
+.. date: 2024-07-30-11-41-35
+.. gh-issue: 122445
+.. nonce: Rq0bjS
+.. section: Core and Builtins
+
+Add only fields which are modified via ``self.*`` to
+:attr:`~type.__static_attributes__`.
+
+..
+
+.. date: 2024-06-19-21-34-21
+.. gh-issue: 98442
+.. nonce: cqhjkN
+.. section: Core and Builtins
+
+Fix too-wide source locations of the cleanup instructions of a ``with``
+statement.
+
+..
+
+.. date: 2024-06-05-18-29-18
+.. gh-issue: 93691
+.. nonce: 6OautB
+.. section: Core and Builtins
+
+Fix source locations of instructions generated for ``with`` statements.
+
+..
+
+.. date: 2024-06-05-10-32-44
+.. gh-issue: 120097
+.. nonce: 9S2klk
+.. section: Core and Builtins
+
+``FrameLocalsProxy`` now subclasses ``collections.abc.Mapping`` and can be
+matched as a mapping in ``match`` statements.
+
+..
+
+.. date: 2024-08-06-14-23-11
+.. gh-issue: 122728
+.. nonce: l-fQ-v
+.. section: C API
+
+Fix :c:func:`PyEval_GetLocals` to avoid :exc:`SystemError` ("bad argument to
+internal function"). Patch by Victor Stinner.
+
+..
+
+.. date: 2024-09-04-12-01-43
+.. gh-issue: 123418
+.. nonce: ynzspB
+.. section: Build
+
+Updated Android build to use OpenSSL 3.0.15.
+
+..
+
+.. date: 2024-08-24-19-09-31
+.. gh-issue: 123297
+.. nonce: fdtXoe
+.. section: Build
+
+Propagate the value of ``LDFLAGS`` to ``LDCXXSHARED`` in :mod:`sysconfig`.
+Patch by Pablo Galindo.
+
+..
+
+.. date: 2024-08-07-00-20-18
+.. gh-issue: 116622
+.. nonce: U9cxHM
+.. section: Build
+
+Rename build variable ``MODULE_LDFLAGS`` back to ``LIBPYTHON``, as it's used
+by package build systems (e.g. Meson).
+
+..
+
+.. date: 2024-08-02-12-59-15
+.. gh-issue: 118943
+.. nonce: vZQtET
+.. section: Build
+
+Fix an issue where the experimental JIT could be built several times by the
+``make regen-all`` target, leading to possible race conditions on heavily
+parallelized builds.
+
+..
+
+.. date: 2024-06-18-15-28-25
+.. gh-issue: 118943
+.. nonce: aie7nn
+.. section: Build
+
+Fix a possible race condition affecting parallel builds configured with
+``--enable-experimental-jit``, in which :exc:`FileNotFoundError` could be
+caused by another process already moving ``jit_stencils.h.new`` to
+``jit_stencils.h``.
diff --git a/Misc/NEWS.d/3.13.0rc3.rst b/Misc/NEWS.d/3.13.0rc3.rst
new file mode 100644
index 00000000000..974e8d99bef
--- /dev/null
+++ b/Misc/NEWS.d/3.13.0rc3.rst
@@ -0,0 +1,397 @@
+.. date: 2024-09-07-12-14-54
+.. gh-issue: 123797
+.. nonce: yFDeug
+.. release date: 2024-10-01
+.. section: macOS
+
+Check for the runtime availability of the ``ptsname_r`` function on macOS.
+
+..
+
+.. date: 2024-09-27-13-40-25
+.. gh-issue: 124609
+.. nonce: WaKk8G
+.. section: Windows
+
+Fix ``_Py_ThreadId`` for Windows builds using MinGW. Patch by Tony Roberts.
+
+..
+
+.. date: 2024-09-20-11-18-50
+.. gh-issue: 124254
+.. nonce: iPin-L
+.. section: Windows
+
+Ensures experimental free-threaded binaries remain installed when updating.
+
+..
+
+.. date: 2024-09-10-19-23-00
+.. gh-issue: 123915
+.. nonce: yZMEDO
+.. section: Windows
+
+Ensure that ``Tools\msi\buildrelease.bat`` uses different directories for
+AMD64 and ARM64 builds.
+
+..
+
+.. date: 2024-09-25-12-39-34
+.. gh-issue: 124378
+.. nonce: Ywwgia
+.. 
section: Tests
+
+Updated ``test_ttk`` to pass with Tcl/Tk 8.6.15.
+
+..
+
+.. date: 2024-09-25-18-34-48
+.. gh-issue: 124538
+.. nonce: nXZk4R
+.. section: Library
+
+Fixed a crash when using :func:`gc.get_referents` on a capsule object.
+
+..
+
+.. date: 2024-09-25-12-14-58
+.. gh-issue: 124498
+.. nonce: Ozxs55
+.. section: Library
+
+Fix :class:`typing.TypeAliasType` not to be generic when ``type_params`` is
+an empty tuple.
+
+..
+
+.. date: 2024-09-24-21-15-27
+.. gh-issue: 123017
+.. nonce: dSAr2f
+.. section: Library
+
+Due to unreliable results on some devices, :func:`time.strftime` no longer
+accepts negative years on Android.
+
+..
+
+.. date: 2024-09-24-19-32-14
+.. gh-issue: 123014
+.. nonce: zVcfkZ
+.. section: Library
+
+:func:`os.pidfd_open` and :func:`signal.pidfd_send_signal` are now
+unavailable when building against Android API levels older than 31, since
+the underlying system calls may cause a crash.
+
+..
+
+.. date: 2024-09-19-11-47-39
+.. gh-issue: 124248
+.. nonce: g7rufd
+.. section: Library
+
+Fixed a potential crash when using :mod:`struct` to process zero-width
+'Pascal string' fields (``0p``).
+
+..
+
+.. date: 2024-09-19-03-46-59
+.. gh-issue: 87041
+.. nonce: 9Ox7Bv
+.. section: Library
+
+Fix a bug in :mod:`argparse` where lengthy subparser argument help is
+incorrectly indented.
+
+..
+
+.. date: 2024-09-18-17-45-52
+.. gh-issue: 124212
+.. nonce: n6kIby
+.. section: Library
+
+Fix an invalid variable in :mod:`venv` handling of a failed symlink on
+Windows.
+
+..
+
+.. date: 2024-09-17-18-06-42
+.. gh-issue: 124171
+.. nonce: PHCvRJ
+.. section: Library
+
+Add a workaround for broken :c:func:`!fmod()` implementations on Windows
+that lose the zero sign (e.g. ``fmod(-10, 1)`` returns ``0.0`` instead of
+``-0.0``). Patch by Sergey B Kirpichev.
+
+..
+
+.. date: 2024-09-13-10-34-19
+.. gh-issue: 123934
+.. nonce: yMe7mL
+.. section: Library
+
+Fix :class:`unittest.mock.MagicMock` resetting magic methods' return values
+after ``.reset_mock(return_value=True)`` was called.
+
+..
+
+.. date: 2024-09-11-19-12-23
+.. gh-issue: 123968
+.. nonce: OwHON_
+.. section: Library
+
+Fix the command-line interface for the :mod:`random` module to select floats
+between 0 and N, not 1 and N.
+
+..
+
+.. date: 2024-09-10-11-26-14
+.. gh-issue: 123892
+.. nonce: 2gzIrz
+.. section: Library
+
+Add ``"_wmi"`` to :data:`sys.stdlib_module_names`. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-09-02-20-34-04
+.. gh-issue: 123339
+.. nonce: czgcSu
+.. section: Library
+
+Fix :func:`inspect.getsource` for classes in the :mod:`collections.abc` and
+:mod:`decimal` (for the pure Python implementation) modules.
+:func:`inspect.getcomments` now raises :exc:`OSError` instead of
+:exc:`IndexError` if the ``__firstlineno__`` value for a class is out of
+bounds.
+
+..
+
+.. date: 2024-08-15-09-45-34
+.. gh-issue: 121735
+.. nonce: _1q0qf
+.. section: Library
+
+When working with zip archives, importlib.resources now properly honors
+module-adjacent references (e.g. ``files(pkg.mod)`` and not just
+``files(pkg)``).
+
+..
+
+.. date: 2024-07-23-12-38-14
+.. gh-issue: 122145
+.. nonce: sTO8nX
+.. section: Library
+
+Fix an issue when reporting tracebacks corresponding to Python code emitting
+an empty AST body. Patch by Nikita Sobolev and Bénédikt Tran.
+
+..
+
+.. date: 2024-07-03-14-23-04
+.. gh-issue: 119004
+.. nonce: L5MoUu
+.. section: Library
+
+Fix a crash in :ref:`OrderedDict.__eq__ `
+when operands are mutated during the check. Patch by Bénédikt Tran.
+
+..
+
+.. bpo: 44864
+.. date: 2021-08-24-19-37-46
+.. nonce: KzxaDh
+.. 
section: Library
+
+Do not translate user-provided strings in :class:`argparse.ArgumentParser`.
+
+..
+
+.. date: 2024-09-21-23-12-18
+.. gh-issue: 112938
+.. nonce: OeiDru
+.. section: IDLE
+
+Fix an uninterruptible hang when Shell gets rapid continuous output.
+
+..
+
+.. date: 2024-06-05-14-54-24
+.. gh-issue: 120104
+.. nonce: j_thj4
+.. section: IDLE
+
+Fix padding in config and search dialog windows in IDLE.
+
+..
+
+.. date: 2024-09-27-16-47-48
+.. gh-issue: 124720
+.. nonce: nVSTVb
+.. section: Documentation
+
+Update the "Using Python on a Mac" section of the "Python Setup and Usage"
+document and include information on installing free-threading support.
+
+..
+
+.. date: 2024-09-19-19-33-25
+.. gh-issue: 116622
+.. nonce: M65UZ6
+.. section: Documentation
+
+Add an Android platform guide, and flag modules not available on Android.
+
+..
+
+.. date: 2024-09-30-21-02-10
+.. gh-issue: 124567
+.. nonce: tv_B_C
+.. section: Core and Builtins
+
+Revert the incremental GC (in 3.13), since it's not clear the benefits
+outweigh the costs at this point.
+
+..
+
+.. date: 2024-09-27-17-18-53
+.. gh-issue: 124642
+.. nonce: OCjhBJ
+.. section: Core and Builtins
+
+Fixed a scalability issue in free-threaded builds for lock-free reads from
+dictionaries in multi-threaded scenarios.
+
+..
+
+.. date: 2024-09-26-17-55-34
+.. gh-issue: 116510
+.. nonce: dhn8w8
+.. section: Core and Builtins
+
+Fix a bug that can cause a crash when sub-interpreters use "basic"
+single-phase extension modules. Shared objects could refer to ``PyGC_Head``
+nodes that had been freed as part of interpreter cleanup.
+
+..
+
+.. date: 2024-09-26-12-19-13
+.. gh-issue: 124547
+.. nonce: P_SHfU
+.. section: Core and Builtins
+
+When deallocating an object with inline values whose ``__dict__`` is still
+live: if memory allocation for the inline values fails, clear the
+dictionary. This prevents an interpreter crash.
+
+..
+
+.. date: 2024-09-25-14-45-56
+.. gh-issue: 124513
+.. nonce: ywiXtr
+.. section: Core and Builtins
+
+Fix a crash in the ``FrameLocalsProxy`` constructor: check the number of
+arguments. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-09-25-11-53-22
+.. gh-issue: 124442
+.. nonce: EXC1Ve
+.. section: Core and Builtins
+
+Fix nondeterminism in compilation by sorting the value of
+:attr:`~type.__static_attributes__`. Patch by kp2pml30.
+
+..
+
+.. date: 2024-09-23-15-23-14
+.. gh-issue: 123856
+.. nonce: yrgJ9m
+.. section: Core and Builtins
+
+Fix a PyREPL failure when a keyboard interrupt is triggered after using a
+history search.
+
+..
+
+.. date: 2024-09-23-13-25-27
+.. gh-issue: 65961
+.. nonce: LDqXV2
+.. section: Core and Builtins
+
+Document the deprecation of setting and using ``__package__`` and
+``__cached__``.
+
+..
+
+.. date: 2024-09-13-02-25-06
+.. gh-issue: 124027
+.. nonce: to_9DY
+.. section: Core and Builtins
+
+Support ````, ````, and ```` keys in the Python
+REPL when ``$TERM`` is set to ``vt100``.
+
+..
+
+.. date: 2024-09-10-13-27-16
+.. gh-issue: 77894
+.. nonce: ZC-Olu
+.. section: Core and Builtins
+
+Fix a possible crash in the garbage collector when it tries to break a
+reference loop containing a :class:`memoryview` object. Now a
+:class:`!memoryview` object can only be cleared if there are no buffers that
+refer to it.
+
+..
+
+.. date: 2024-09-02-20-36-45
+.. gh-issue: 123339
+.. nonce: QcmpSs
+.. section: Core and Builtins
+
+Setting the :attr:`!__module__` attribute for a class now removes the
+``__firstlineno__`` item from the type's dict, so they will no longer be
+inconsistent.
+
+..
+
+.. 
date: 2024-09-18-18-40-30 +.. gh-issue: 124160 +.. nonce: Zy-VKi +.. section: C API + +Fix crash when importing modules containing state and single-phase +initialization in a subinterpreter. + +.. + +.. date: 2024-09-12-16-16-24 +.. gh-issue: 123880 +.. nonce: 2-8vcj +.. section: C API + +Fixed a bug that prevented circular imports of extension modules that use +single-phase initialization. + +.. + +.. date: 2024-09-27-15-58-10 +.. gh-issue: 124487 +.. nonce: PAZTQf +.. section: Build + +Windows builds now use Windows 8.1 as their API baseline (installation +already required Windows 8.1). + +.. + +.. date: 2024-09-13-17-48-37 +.. gh-issue: 124043 +.. nonce: Bruxpq +.. section: Build + +Building using :option:`--with-trace-refs` is (temporarily) disallowed when +the GIL is disabled. diff --git a/Misc/NEWS.d/3.5.0a1.rst b/Misc/NEWS.d/3.5.0a1.rst index 5244db107a7..b197f3137e9 100644 --- a/Misc/NEWS.d/3.5.0a1.rst +++ b/Misc/NEWS.d/3.5.0a1.rst @@ -3447,7 +3447,8 @@ tkinter.ttk now works when default root window is not set. .. nonce: FE_PII .. section: Library -_tkinter.create() now creates tkapp object with wantobject=1 by default. +``_tkinter.create()`` now creates ``tkapp`` object with ``wantobjects=1`` by +default. .. @@ -5467,7 +5468,7 @@ All resources are now allowed when tests are not run by regrtest.py. .. section: Tests Fix pystone micro-benchmark: use floor division instead of true division to -benchmark integers instead of floating point numbers. Set pystone version to +benchmark integers instead of floating-point numbers. Set pystone version to 1.2. Patch written by Lennart Regebro. .. diff --git a/Misc/NEWS.d/3.6.0a1.rst b/Misc/NEWS.d/3.6.0a1.rst index 144d217f609..c6a9b231b3e 100644 --- a/Misc/NEWS.d/3.6.0a1.rst +++ b/Misc/NEWS.d/3.6.0a1.rst @@ -1484,9 +1484,9 @@ on UNIX signals (SIGSEGV, SIGFPE, SIGABRT). .. nonce: RWN1jR .. section: Library -Add C functions :c:func:`_PyTraceMalloc_Track` and -:c:func:`_PyTraceMalloc_Untrack` to track memory blocks using the -:mod:`tracemalloc` module. Add :c:func:`_PyTraceMalloc_GetTraceback` to get +Add C functions :c:func:`!_PyTraceMalloc_Track` and +:c:func:`!_PyTraceMalloc_Untrack` to track memory blocks using the +:mod:`tracemalloc` module. Add :c:func:`!_PyTraceMalloc_GetTraceback` to get the traceback of an object. .. diff --git a/Misc/NEWS.d/3.6.5rc1.rst b/Misc/NEWS.d/3.6.5rc1.rst index 3d14cc49049..a45e97fb290 100644 --- a/Misc/NEWS.d/3.6.5rc1.rst +++ b/Misc/NEWS.d/3.6.5rc1.rst @@ -474,7 +474,7 @@ platforms with OpenSSL 1.0.2+ or inet_pton. .. nonce: ideco .. section: Library -:func:`urllib.parse.urlsplit()` does not convert zone-id (scope) to lower +:func:`urllib.parse.urlsplit` does not convert zone-id (scope) to lower case for scoped IPv6 addresses in hostnames now. .. diff --git a/Misc/NEWS.d/3.7.0a4.rst b/Misc/NEWS.d/3.7.0a4.rst index f2c6559037d..de50acb2235 100644 --- a/Misc/NEWS.d/3.7.0a4.rst +++ b/Misc/NEWS.d/3.7.0a4.rst @@ -434,7 +434,7 @@ loop.getaddrinfo, loop.getnameinfo. .. nonce: ideco .. section: Library -:func:`urllib.parse.urlsplit()` does not convert zone-id (scope) to lower +:func:`urllib.parse.urlsplit` does not convert zone-id (scope) to lower case for scoped IPv6 addresses in hostnames now. .. @@ -463,7 +463,7 @@ Fix ``stop_serving`` in asyncio proactor loop kill all listening servers .. nonce: CUbsb2 .. section: Library -:func:`re.sub()` now replaces empty matches adjacent to a previous non-empty +:func:`re.sub` now replaces empty matches adjacent to a previous non-empty match. .. 
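Illustrative note on the :func:`re.sub` entry just above: since 3.7, empty
matches adjacent to a previous non-empty match are also replaced. A minimal
sketch, with a made-up pattern and input, assuming CPython 3.7+::

    import re

    # The empty matches before 'a', before 'b', immediately after the
    # non-empty 'x' match, and at the end are all replaced.
    assert re.sub('x*', '-', 'abxd') == '-a-b--d-'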
diff --git a/Misc/NEWS.d/3.7.0b1.rst b/Misc/NEWS.d/3.7.0b1.rst index d1beec9cdcc..e3dcd4f59cd 100644 --- a/Misc/NEWS.d/3.7.0b1.rst +++ b/Misc/NEWS.d/3.7.0b1.rst @@ -601,7 +601,7 @@ Add socket.getblocking() method. Add :mod:`importlib.resources` and :class:`importlib.abc.ResourceReader` as the unified API for reading resources contained within packages. Loaders wishing to support resource reading must implement the -:meth:`get_resource_reader()` method. File-based and zipimport-based +:meth:`get_resource_reader` method. File-based and zipimport-based loaders both implement these APIs. :class:`importlib.abc.ResourceLoader` is deprecated in favor of these new APIs. diff --git a/Misc/NEWS.d/3.7.0b2.rst b/Misc/NEWS.d/3.7.0b2.rst index 702dbc960c0..10cd57ea7ed 100644 --- a/Misc/NEWS.d/3.7.0b2.rst +++ b/Misc/NEWS.d/3.7.0b2.rst @@ -274,7 +274,7 @@ collections.ChainMap() preserves the order of the underlying mappings. .. nonce: -T77_c .. section: Library -:func:`fnmatch.translate()` no longer produces patterns which contain set +:func:`fnmatch.translate` no longer produces patterns which contain set operations. Sets starting with '[' or containing '--', '&&', '~~' or '||' will be interpreted differently in regular expressions in future versions. Currently they emit warnings. fnmatch.translate() now avoids producing diff --git a/Misc/NEWS.d/3.7.0b4.rst b/Misc/NEWS.d/3.7.0b4.rst index b17c7e08d1d..87e3ebf32b4 100644 --- a/Misc/NEWS.d/3.7.0b4.rst +++ b/Misc/NEWS.d/3.7.0b4.rst @@ -235,7 +235,7 @@ End framing at the end of C implementation of :func:`pickle.Pickler.dump`. .. section: Library Improved error handling and fixed a reference leak in -:func:`os.posix_spawn()`. +:func:`os.posix_spawn`. .. diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index 63735c2c972..5d775a8d0ea 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -224,7 +224,7 @@ positives from posix, socket, time, test_io, and test_faulthandler. .. nonce: 9vMWSP .. section: Core and Builtins -Fix an assertion error in :func:`format` in debug build for floating point +Fix an assertion error in :func:`format` in debug build for floating-point formatting with "n" format, zero padding and small width. Release build is not impacted. Patch by Karthikeyan Singaravelan. @@ -625,7 +625,7 @@ Spytz. The C function ``property_descr_get()`` uses a "cached" tuple to optimize function calls. But this tuple can be discovered in debug mode with -:func:`sys.getobjects()`. Remove the optimization, it's not really worth it +:func:`sys.getobjects`. Remove the optimization, it's not really worth it and it causes 3 different crashes last years. .. @@ -2519,7 +2519,7 @@ non-Windows systems. .. nonce: dQS1ng .. section: Library -Fix incorrect parsing of :class:`_io.IncrementalNewlineDecoder`'s +Fix incorrect parsing of :class:`io.IncrementalNewlineDecoder`'s *translate* argument. .. @@ -2850,8 +2850,8 @@ allow for tweaking of protocols and also to add support by default for .. nonce: 37IdsA .. section: Library -Fixed integer overflow in the :meth:`~hashlib.shake.digest()` and -:meth:`~hashlib.shake.hexdigest()` methods for the SHAKE algorithm in the +Fixed integer overflow in the :meth:`~hashlib.shake.digest` and +:meth:`~hashlib.shake.hexdigest` methods for the SHAKE algorithm in the :mod:`hashlib` module. .. @@ -3211,10 +3211,10 @@ bytes objects. (microoptimization) .. nonce: i-F_E5 .. 
section: Library -Add :func:`~unittest.addModuleCleanup()` and -:meth:`~unittest.TestCase.addClassCleanup()` to unittest to support cleanups -for :func:`~unittest.setUpModule()` and -:meth:`~unittest.TestCase.setUpClass()`. Patch by Lisa Roach. +Add :func:`~unittest.addModuleCleanup` and +:meth:`~unittest.TestCase.addClassCleanup` to unittest to support cleanups +for :func:`~unittest.setUpModule` and +:meth:`~unittest.TestCase.setUpClass`. Patch by Lisa Roach. .. @@ -3458,7 +3458,7 @@ Running the :mod:`trace` module no longer creates the ``trace.cover`` file. .. section: Library Fix crash when an ``ABC``-derived class with invalid ``__subclasses__`` is -passed as the second argument to :func:`issubclass()`. Patch by Alexey +passed as the second argument to :func:`issubclass`. Patch by Alexey Izbyshev. .. @@ -3664,7 +3664,7 @@ Add pure Python fallback for functools.reduce. Patch by Robert Wright. .. section: Library The default asyncio task class now always has a name which can be get or set -using two new methods (:meth:`~asyncio.Task.get_name()` and +using two new methods (:meth:`~asyncio.Task.get_name` and :meth:`~asyncio.Task.set_name`) and is visible in the :func:`repr` output. An initial name can also be set using the new ``name`` keyword argument to :func:`asyncio.create_task` or the @@ -4152,12 +4152,12 @@ Convert content length to string before putting to header. :func:`~os.path.exists`, :func:`~os.path.lexists`, :func:`~os.path.isdir`, :func:`~os.path.isfile`, :func:`~os.path.islink`, and :func:`~os.path.ismount`, and :mod:`pathlib.Path` methods that return a -boolean result like :meth:`~pathlib.Path.exists()`, -:meth:`~pathlib.Path.is_dir()`, :meth:`~pathlib.Path.is_file()`, -:meth:`~pathlib.Path.is_mount()`, :meth:`~pathlib.Path.is_symlink()`, -:meth:`~pathlib.Path.is_block_device()`, -:meth:`~pathlib.Path.is_char_device()`, :meth:`~pathlib.Path.is_fifo()`, -:meth:`~pathlib.Path.is_socket()` now return ``False`` instead of raising +boolean result like :meth:`~pathlib.Path.exists`, +:meth:`~pathlib.Path.is_dir`, :meth:`~pathlib.Path.is_file`, +:meth:`~pathlib.Path.is_mount`, :meth:`~pathlib.Path.is_symlink`, +:meth:`~pathlib.Path.is_block_device`, +:meth:`~pathlib.Path.is_char_device`, :meth:`~pathlib.Path.is_fifo`, +:meth:`~pathlib.Path.is_socket` now return ``False`` instead of raising :exc:`ValueError` or its subclasses :exc:`UnicodeEncodeError` and :exc:`UnicodeDecodeError` for paths that contain characters or bytes unrepresentable at the OS level. @@ -5269,7 +5269,7 @@ performance and smaller size compared to protocol 3 introduced in Python .. section: Library Improved error handling and fixed a reference leak in -:func:`os.posix_spawn()`. +:func:`os.posix_spawn`. .. @@ -5857,7 +5857,7 @@ collections.ChainMap() preserves the order of the underlying mappings. .. nonce: -T77_c .. section: Library -:func:`fnmatch.translate()` no longer produces patterns which contain set +:func:`fnmatch.translate` no longer produces patterns which contain set operations. Sets starting with '[' or containing '--', '&&', '~~' or '||' will be interpreted differently in regular expressions in future versions. Currently they emit warnings. fnmatch.translate() now avoids producing @@ -8051,7 +8051,7 @@ Update macOS 10.9+ installer to Tcl/Tk 8.6.8. .. nonce: K6jCVG .. section: macOS -In :mod:`_scproxy`, drop the GIL when calling into ``SystemConfiguration`` +In :mod:`!_scproxy`, drop the GIL when calling into ``SystemConfiguration`` to avoid deadlocks. .. 
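A minimal usage sketch for the asyncio task-name API mentioned in the
3.8.0a1 hunks above (the coroutine and the names here are invented for
illustration)::

    import asyncio

    async def work():
        await asyncio.sleep(0)

    async def main():
        # An initial name can be passed to create_task() and later
        # inspected or changed with get_name()/set_name().
        task = asyncio.create_task(work(), name="worker-1")
        assert task.get_name() == "worker-1"
        task.set_name("worker-primary")
        await task

    asyncio.run(main())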
diff --git a/Misc/NEWS.d/3.8.0a2.rst b/Misc/NEWS.d/3.8.0a2.rst index c8620aeea7f..0dbfa2758fe 100644 --- a/Misc/NEWS.d/3.8.0a2.rst +++ b/Misc/NEWS.d/3.8.0a2.rst @@ -202,7 +202,7 @@ the mean and standard deviation of measurement data as single entity. .. nonce: V88MCD .. section: Library -Added statistics.fmean() as a faster, floating point variant of the existing +Added statistics.fmean() as a faster, floating-point variant of the existing mean() function. .. diff --git a/Misc/NEWS.d/3.8.0a4.rst b/Misc/NEWS.d/3.8.0a4.rst index 7bf0de12109..41eea7303bd 100644 --- a/Misc/NEWS.d/3.8.0a4.rst +++ b/Misc/NEWS.d/3.8.0a4.rst @@ -663,8 +663,8 @@ followed imports. Patch by Brandt Bucher. .. nonce: QmfNmY .. section: Library -Added :meth:`~socket.create_server()` and -:meth:`~socket.has_dualstack_ipv6()` convenience functions to automate the +Added :meth:`~socket.create_server` and +:meth:`~socket.has_dualstack_ipv6` convenience functions to automate the necessary tasks usually involved when creating a server socket, including accepting both IPv4 and IPv6 connections on the same socket. (Contributed by Giampaolo Rodola in :issue:`17561`.) @@ -945,7 +945,7 @@ P. Hemsley. .. nonce: __FTq9 .. section: Tests -Add a new :mod:`_testinternalcapi` module to test the internal C API. +Add a new :mod:`!_testinternalcapi` module to test the internal C API. .. @@ -1383,7 +1383,7 @@ Since Python 3.7.0, calling :c:func:`Py_DecodeLocale` before coerced and/or if the UTF-8 Mode is enabled by the user configuration. The LC_CTYPE coercion and UTF-8 Mode are now disabled by default to fix the mojibake issue. They must now be enabled explicitly (opt-in) using the new -:c:func:`_Py_PreInitialize` API with ``_PyPreConfig``. +:c:func:`!_Py_PreInitialize` API with ``_PyPreConfig``. .. diff --git a/Misc/NEWS.d/3.8.0b1.rst b/Misc/NEWS.d/3.8.0b1.rst index 4174ab8fac6..5010473269d 100644 --- a/Misc/NEWS.d/3.8.0b1.rst +++ b/Misc/NEWS.d/3.8.0b1.rst @@ -176,8 +176,8 @@ Added new ``replace()`` method to the code type (:class:`types.CodeType`). .. nonce: d1SOtF .. section: Core and Builtins -Implement :func:`socket.if_nameindex()`, :func:`socket.if_nametoindex()`, -and :func:`socket.if_indextoname()` on Windows. +Implement :func:`socket.if_nameindex`, :func:`socket.if_nametoindex`, +and :func:`socket.if_indextoname` on Windows. .. @@ -538,7 +538,7 @@ module. .. nonce: TQFOR4 .. section: Library -:meth:`!msilib.Directory.start_component()` no longer fails if *keyfile* is +:meth:`!msilib.Directory.start_component` no longer fails if *keyfile* is not ``None``. .. @@ -600,7 +600,7 @@ default. .. nonce: sLULGQ .. section: Library -Fix destructor :class:`_pyio.BytesIO` and :class:`_pyio.TextIOWrapper`: +Fix destructor :class:`!_pyio.BytesIO` and :class:`!_pyio.TextIOWrapper`: initialize their ``_buffer`` attribute as soon as possible (in the class body), because it's used by ``__del__()`` which calls ``close()``. @@ -1371,7 +1371,7 @@ Asyncio: Remove inner callback on outer cancellation in shield .. nonce: d8djAJ .. section: Library -Fix :meth:`asyncio.SelectorEventLoop.subprocess_exec()` leaks file +Fix :meth:`asyncio.SelectorEventLoop.subprocess_exec` leaks file descriptors if ``Popen`` fails and called with ``stdin=subprocess.PIPE``. Patch by Niklas Fiekas. diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index a38b93e4b76..d75132b0aac 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -149,7 +149,7 @@ exception in :meth:`float.__getformat__`. .. nonce: 9-vKtO .. 
section: Core and Builtins -Optimized :func:`math.floor()`, :func:`math.ceil()` and :func:`math.trunc()` +Optimized :func:`math.floor`, :func:`math.ceil` and :func:`math.trunc` for floats. .. @@ -299,7 +299,7 @@ Check the error from the system's underlying ``crypt`` or ``crypt_r``. .. section: Core and Builtins On FreeBSD, Python no longer calls ``fedisableexcept()`` at startup to -control the floating point control mode. The call became useless since +control the floating-point control mode. The call became useless since FreeBSD 6: it became the default mode. .. @@ -1384,7 +1384,7 @@ Nested subclasses of :class:`typing.NamedTuple` are now pickleable. .. nonce: hwrPN7 .. section: Library -Prevent :exc:`KeyError` thrown by :func:`_encoded_words.decode` when given +Prevent :exc:`KeyError` thrown by :func:`!_encoded_words.decode` when given an encoded-word with invalid content-type encoding from propagating all the way to :func:`email.message.get`. @@ -1395,7 +1395,7 @@ way to :func:`email.message.get`. .. nonce: S6Klvm .. section: Library -Deprecated the ``split()`` method in :class:`_tkinter.TkappType` in favour +Deprecated the ``split()`` method in :class:`!_tkinter.TkappType` in favour of the ``splitlist()`` method which has more consistent and predictable behavior. @@ -2990,7 +2990,7 @@ mode. .. nonce: FRGH4I .. section: Library -:func:`ctypes.create_unicode_buffer()` now also supports non-BMP characters +:func:`ctypes.create_unicode_buffer` now also supports non-BMP characters on platforms with 16-bit :c:type:`wchar_t` (for example, Windows and AIX). .. @@ -3013,7 +3013,7 @@ thread was still running. .. section: Library Allow pure Python implementation of :mod:`pickle` to work even when the C -:mod:`_pickle` module is unavailable. +:mod:`!_pickle` module is unavailable. .. @@ -3054,7 +3054,7 @@ Change the format of feature_version to be a (major, minor) tuple. .. nonce: 5_mJkQ .. section: Library -Eliminate :exc:`RuntimeError` raised by :func:`asyncio.all_tasks()` if +Eliminate :exc:`RuntimeError` raised by :func:`asyncio.all_tasks` if internal tasks weak set is changed by another thread during iteration. .. @@ -3064,8 +3064,8 @@ internal tasks weak set is changed by another thread during iteration. .. nonce: ADqCkq .. section: Library -:class:`_pyio.IOBase` destructor now does nothing if getting the ``closed`` -attribute fails to better mimic :class:`_io.IOBase` finalizer. +:class:`!_pyio.IOBase` destructor now does nothing if getting the ``closed`` +attribute fails to better mimic :class:`!_io.IOBase` finalizer. .. @@ -3536,7 +3536,7 @@ Add :meth:`~pathlib.Path.readlink`. Patch by Girts Folkmanis. .. nonce: La3TZz .. section: Library -Made :func:`urllib.parse.unquote()` accept bytes in addition to strings. +Made :func:`urllib.parse.unquote` accept bytes in addition to strings. Patch by Stein Karlsen. .. @@ -3839,7 +3839,7 @@ Added possible exceptions to the description of os.chdir(). .. nonce: r_wGRc .. section: Documentation -Documented that in :meth:`datetime.datetime.strptime()`, the leading zero in +Documented that in :meth:`datetime.datetime.strptime`, the leading zero in some two-digit formats is optional. Patch by Mike Gleen. .. @@ -4993,7 +4993,7 @@ Make :const:`winreg.REG_MULTI_SZ` support zero-length strings. .. section: Windows Replace use of :c:func:`strcasecmp` for the system function -:c:func:`_stricmp`. Patch by Minmin Gong. +:c:func:`!_stricmp`. Patch by Minmin Gong. .. @@ -5696,8 +5696,8 @@ Add :c:func:`PyConfig_SetWideStringList` function. .. 
section: C API Add fast functions for calling methods: -:c:func:`_PyObject_VectorcallMethod`, :c:func:`_PyObject_CallMethodNoArgs` -and :c:func:`_PyObject_CallMethodOneArg`. +:c:func:`!_PyObject_VectorcallMethod`, :c:func:`!_PyObject_CallMethodNoArgs` +and :c:func:`!_PyObject_CallMethodOneArg`. .. diff --git a/Misc/NEWS.d/3.9.0a4.rst b/Misc/NEWS.d/3.9.0a4.rst index ca0eb2abf1d..cce0c4c9acd 100644 --- a/Misc/NEWS.d/3.9.0a4.rst +++ b/Misc/NEWS.d/3.9.0a4.rst @@ -755,7 +755,7 @@ dependencies. .. nonce: X7FRaN .. section: Windows -:meth:`~pathlib.Path.home()` and :meth:`~pathlib.Path.expanduser()` on +:meth:`~pathlib.Path.home` and :meth:`~pathlib.Path.expanduser` on Windows now prefer :envvar:`USERPROFILE` and no longer use :envvar:`HOME`, which is not normally set for regular user accounts. This makes them again behave like :func:`os.path.expanduser`, which was changed to ignore diff --git a/Misc/NEWS.d/3.9.0a5.rst b/Misc/NEWS.d/3.9.0a5.rst index 7f7480539f2..9402e5077c2 100644 --- a/Misc/NEWS.d/3.9.0a5.rst +++ b/Misc/NEWS.d/3.9.0a5.rst @@ -548,7 +548,7 @@ large for an AF_UNIX socket address. Patch by Pablo Galindo. .. nonce: mxr5m8 .. section: Library -:func:`ast.dump()` no longer outputs optional fields and attributes with +:func:`ast.dump` no longer outputs optional fields and attributes with default values. The default values for optional fields and attributes of AST nodes are now set as class attributes (e.g. ``Constant.kind`` is set to ``None``). diff --git a/Misc/NEWS.d/3.9.0a6.rst b/Misc/NEWS.d/3.9.0a6.rst index b7ea1051c31..11309fccc0a 100644 --- a/Misc/NEWS.d/3.9.0a6.rst +++ b/Misc/NEWS.d/3.9.0a6.rst @@ -111,7 +111,7 @@ str.decode(). .. nonce: m15TTX .. section: Core and Builtins -Fix possible refleaks in :mod:`_json`, memo of PyScannerObject should be +Fix possible refleaks in :mod:`!_json`, memo of PyScannerObject should be traversed. .. @@ -403,7 +403,7 @@ after encoding it to utf-8, not before. .. nonce: pDZR6V .. section: Library -Added :meth:`pathlib.Path.with_stem()` to create a new Path with the stem +Added :meth:`pathlib.Path.with_stem` to create a new Path with the stem replaced. .. @@ -666,8 +666,8 @@ for _main_thread, instead of a _DummyThread instance. .. nonce: VTq_8s .. section: Library -Add a private ``_at_fork_reinit()`` method to :class:`_thread.Lock`, -:class:`_thread.RLock`, :class:`threading.RLock` and +Add a private ``_at_fork_reinit()`` method to :class:`!_thread.Lock`, +:class:`!_thread.RLock`, :class:`threading.RLock` and :class:`threading.Condition` classes: reinitialize the lock at fork in the child process, reset the lock to the unlocked state. Rename also the private ``_reset_internal_locks()`` method of :class:`threading.Event` to @@ -866,7 +866,7 @@ of source of the class. Patch by Karthikeyan Singaravelan. .. nonce: vHC7YQ .. section: Library -Deprecate passing None as an argument for :func:`shlex.split()`'s ``s`` +Deprecate passing None as an argument for :func:`shlex.split`'s ``s`` parameter. Patch by Zackery Spytz. .. diff --git a/Misc/NEWS.d/3.9.0b1.rst b/Misc/NEWS.d/3.9.0b1.rst index 40fb8474bf9..9a3630ddf21 100644 --- a/Misc/NEWS.d/3.9.0b1.rst +++ b/Misc/NEWS.d/3.9.0b1.rst @@ -532,7 +532,7 @@ Remove ``_random.Random.randbytes()``: the C implementation of .. section: Library Added default arguments to -:meth:`difflib.SequenceMatcher.find_longest_match()`. +:meth:`difflib.SequenceMatcher.find_longest_match`. .. 
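The :meth:`difflib.SequenceMatcher.find_longest_match` entry just above adds
default arguments; a small sketch of what that allows, using
documentation-style example strings (illustrative only, assuming Python
3.9+)::

    from difflib import SequenceMatcher

    sm = SequenceMatcher(None, " abcd", "abcd abcd")
    # Since 3.9 the slice bounds default to the full sequences,
    # equivalent to sm.find_longest_match(0, 5, 0, 9).
    match = sm.find_longest_match()
    assert (match.a, match.b, match.size) == (0, 4, 5)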
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- b/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- deleted file mode 100644 index 29f06d43c35..00000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- +++ /dev/null @@ -1 +0,0 @@ -Support Linux perf profiler to see Python calls on RISC-V architecture diff --git a/Misc/NEWS.d/next/Library/2023-06-17-09-07-06.gh-issue-105623.5G06od.rst b/Misc/NEWS.d/next/Library/2023-06-17-09-07-06.gh-issue-105623.5G06od.rst deleted file mode 100644 index 2890674aac4..00000000000 --- a/Misc/NEWS.d/next/Library/2023-06-17-09-07-06.gh-issue-105623.5G06od.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix performance degradation in -:class:`logging.handlers.RotatingFileHandler`. Patch by Craig Robson. diff --git a/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst b/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst deleted file mode 100644 index 98a1044f887..00000000000 --- a/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed other issues where :class:`argparse.ArgumentParser` did not honor -``exit_on_error=False``. diff --git a/Misc/externals.spdx.json b/Misc/externals.spdx.json index 758d4191005..f7aea9e8f99 100644 --- a/Misc/externals.spdx.json +++ b/Misc/externals.spdx.json @@ -70,21 +70,21 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "e6a77c273ebb284fedd8ea19b081fce74a9455936ffd47215f7c24713e2614b2" + "checksumValue": "1550c87996a0858474a9dd179deab2c55eb73726b9a140b32865b02fd3d8a86b" } ], - "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/openssl-3.0.13.tar.gz", + "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/openssl-3.0.15.tar.gz", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:openssl:openssl:3.0.13:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:openssl:openssl:3.0.15:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], "licenseConcluded": "NOASSERTION", "name": "openssl", "primaryPackagePurpose": "SOURCE", - "versionInfo": "3.0.13" + "versionInfo": "3.0.15" }, { "SPDXID": "SPDXRef-PACKAGE-sqlite", diff --git a/Misc/python-config.sh.in b/Misc/python-config.sh.in index 9929f5b2653..555b0cb6ba2 100644 --- a/Misc/python-config.sh.in +++ b/Misc/python-config.sh.in @@ -47,7 +47,7 @@ LIBM="@LIBM@" LIBC="@LIBC@" SYSLIBS="$LIBM $LIBC" ABIFLAGS="@ABIFLAGS@" -LIBS="@MODULE_LDFLAGS@ @LIBS@ $SYSLIBS" +LIBS="@LIBPYTHON@ @LIBS@ $SYSLIBS" LIBS_EMBED="-lpython${VERSION}${ABIFLAGS} @LIBS@ $SYSLIBS" BASECFLAGS="@BASECFLAGS@" LDLIBRARY="@LDLIBRARY@" diff --git a/Misc/python.man b/Misc/python.man index 4c90c0e2a99..4076b8d3d1b 100644 --- a/Misc/python.man +++ b/Misc/python.man @@ -251,6 +251,7 @@ emitted by a process (even those that are otherwise ignored by default): -Wdefault # Warn once per call location -Werror # Convert to exceptions -Walways # Warn every time + -Wall # Same as -Walways -Wmodule # Warn once per calling module -Wonce # Warn once per Python process -Wignore # Never warn diff --git a/Misc/python.pc.in b/Misc/python.pc.in index c2c740e82b1..027dba38585 100644 --- a/Misc/python.pc.in +++ b/Misc/python.pc.in @@ -9,5 +9,5 @@ Description: Build a C extension for Python Requires: Version: @VERSION@ Libs.private: @LIBS@ -Libs: -L${libdir} @MODULE_LDFLAGS@ +Libs: -L${libdir} @LIBPYTHON@ Cflags: -I${includedir}/python@VERSION@@ABIFLAGS@ diff --git a/Misc/sbom.spdx.json 
b/Misc/sbom.spdx.json index b60adcfd362..a9f30881d16 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -48,11 +48,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "4076a884f0ca96873589b5c8159e2e5bfb8b829a" + "checksumValue": "6aaee1b194bea30f0a60d1cce71eada8b14d3526" }, { "algorithm": "SHA256", - "checksumValue": "1a434bf3d2f9fb8a0b5adb79201a942788d11824c3e5b46a0b9962c0c482016c" + "checksumValue": "7bd4e53a8015534b5bbb58afe1a131b3989d3d4fca29bca685c44d34bcaa2555" } ], "fileName": "Modules/expat/expat.h" @@ -146,11 +146,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "4c49b5df2bc702f663ba3b5a52d1940ec363226b" + "checksumValue": "aca27f46d9fd387b63ce7ff2e4f172cad130b39b" }, { "algorithm": "SHA256", - "checksumValue": "b5ec29f6560acc183f1ee8ab92bb3aea17b87b4c2120cd2e3f78deba7a12491e" + "checksumValue": "f537add526ecda8389503b7ef45fb52b6217e4dc171dcc3a8dc6903ff6134726" } ], "fileName": "Modules/expat/siphash.h" @@ -188,11 +188,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "fed1311be8577491b7f63085a27014eabf2caec8" + "checksumValue": "b2ec0ad170ccc21e63fbcfc8d7404cdd756eedd3" }, { "algorithm": "SHA256", - "checksumValue": "3dc233eca5fa1bb7387c503f8a12d840707e4374b229e05d5657db9645725040" + "checksumValue": "92159d4e17393e56ee85f47d9fb31348695a58589899aa01e7536cdc88f60b85" } ], "fileName": "Modules/expat/xmlparse.c" @@ -1562,14 +1562,14 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "d4cf38d26e21a56654ffe4acd9cd5481164619626802328506a2869afab29ab3" + "checksumValue": "17aa6cfc5c4c219c09287abfc10bc13f0c06f30bb654b28bfe6f567ca646eb79" } ], - "downloadLocation": "https://github.com/libexpat/libexpat/releases/download/R_2_6_2/expat-2.6.2.tar.gz", + "downloadLocation": "https://github.com/libexpat/libexpat/releases/download/R_2_6_3/expat-2.6.3.tar.gz", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:libexpat_project:libexpat:2.6.2:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:libexpat_project:libexpat:2.6.3:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], @@ -1577,7 +1577,7 @@ "name": "expat", "originator": "Organization: Expat development team", "primaryPackagePurpose": "SOURCE", - "versionInfo": "2.6.2" + "versionInfo": "2.6.3" }, { "SPDXID": "SPDXRef-PACKAGE-hacl-star", diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 78b979698fc..06b30feef43 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -164,7 +164,7 @@ @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c _testinternalcapi/test_lock.c _testinternalcapi/pytime.c _testinternalcapi/set.c _testinternalcapi/test_critical_sections.c @MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c -@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c 
_testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c +@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c @MODULE__TESTCLINIC_LIMITED_TRUE@_testclinic_limited _testclinic_limited.c diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index a26714f9755..4b0aa0503f5 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -68,14 +68,13 @@ typedef struct { /* Imports from traceback. */ PyObject *traceback_extract_stack; - PyObject *cached_running_loop; // Borrowed reference - volatile uint64_t cached_running_loop_tsid; - /* Counter for autogenerated Task names */ uint64_t task_name_counter; +#ifndef Py_GIL_DISABLED futureiterobject *fi_freelist; Py_ssize_t fi_freelist_len; +#endif } asyncio_state; static inline asyncio_state * @@ -262,96 +261,15 @@ get_future_loop(asyncio_state *state, PyObject *fut) return PyObject_GetAttr(fut, &_Py_ID(_loop)); } - -static int -get_running_loop(asyncio_state *state, PyObject **loop) -{ - PyObject *rl; - - PyThreadState *ts = _PyThreadState_GET(); - uint64_t ts_id = PyThreadState_GetID(ts); - if (state->cached_running_loop_tsid == ts_id && - state->cached_running_loop != NULL) - { - // Fast path, check the cache. 
- rl = state->cached_running_loop; - } - else { - PyObject *ts_dict = _PyThreadState_GetDict(ts); // borrowed - if (ts_dict == NULL) { - goto not_found; - } - - rl = PyDict_GetItemWithError( - ts_dict, &_Py_ID(__asyncio_running_event_loop__)); // borrowed - if (rl == NULL) { - if (PyErr_Occurred()) { - goto error; - } - else { - goto not_found; - } - } - - state->cached_running_loop = rl; - state->cached_running_loop_tsid = ts_id; - } - - - if (rl == Py_None) { - goto not_found; - } - - *loop = Py_NewRef(rl); - return 0; - -not_found: - *loop = NULL; - return 0; - -error: - *loop = NULL; - return -1; -} - - -static int -set_running_loop(asyncio_state *state, PyObject *loop) -{ - PyObject *ts_dict = NULL; - - PyThreadState *tstate = _PyThreadState_GET(); - if (tstate != NULL) { - ts_dict = _PyThreadState_GetDict(tstate); // borrowed - } - - if (ts_dict == NULL) { - PyErr_SetString( - PyExc_RuntimeError, "thread-local storage is not available"); - return -1; - } - if (PyDict_SetItem( - ts_dict, &_Py_ID(__asyncio_running_event_loop__), loop) < 0) - { - return -1; - } - - state->cached_running_loop = loop; // borrowed, kept alive by ts_dict - state->cached_running_loop_tsid = PyThreadState_GetID(tstate); - - return 0; -} - - static PyObject * get_event_loop(asyncio_state *state) { PyObject *loop; PyObject *policy; - if (get_running_loop(state, &loop)) { - return NULL; - } + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + loop = Py_XNewRef(ts->asyncio_running_loop); + if (loop != NULL) { return loop; } @@ -1415,7 +1333,7 @@ FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored)) default: assert (0); } - assert(_Py_IsImmortal(ret)); + assert(_Py_IsImmortalLoose(ret)); return ret; } @@ -1602,29 +1520,29 @@ FutureIter_dealloc(futureiterobject *it) { PyTypeObject *tp = Py_TYPE(it); - // FutureIter is a heap type so any subclass must also be a heap type. assert(_PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE)); - PyObject *module = ((PyHeapTypeObject*)tp)->ht_module; - asyncio_state *state = NULL; - PyObject_GC_UnTrack(it); tp->tp_clear((PyObject *)it); +#ifndef Py_GIL_DISABLED // GH-115874: We can't use PyType_GetModuleByDef here as the type might have // already been cleared, which is also why we must check if ht_module != NULL. - // Due to this restriction, subclasses that belong to a different module - // will not be able to use the free list. + PyObject *module = ((PyHeapTypeObject*)tp)->ht_module; + asyncio_state *state = NULL; if (module && _PyModule_GetDef(module) == &_asynciomodule) { state = get_asyncio_state(module); } + // TODO GH-121621: This should be moved to thread state as well. 
if (state && state->fi_freelist_len < FI_FREELIST_MAXLEN) { state->fi_freelist_len++; it->future = (FutureObj*) state->fi_freelist; state->fi_freelist = it; } - else { + else +#endif + { PyObject_GC_Del(it); Py_DECREF(tp); } @@ -1828,6 +1746,7 @@ future_new_iter(PyObject *fut) asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); ENSURE_FUTURE_ALIVE(state, fut) +#ifndef Py_GIL_DISABLED if (state->fi_freelist_len) { state->fi_freelist_len--; it = state->fi_freelist; @@ -1835,7 +1754,9 @@ future_new_iter(PyObject *fut) it->future = NULL; _Py_NewReference((PyObject*) it); } - else { + else +#endif + { it = PyObject_GC_New(futureiterobject, state->FutureIterType); if (it == NULL) { return NULL; @@ -2007,14 +1928,11 @@ static int enter_task(asyncio_state *state, PyObject *loop, PyObject *task) { PyObject *item; - Py_hash_t hash; - hash = PyObject_Hash(loop); - if (hash == -1) { + int res = PyDict_SetDefaultRef(state->current_tasks, loop, task, &item); + if (res < 0) { return -1; } - item = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); - if (item != NULL) { - Py_INCREF(item); + else if (res == 1) { PyErr_Format( PyExc_RuntimeError, "Cannot enter into task %R while another " \ @@ -2023,36 +1941,58 @@ enter_task(asyncio_state *state, PyObject *loop, PyObject *task) Py_DECREF(item); return -1; } - if (PyErr_Occurred()) { - return -1; - } - return _PyDict_SetItem_KnownHash(state->current_tasks, loop, task, hash); + Py_DECREF(item); + return 0; +} + +static int +err_leave_task(PyObject *item, PyObject *task) +{ + PyErr_Format( + PyExc_RuntimeError, + "Leaving task %R does not match the current task %R.", + task, item); + return -1; } +static int +leave_task_predicate(PyObject *item, void *task) +{ + if (item != task) { + return err_leave_task(item, (PyObject *)task); + } + return 1; +} static int leave_task(asyncio_state *state, PyObject *loop, PyObject *task) /*[clinic end generated code: output=0ebf6db4b858fb41 input=51296a46313d1ad8]*/ { - PyObject *item; - Py_hash_t hash; - hash = PyObject_Hash(loop); - if (hash == -1) { - return -1; + int res = _PyDict_DelItemIf(state->current_tasks, loop, + leave_task_predicate, task); + if (res == 0) { + // task was not found + return err_leave_task(Py_None, task); } - item = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); - if (item != task) { - if (item == NULL) { - /* Not entered, replace with None */ - item = Py_None; - } - PyErr_Format( - PyExc_RuntimeError, - "Leaving task %R does not match the current task %R.", - task, item, NULL); - return -1; + return res; +} + +static PyObject * +swap_current_task_lock_held(PyDictObject *current_tasks, PyObject *loop, + Py_hash_t hash, PyObject *task) +{ + PyObject *prev_task; + if (_PyDict_GetItemRef_KnownHash_LockHeld(current_tasks, loop, hash, &prev_task) < 0) { + return NULL; + } + if (_PyDict_SetItem_KnownHash_LockHeld(current_tasks, loop, task, hash) < 0) { + Py_XDECREF(prev_task); + return NULL; + } + if (prev_task == NULL) { + Py_RETURN_NONE; } - return _PyDict_DelItem_KnownHash(state->current_tasks, loop, hash); + return prev_task; } static PyObject * @@ -2070,24 +2010,15 @@ swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) return prev_task; } - Py_hash_t hash; - hash = PyObject_Hash(loop); + Py_hash_t hash = PyObject_Hash(loop); if (hash == -1) { return NULL; } - prev_task = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); - if (prev_task == NULL) { - if (PyErr_Occurred()) { - return NULL; - } - prev_task = Py_None; - } - 
Py_INCREF(prev_task); - if (_PyDict_SetItem_KnownHash(state->current_tasks, loop, task, hash) == -1) { - Py_DECREF(prev_task); - return NULL; - } + PyDictObject *current_tasks = (PyDictObject *)state->current_tasks; + Py_BEGIN_CRITICAL_SECTION(current_tasks); + prev_task = swap_current_task_lock_held(current_tasks, loop, hash, task); + Py_END_CRITICAL_SECTION(); return prev_task; } @@ -2149,7 +2080,12 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, // optimization: defer task name formatting // store the task counter as PyLong in the name // for deferred formatting in get_name - name = PyLong_FromUnsignedLongLong(++state->task_name_counter); +#ifdef Py_GIL_DISABLED + unsigned long long counter = _Py_atomic_add_uint64(&state->task_name_counter, 1) + 1; +#else + unsigned long long counter = ++state->task_name_counter; +#endif + name = PyLong_FromUnsignedLongLong(counter); } else if (!PyUnicode_CheckExact(name)) { name = PyObject_Str(name); } else { @@ -2523,7 +2459,11 @@ static PyObject * _asyncio_Task_get_coro_impl(TaskObj *self) /*[clinic end generated code: output=bcac27c8cc6c8073 input=d2e8606c42a7b403]*/ { - return Py_NewRef(self->task_coro); + if (self->task_coro) { + return Py_NewRef(self->task_coro); + } + + Py_RETURN_NONE; } /*[clinic input] @@ -3270,11 +3210,8 @@ static PyObject * _asyncio__get_running_loop_impl(PyObject *module) /*[clinic end generated code: output=b4390af721411a0a input=0a21627e25a4bd43]*/ { - PyObject *loop; - asyncio_state *state = get_asyncio_state(module); - if (get_running_loop(state, &loop)) { - return NULL; - } + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + PyObject *loop = Py_XNewRef(ts->asyncio_running_loop); if (loop == NULL) { /* There's no currently running event loop */ Py_RETURN_NONE; @@ -3297,10 +3234,11 @@ static PyObject * _asyncio__set_running_loop(PyObject *module, PyObject *loop) /*[clinic end generated code: output=ae56bf7a28ca189a input=4c9720233d606604]*/ { - asyncio_state *state = get_asyncio_state(module); - if (set_running_loop(state, loop)) { - return NULL; + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + if (loop == Py_None) { + loop = NULL; } + Py_XSETREF(ts->asyncio_running_loop, Py_XNewRef(loop)); Py_RETURN_NONE; } @@ -3338,14 +3276,13 @@ _asyncio_get_running_loop_impl(PyObject *module) /*[clinic end generated code: output=c247b5f9e529530e input=2a3bf02ba39f173d]*/ { PyObject *loop; - asyncio_state *state = get_asyncio_state(module); - if (get_running_loop(state, &loop)) { - return NULL; - } + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + loop = Py_XNewRef(ts->asyncio_running_loop); if (loop == NULL) { /* There's no currently running event loop */ PyErr_SetString( PyExc_RuntimeError, "no running event loop"); + return NULL; } return loop; } @@ -3547,6 +3484,7 @@ _asyncio_current_task_impl(PyObject *module, PyObject *loop) static void module_free_freelists(asyncio_state *state) { +#ifndef Py_GIL_DISABLED PyObject *next; PyObject *current; @@ -3561,6 +3499,7 @@ module_free_freelists(asyncio_state *state) } assert(state->fi_freelist_len == 0); state->fi_freelist = NULL; +#endif } static int @@ -3590,14 +3529,6 @@ module_traverse(PyObject *mod, visitproc visit, void *arg) Py_VISIT(state->iscoroutine_typecache); Py_VISIT(state->context_kwname); - - // Visit freelist. 
- PyObject *next = (PyObject*) state->fi_freelist; - while (next != NULL) { - PyObject *current = next; - Py_VISIT(current); - next = (PyObject*) ((futureiterobject*) current)->future; - } return 0; } diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 644a90a8c71..c821f18b170 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -2537,12 +2537,9 @@ _collections__count_elements_impl(PyObject *module, PyObject *mapping, if (key == NULL) break; - if (!PyUnicode_CheckExact(key) || - (hash = _PyASCIIObject_CAST(key)->hash) == -1) - { - hash = PyObject_Hash(key); - if (hash == -1) - goto done; + hash = _PyObject_HashFast(key); + if (hash == -1) { + goto done; } oldval = _PyDict_GetItem_KnownHash(mapping, key, hash); diff --git a/Modules/_csv.c b/Modules/_csv.c index 9d6b66d4938..8d35d3bcee3 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -749,7 +749,6 @@ parse_process_char(ReaderObj *self, _csvstate *module_state, Py_UCS4 c) } else if (c == dialect->escapechar) { /* possible escaped character */ - self->unquoted_field = false; self->state = ESCAPED_CHAR; } else if (c == ' ' && dialect->skipinitialspace) diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 1d9534671a4..236adaf6c4c 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -2288,9 +2288,14 @@ PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds) if (!meth) { return -1; } - x = PyDict_SetItemString(((PyTypeObject*)self)->tp_dict, - ml->ml_name, - meth); + PyObject *name = PyUnicode_FromString(ml->ml_name); + if (name == NULL) { + Py_DECREF(meth); + return -1; + } + PyUnicode_InternInPlace(&name); + x = PyDict_SetItem(((PyTypeObject*)self)->tp_dict, name, meth); + Py_DECREF(name); Py_DECREF(meth); if (x == -1) { return -1; diff --git a/Modules/_ctypes/_ctypes_test.c b/Modules/_ctypes/_ctypes_test.c index f46f6362ddd..2d4877d42d7 100644 --- a/Modules/_ctypes/_ctypes_test.c +++ b/Modules/_ctypes/_ctypes_test.c @@ -178,7 +178,7 @@ _testfunc_array_in_struct3B_set_defaults(void) /* * Test3C struct tests the MAX_STRUCT_SIZE 32. Structs containing arrays of up - * to four floating point types are passed in registers on Arm platforms. + * to four floating-point types are passed in registers on Arm platforms. * This struct is used for within bounds test on Arm platfroms and for an * out-of-bounds tests for platfroms where MAX_STRUCT_SIZE is less than 32. * See gh-110190. @@ -202,7 +202,7 @@ _testfunc_array_in_struct3C_set_defaults(void) /* * Test3D struct tests the MAX_STRUCT_SIZE 64. Structs containing arrays of up - * to eight floating point types are passed in registers on PPC64LE platforms. + * to eight floating-point types are passed in registers on PPC64LE platforms. * This struct is used for within bounds test on PPC64LE platfroms and for an * out-of-bounds tests for platfroms where MAX_STRUCT_SIZE is less than 64. * See gh-110190. 
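The ``_collections._count_elements`` hunk above only changes how the key's
hash is obtained; the observable behaviour of :meth:`collections.Counter.update`
stays the same. A pure-Python sketch of what the helper does (the function
name ``count_elements`` is invented here for illustration)::

    from collections import Counter

    def count_elements(mapping, iterable):
        # Bump the count for each element, defaulting missing keys to 0,
        # mirroring the C helper used by Counter.update().
        mapping_get = mapping.get
        for elem in iterable:
            mapping[elem] = mapping_get(elem, 0) + 1

    c = Counter()
    count_elements(c, "abracadabra")
    assert c == Counter(a=5, b=2, r=2, c=1, d=1)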
diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c index 125c72dbbe7..bbbb62c9066 100644 --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -19,7 +19,13 @@ static const char PyCursesVersion[] = "2.1"; #include "py_curses.h" -#include +#if defined(HAVE_NCURSESW_PANEL_H) +# include +#elif defined(HAVE_NCURSES_PANEL_H) +# include +#elif defined(HAVE_PANEL_H) +# include +#endif typedef struct { PyObject *PyCursesError; diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index ee3d4c6eae7..d32cff66144 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -128,7 +128,7 @@ static const char PyCursesVersion[] = "2.2"; #include #endif -#if !defined(HAVE_NCURSES_H) && (defined(sgi) || defined(__sun) || defined(SCO5)) +#if !defined(NCURSES_VERSION) && (defined(sgi) || defined(__sun) || defined(SCO5)) #define STRICT_SYSV_CURSES /* Don't use ncurses extensions */ typedef chtype attr_t; /* No attr_t type is available */ #endif diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index 135d6cb683f..4706a93bd15 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -7207,49 +7207,51 @@ _datetime_exec(PyObject *module) Py_DECREF(value); \ } while(0) - /* timedelta values */ - PyObject *d = _PyType_GetDict(&PyDateTime_DeltaType); - DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); - DATETIME_ADD_MACRO(d, "min", new_delta(-MAX_DELTA_DAYS, 0, 0, 0)); - DATETIME_ADD_MACRO(d, "max", - new_delta(MAX_DELTA_DAYS, 24*3600-1, 1000000-1, 0)); - - /* date values */ - d = _PyType_GetDict(&PyDateTime_DateType); - DATETIME_ADD_MACRO(d, "min", new_date(1, 1, 1)); - DATETIME_ADD_MACRO(d, "max", new_date(MAXYEAR, 12, 31)); - DATETIME_ADD_MACRO(d, "resolution", new_delta(1, 0, 0, 0)); - - /* time values */ - d = _PyType_GetDict(&PyDateTime_TimeType); - DATETIME_ADD_MACRO(d, "min", new_time(0, 0, 0, 0, Py_None, 0)); - DATETIME_ADD_MACRO(d, "max", new_time(23, 59, 59, 999999, Py_None, 0)); - DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); - - /* datetime values */ - d = _PyType_GetDict(&PyDateTime_DateTimeType); - DATETIME_ADD_MACRO(d, "min", - new_datetime(1, 1, 1, 0, 0, 0, 0, Py_None, 0)); - DATETIME_ADD_MACRO(d, "max", new_datetime(MAXYEAR, 12, 31, 23, 59, 59, - 999999, Py_None, 0)); - DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); - - /* timezone values */ - d = _PyType_GetDict(&PyDateTime_TimeZoneType); - if (PyDict_SetItemString(d, "utc", (PyObject *)&utc_timezone) < 0) { - goto error; - } + if (!reloading) { + /* timedelta values */ + PyObject *d = _PyType_GetDict(&PyDateTime_DeltaType); + DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); + DATETIME_ADD_MACRO(d, "min", new_delta(-MAX_DELTA_DAYS, 0, 0, 0)); + DATETIME_ADD_MACRO(d, "max", + new_delta(MAX_DELTA_DAYS, 24*3600-1, 1000000-1, 0)); + + /* date values */ + d = _PyType_GetDict(&PyDateTime_DateType); + DATETIME_ADD_MACRO(d, "min", new_date(1, 1, 1)); + DATETIME_ADD_MACRO(d, "max", new_date(MAXYEAR, 12, 31)); + DATETIME_ADD_MACRO(d, "resolution", new_delta(1, 0, 0, 0)); + + /* time values */ + d = _PyType_GetDict(&PyDateTime_TimeType); + DATETIME_ADD_MACRO(d, "min", new_time(0, 0, 0, 0, Py_None, 0)); + DATETIME_ADD_MACRO(d, "max", new_time(23, 59, 59, 999999, Py_None, 0)); + DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); + + /* datetime values */ + d = _PyType_GetDict(&PyDateTime_DateTimeType); + DATETIME_ADD_MACRO(d, "min", + new_datetime(1, 1, 1, 0, 0, 0, 0, Py_None, 0)); + DATETIME_ADD_MACRO(d, "max", new_datetime(MAXYEAR, 
12, 31, 23, 59, 59, + 999999, Py_None, 0)); + DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); + + /* timezone values */ + d = _PyType_GetDict(&PyDateTime_TimeZoneType); + if (PyDict_SetItemString(d, "utc", (PyObject *)&utc_timezone) < 0) { + goto error; + } - /* bpo-37642: These attributes are rounded to the nearest minute for backwards - * compatibility, even though the constructor will accept a wider range of - * values. This may change in the future.*/ + /* bpo-37642: These attributes are rounded to the nearest minute for backwards + * compatibility, even though the constructor will accept a wider range of + * values. This may change in the future.*/ - /* -23:59 */ - DATETIME_ADD_MACRO(d, "min", create_timezone_from_delta(-1, 60, 0, 1)); + /* -23:59 */ + DATETIME_ADD_MACRO(d, "min", create_timezone_from_delta(-1, 60, 0, 1)); - /* +23:59 */ - DATETIME_ADD_MACRO( - d, "max", create_timezone_from_delta(0, (23 * 60 + 59) * 60, 0, 0)); + /* +23:59 */ + DATETIME_ADD_MACRO( + d, "max", create_timezone_from_delta(0, (23 * 60 + 59) * 60, 0, 0)); + } #undef DATETIME_ADD_MACRO diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 2daa24c823a..29ae5f402a0 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -76,8 +76,9 @@ typedef struct { #ifndef WITH_DECIMAL_CONTEXTVAR /* Key for thread state dictionary */ PyObject *tls_context_key; - /* Invariant: NULL or the most recently accessed thread local context */ - struct PyDecContextObject *cached_context; + /* Invariant: NULL or a strong reference to the most recently accessed + thread local context. */ + struct PyDecContextObject *cached_context; /* Not borrowed */ #else PyObject *current_context_var; #endif @@ -1390,6 +1391,10 @@ context_new(PyTypeObject *type, PyObject *args UNUSED, PyObject *kwds UNUSED) CtxCaps(self) = 1; self->tstate = NULL; + if (type == state->PyDecContext_Type) { + PyObject_GC_Track(self); + } + assert(PyObject_GC_IsTracked((PyObject *)self)); return (PyObject *)self; } @@ -1415,12 +1420,6 @@ context_dealloc(PyDecContextObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); -#ifndef WITH_DECIMAL_CONTEXTVAR - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); - if (self == state->cached_context) { - state->cached_context = NULL; - } -#endif (void)context_clear(self); tp->tp_free(self); Py_DECREF(tp); @@ -1519,7 +1518,7 @@ init_extended_context(PyObject *v) #ifdef EXTRA_FUNCTIONALITY /* Factory function for creating IEEE interchange format contexts */ static PyObject * -ieee_context(PyObject *dummy UNUSED, PyObject *v) +ieee_context(PyObject *module, PyObject *v) { PyObject *context; mpd_ssize_t bits; @@ -1536,7 +1535,7 @@ ieee_context(PyObject *dummy UNUSED, PyObject *v) goto error; } - decimal_state *state = get_module_state_by_def(Py_TYPE(v)); + decimal_state *state = get_module_state(module); context = PyObject_CallObject((PyObject *)state->PyDecContext_Type, NULL); if (context == NULL) { return NULL; @@ -1697,7 +1696,8 @@ current_context_from_dict(decimal_state *modstate) /* Cache the context of the current thread, assuming that it * will be accessed several times before a thread switch. 
*/ - modstate->cached_context = (PyDecContextObject *)tl_context; + Py_XSETREF(modstate->cached_context, + (PyDecContextObject *)Py_NewRef(tl_context)); modstate->cached_context->tstate = tstate; /* Borrowed reference with refcount==1 */ @@ -1765,7 +1765,7 @@ PyDec_SetCurrentContext(PyObject *self, PyObject *v) Py_INCREF(v); } - state->cached_context = NULL; + Py_CLEAR(state->cached_context); if (PyDict_SetItem(dict, state->tls_context_key, v) < 0) { Py_DECREF(v); return NULL; @@ -2038,6 +2038,10 @@ PyDecType_New(PyTypeObject *type) MPD(dec)->alloc = _Py_DEC_MINALLOC; MPD(dec)->data = dec->data; + if (type == state->PyDec_Type) { + PyObject_GC_Track(dec); + } + assert(PyObject_GC_IsTracked((PyObject *)dec)); return (PyObject *)dec; } #define dec_alloc(st) PyDecType_New((st)->PyDec_Type) @@ -6114,6 +6118,16 @@ decimal_traverse(PyObject *module, visitproc visit, void *arg) Py_VISIT(state->Rational); Py_VISIT(state->SignalTuple); + if (state->signal_map != NULL) { + for (DecCondMap *cm = state->signal_map; cm->name != NULL; cm++) { + Py_VISIT(cm->ex); + } + } + if (state->cond_map != NULL) { + for (DecCondMap *cm = state->cond_map + 1; cm->name != NULL; cm++) { + Py_VISIT(cm->ex); + } + } return 0; } @@ -6143,8 +6157,22 @@ decimal_clear(PyObject *module) Py_CLEAR(state->SignalTuple); Py_CLEAR(state->PyDecimal); - PyMem_Free(state->signal_map); - PyMem_Free(state->cond_map); + if (state->signal_map != NULL) { + for (DecCondMap *cm = state->signal_map; cm->name != NULL; cm++) { + Py_DECREF(cm->ex); + } + PyMem_Free(state->signal_map); + state->signal_map = NULL; + } + + if (state->cond_map != NULL) { + // cond_map[0].ex has borrowed a reference from signal_map[0].ex + for (DecCondMap *cm = state->cond_map + 1; cm->name != NULL; cm++) { + Py_DECREF(cm->ex); + } + PyMem_Free(state->cond_map); + state->cond_map = NULL; + } return 0; } diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index 3818e20b4f0..ec999582d2f 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -1213,12 +1213,8 @@ _elementtree_Element_extend_impl(ElementObject *self, PyTypeObject *cls, PyObject* seq; Py_ssize_t i; - seq = PySequence_Fast(elements, ""); + seq = PySequence_Fast(elements, "'elements' must be an iterable"); if (!seq) { - PyErr_Format( - PyExc_TypeError, - "expected sequence, not \"%.200s\"", Py_TYPE(elements)->tp_name - ); return NULL; } @@ -1918,12 +1914,8 @@ element_ass_subscr(PyObject* self_, PyObject* item, PyObject* value) } /* A new slice is actually being assigned */ - seq = PySequence_Fast(value, ""); + seq = PySequence_Fast(value, "assignment expects an iterable"); if (!seq) { - PyErr_Format( - PyExc_TypeError, - "expected sequence, not \"%.200s\"", Py_TYPE(value)->tp_name - ); return -1; } newlen = PySequence_Fast_GET_SIZE(seq); diff --git a/Modules/_interpchannelsmodule.c b/Modules/_interpchannelsmodule.c index ff8dacf5bd1..a8b4a8d76b0 100644 --- a/Modules/_interpchannelsmodule.c +++ b/Modules/_interpchannelsmodule.c @@ -18,7 +18,9 @@ #endif #define REGISTERS_HEAP_TYPES +#define HAS_UNBOUND_ITEMS #include "_interpreters_common.h" +#undef HAS_UNBOUND_ITEMS #undef REGISTERS_HEAP_TYPES @@ -511,8 +513,14 @@ _waiting_finish_releasing(_waiting_t *waiting) struct _channelitem; typedef struct _channelitem { + /* The interpreter that added the item to the queue. + The actual bound interpid is found in item->data. + This is necessary because item->data might be NULL, + meaning the interpreter has been destroyed. 
*/ + int64_t interpid; _PyCrossInterpreterData *data; _waiting_t *waiting; + int unboundop; struct _channelitem *next; } _channelitem; @@ -524,11 +532,22 @@ _channelitem_ID(_channelitem *item) static void _channelitem_init(_channelitem *item, - _PyCrossInterpreterData *data, _waiting_t *waiting) + int64_t interpid, _PyCrossInterpreterData *data, + _waiting_t *waiting, int unboundop) { + if (interpid < 0) { + interpid = _get_interpid(data); + } + else { + assert(data == NULL + || _PyCrossInterpreterData_INTERPID(data) < 0 + || interpid == _PyCrossInterpreterData_INTERPID(data)); + } *item = (_channelitem){ + .interpid = interpid, .data = data, .waiting = waiting, + .unboundop = unboundop, }; if (waiting != NULL) { waiting->itemid = _channelitem_ID(item); @@ -536,17 +555,15 @@ _channelitem_init(_channelitem *item, } static void -_channelitem_clear(_channelitem *item) +_channelitem_clear_data(_channelitem *item, int removed) { - item->next = NULL; - if (item->data != NULL) { // It was allocated in channel_send(). (void)_release_xid_data(item->data, XID_IGNORE_EXC & XID_FREE); item->data = NULL; } - if (item->waiting != NULL) { + if (item->waiting != NULL && removed) { if (item->waiting->status == WAITING_ACQUIRED) { _waiting_release(item->waiting, 0); } @@ -554,15 +571,23 @@ _channelitem_clear(_channelitem *item) } } +static void +_channelitem_clear(_channelitem *item) +{ + item->next = NULL; + _channelitem_clear_data(item, 1); +} + static _channelitem * -_channelitem_new(_PyCrossInterpreterData *data, _waiting_t *waiting) +_channelitem_new(int64_t interpid, _PyCrossInterpreterData *data, + _waiting_t *waiting, int unboundop) { _channelitem *item = GLOBAL_MALLOC(_channelitem); if (item == NULL) { PyErr_NoMemory(); return NULL; } - _channelitem_init(item, data, waiting); + _channelitem_init(item, interpid, data, waiting, unboundop); return item; } @@ -585,17 +610,48 @@ _channelitem_free_all(_channelitem *item) static void _channelitem_popped(_channelitem *item, - _PyCrossInterpreterData **p_data, _waiting_t **p_waiting) + _PyCrossInterpreterData **p_data, _waiting_t **p_waiting, + int *p_unboundop) { assert(item->waiting == NULL || item->waiting->status == WAITING_ACQUIRED); *p_data = item->data; *p_waiting = item->waiting; + *p_unboundop = item->unboundop; // We clear them here, so they won't be released in _channelitem_clear(). item->data = NULL; item->waiting = NULL; _channelitem_free(item); } +static int +_channelitem_clear_interpreter(_channelitem *item) +{ + assert(item->interpid >= 0); + if (item->data == NULL) { + // Its interpreter was already cleared (or it was never bound). + // For UNBOUND_REMOVE it should have been freed at that time. + assert(item->unboundop != UNBOUND_REMOVE); + return 0; + } + assert(_PyCrossInterpreterData_INTERPID(item->data) == item->interpid); + + switch (item->unboundop) { + case UNBOUND_REMOVE: + // The caller must free/clear it. + return 1; + case UNBOUND_ERROR: + case UNBOUND_REPLACE: + // We won't need the cross-interpreter data later + // so we completely throw it away. 
+ _channelitem_clear_data(item, 0); + return 0; + default: + Py_FatalError("not reachable"); + return -1; + } +} + + typedef struct _channelqueue { int64_t count; _channelitem *first; @@ -634,9 +690,10 @@ _channelqueue_free(_channelqueue *queue) static int _channelqueue_put(_channelqueue *queue, - _PyCrossInterpreterData *data, _waiting_t *waiting) + int64_t interpid, _PyCrossInterpreterData *data, + _waiting_t *waiting, int unboundop) { - _channelitem *item = _channelitem_new(data, waiting); + _channelitem *item = _channelitem_new(interpid, data, waiting, unboundop); if (item == NULL) { return -1; } @@ -659,7 +716,8 @@ _channelqueue_put(_channelqueue *queue, static int _channelqueue_get(_channelqueue *queue, - _PyCrossInterpreterData **p_data, _waiting_t **p_waiting) + _PyCrossInterpreterData **p_data, _waiting_t **p_waiting, + int *p_unboundop) { _channelitem *item = queue->first; if (item == NULL) { @@ -671,7 +729,7 @@ _channelqueue_get(_channelqueue *queue, } queue->count -= 1; - _channelitem_popped(item, p_data, p_waiting); + _channelitem_popped(item, p_data, p_waiting, p_unboundop); return 0; } @@ -737,7 +795,8 @@ _channelqueue_remove(_channelqueue *queue, _channelitem_id_t itemid, } queue->count -= 1; - _channelitem_popped(item, p_data, p_waiting); + int unboundop; + _channelitem_popped(item, p_data, p_waiting, &unboundop); } static void @@ -748,14 +807,17 @@ _channelqueue_clear_interpreter(_channelqueue *queue, int64_t interpid) while (next != NULL) { _channelitem *item = next; next = item->next; - if (_PyCrossInterpreterData_INTERPID(item->data) == interpid) { + int remove = (item->interpid == interpid) + ? _channelitem_clear_interpreter(item) + : 0; + if (remove) { + _channelitem_free(item); if (prev == NULL) { - queue->first = item->next; + queue->first = next; } else { - prev->next = item->next; + prev->next = next; } - _channelitem_free(item); queue->count -= 1; } else { @@ -1018,12 +1080,15 @@ typedef struct _channel { PyThread_type_lock mutex; _channelqueue *queue; _channelends *ends; + struct { + int unboundop; + } defaults; int open; struct _channel_closing *closing; } _channel_state; static _channel_state * -_channel_new(PyThread_type_lock mutex) +_channel_new(PyThread_type_lock mutex, int unboundop) { _channel_state *chan = GLOBAL_MALLOC(_channel_state); if (chan == NULL) { @@ -1041,6 +1106,7 @@ _channel_new(PyThread_type_lock mutex) GLOBAL_FREE(chan); return NULL; } + chan->defaults.unboundop = unboundop; chan->open = 1; chan->closing = NULL; return chan; @@ -1061,7 +1127,8 @@ _channel_free(_channel_state *chan) static int _channel_add(_channel_state *chan, int64_t interpid, - _PyCrossInterpreterData *data, _waiting_t *waiting) + _PyCrossInterpreterData *data, _waiting_t *waiting, + int unboundop) { int res = -1; PyThread_acquire_lock(chan->mutex, WAIT_LOCK); @@ -1075,7 +1142,7 @@ _channel_add(_channel_state *chan, int64_t interpid, goto done; } - if (_channelqueue_put(chan->queue, data, waiting) != 0) { + if (_channelqueue_put(chan->queue, interpid, data, waiting, unboundop) != 0) { goto done; } // Any errors past this point must cause a _waiting_release() call. 
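The channel changes above give every queued item a bound interpreter ID plus an "unbound" policy, so a channel can decide what to do with items whose sending interpreter has since been destroyed. A rough, untested sketch of the Python-level surface this enables follows; the function signatures come from the channelsmod_* changes later in this same patch and the numeric policy values from Modules/_interpreters_common.h further down, while the surrounding flow is assumed:

    import _interpchannels as channels

    # Values of the UNBOUND_* macros added in Modules/_interpreters_common.h.
    UNBOUND_REMOVE, UNBOUND_ERROR, UNBOUND_REPLACE = 1, 2, 3

    cid = channels.create(unboundop=UNBOUND_REPLACE)   # per-channel default policy
    channels.send(cid, b'spam', blocking=False)        # send's unboundop defaults to REPLACE
    print(channels.get_count(cid))                     # -> 1 item queued
    print(channels.get_channel_defaults(cid))          # -> 3 (the channel's unboundop)

    obj, unboundop = channels.recv(cid)
    # A live item comes back as (obj, None).  If the sending interpreter had
    # been destroyed before the item was received, UNBOUND_REPLACE drops the
    # data and recv() returns (None, 3) instead of raising or skipping the slot.
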
@@ -1088,7 +1155,8 @@ _channel_add(_channel_state *chan, int64_t interpid, static int _channel_next(_channel_state *chan, int64_t interpid, - _PyCrossInterpreterData **p_data, _waiting_t **p_waiting) + _PyCrossInterpreterData **p_data, _waiting_t **p_waiting, + int *p_unboundop) { int err = 0; PyThread_acquire_lock(chan->mutex, WAIT_LOCK); @@ -1102,11 +1170,15 @@ _channel_next(_channel_state *chan, int64_t interpid, goto done; } - int empty = _channelqueue_get(chan->queue, p_data, p_waiting); - assert(empty == 0 || empty == ERR_CHANNEL_EMPTY); + int empty = _channelqueue_get(chan->queue, p_data, p_waiting, p_unboundop); assert(!PyErr_Occurred()); - if (empty && chan->closing != NULL) { - chan->open = 0; + if (empty) { + assert(empty == ERR_CHANNEL_EMPTY); + if (chan->closing != NULL) { + chan->open = 0; + } + err = ERR_CHANNEL_EMPTY; + goto done; } done: @@ -1528,18 +1600,27 @@ _channels_release_cid_object(_channels *channels, int64_t cid) PyThread_release_lock(channels->mutex); } -static int64_t * +struct channel_id_and_info { + int64_t id; + int unboundop; +}; + +static struct channel_id_and_info * _channels_list_all(_channels *channels, int64_t *count) { - int64_t *cids = NULL; + struct channel_id_and_info *cids = NULL; PyThread_acquire_lock(channels->mutex, WAIT_LOCK); - int64_t *ids = PyMem_NEW(int64_t, (Py_ssize_t)(channels->numopen)); + struct channel_id_and_info *ids = + PyMem_NEW(struct channel_id_and_info, (Py_ssize_t)(channels->numopen)); if (ids == NULL) { goto done; } _channelref *ref = channels->head; for (int64_t i=0; ref != NULL; ref = ref->next, i++) { - ids[i] = ref->cid; + ids[i] = (struct channel_id_and_info){ + .id = ref->cid, + .unboundop = ref->chan->defaults.unboundop, + }; } *count = channels->numopen; @@ -1624,13 +1705,13 @@ _channel_finish_closing(_channel_state *chan) { // Create a new channel. static int64_t -channel_create(_channels *channels) +channel_create(_channels *channels, int unboundop) { PyThread_type_lock mutex = PyThread_allocate_lock(); if (mutex == NULL) { return ERR_CHANNEL_MUTEX_INIT; } - _channel_state *chan = _channel_new(mutex); + _channel_state *chan = _channel_new(mutex, unboundop); if (chan == NULL) { PyThread_free_lock(mutex); return -1; @@ -1662,7 +1743,7 @@ channel_destroy(_channels *channels, int64_t cid) // Optionally request to be notified when it is received. static int channel_send(_channels *channels, int64_t cid, PyObject *obj, - _waiting_t *waiting) + _waiting_t *waiting, int unboundop) { PyInterpreterState *interp = _get_current_interp(); if (interp == NULL) { @@ -1698,7 +1779,7 @@ channel_send(_channels *channels, int64_t cid, PyObject *obj, } // Add the data to the channel. - int res = _channel_add(chan, interpid, data, waiting); + int res = _channel_add(chan, interpid, data, waiting, unboundop); PyThread_release_lock(mutex); if (res != 0) { // We may chain an exception here: @@ -1735,7 +1816,7 @@ channel_clear_sent(_channels *channels, int64_t cid, _waiting_t *waiting) // Like channel_send(), but strictly wait for the object to be received. static int channel_send_wait(_channels *channels, int64_t cid, PyObject *obj, - PY_TIMEOUT_T timeout) + int unboundop, PY_TIMEOUT_T timeout) { // We use a stack variable here, so we must ensure that &waiting // is not held by any channel item at the point this function exits. @@ -1746,7 +1827,7 @@ channel_send_wait(_channels *channels, int64_t cid, PyObject *obj, } /* Queue up the object. 
*/ - int res = channel_send(channels, cid, obj, &waiting); + int res = channel_send(channels, cid, obj, &waiting, unboundop); if (res < 0) { assert(waiting.status == WAITING_NO_STATUS); goto finally; @@ -1788,7 +1869,7 @@ channel_send_wait(_channels *channels, int64_t cid, PyObject *obj, // The current interpreter gets associated with the recv end of the channel. // XXX Support a "wait" mutex? static int -channel_recv(_channels *channels, int64_t cid, PyObject **res) +channel_recv(_channels *channels, int64_t cid, PyObject **res, int *p_unboundop) { int err; *res = NULL; @@ -1816,13 +1897,15 @@ channel_recv(_channels *channels, int64_t cid, PyObject **res) // Pop off the next item from the channel. _PyCrossInterpreterData *data = NULL; _waiting_t *waiting = NULL; - err = _channel_next(chan, interpid, &data, &waiting); + err = _channel_next(chan, interpid, &data, &waiting, p_unboundop); PyThread_release_lock(mutex); if (err != 0) { return err; } else if (data == NULL) { + // The item was unbound. assert(!PyErr_Occurred()); + *res = NULL; return 0; } @@ -1915,6 +1998,23 @@ channel_is_associated(_channels *channels, int64_t cid, int64_t interpid, return (end != NULL && end->open); } +static int +_channel_get_count(_channels *channels, int64_t cid, Py_ssize_t *p_count) +{ + PyThread_type_lock mutex = NULL; + _channel_state *chan = NULL; + int err = _channels_lookup(channels, cid, &mutex, &chan); + if (err != 0) { + return err; + } + assert(chan != NULL); + int64_t count = chan->queue->count; + PyThread_release_lock(mutex); + + *p_count = (Py_ssize_t)count; + return 0; +} + /* channel info */ @@ -2615,10 +2715,10 @@ _get_current_channelend_type(int end) } if (cls == NULL) { // Force the module to be loaded, to register the type. - PyObject *highlevel = PyImport_ImportModule("interpreters.channel"); + PyObject *highlevel = PyImport_ImportModule("interpreters.channels"); if (highlevel == NULL) { PyErr_Clear(); - highlevel = PyImport_ImportModule("test.support.interpreters.channel"); + highlevel = PyImport_ImportModule("test.support.interpreters.channels"); if (highlevel == NULL) { return NULL; } @@ -2767,9 +2867,22 @@ clear_interpreter(void *data) static PyObject * -channelsmod_create(PyObject *self, PyObject *Py_UNUSED(ignored)) +channelsmod_create(PyObject *self, PyObject *args, PyObject *kwds) { - int64_t cid = channel_create(&_globals.channels); + static char *kwlist[] = {"unboundop", NULL}; + int unboundop; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "i:create", kwlist, + &unboundop)) + { + return NULL; + } + if (!check_unbound(unboundop)) { + PyErr_Format(PyExc_ValueError, + "unsupported unboundop %d", unboundop); + return NULL; + } + + int64_t cid = channel_create(&_globals.channels, unboundop); if (cid < 0) { (void)handle_channel_error(-1, self, cid); return NULL; @@ -2796,7 +2909,7 @@ channelsmod_create(PyObject *self, PyObject *Py_UNUSED(ignored)) } PyDoc_STRVAR(channelsmod_create_doc, -"channel_create() -> cid\n\ +"channel_create(unboundop) -> cid\n\ \n\ Create a new cross-interpreter channel and return a unique generated ID."); @@ -2831,7 +2944,8 @@ static PyObject * channelsmod_list_all(PyObject *self, PyObject *Py_UNUSED(ignored)) { int64_t count = 0; - int64_t *cids = _channels_list_all(&_globals.channels, &count); + struct channel_id_and_info *cids = + _channels_list_all(&_globals.channels, &count); if (cids == NULL) { if (count == 0) { return PyList_New(0); @@ -2848,19 +2962,26 @@ channelsmod_list_all(PyObject *self, PyObject *Py_UNUSED(ignored)) ids = NULL; goto finally; } - 
int64_t *cur = cids; + struct channel_id_and_info *cur = cids; for (int64_t i=0; i < count; cur++, i++) { PyObject *cidobj = NULL; - int err = newchannelid(state->ChannelIDType, *cur, 0, + int err = newchannelid(state->ChannelIDType, cur->id, 0, &_globals.channels, 0, 0, (channelid **)&cidobj); - if (handle_channel_error(err, self, *cur)) { + if (handle_channel_error(err, self, cur->id)) { assert(cidobj == NULL); Py_SETREF(ids, NULL); break; } assert(cidobj != NULL); - PyList_SET_ITEM(ids, (Py_ssize_t)i, cidobj); + + PyObject *item = Py_BuildValue("Oi", cidobj, cur->unboundop); + Py_DECREF(cidobj); + if (item == NULL) { + Py_SETREF(ids, NULL); + break; + } + PyList_SET_ITEM(ids, (Py_ssize_t)i, item); } finally: @@ -2942,16 +3063,24 @@ receive end."); static PyObject * channelsmod_send(PyObject *self, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"cid", "obj", "blocking", "timeout", NULL}; + static char *kwlist[] = {"cid", "obj", "unboundop", "blocking", "timeout", + NULL}; struct channel_id_converter_data cid_data = { .module = self, }; PyObject *obj; + int unboundop = UNBOUND_REPLACE; int blocking = 1; PyObject *timeout_obj = NULL; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&O|$pO:channel_send", kwlist, + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&O|i$pO:channel_send", kwlist, channel_id_converter, &cid_data, &obj, - &blocking, &timeout_obj)) { + &unboundop, &blocking, &timeout_obj)) + { + return NULL; + } + if (!check_unbound(unboundop)) { + PyErr_Format(PyExc_ValueError, + "unsupported unboundop %d", unboundop); return NULL; } @@ -2964,10 +3093,10 @@ channelsmod_send(PyObject *self, PyObject *args, PyObject *kwds) /* Queue up the object. */ int err = 0; if (blocking) { - err = channel_send_wait(&_globals.channels, cid, obj, timeout); + err = channel_send_wait(&_globals.channels, cid, obj, unboundop, timeout); } else { - err = channel_send(&_globals.channels, cid, obj, NULL); + err = channel_send(&_globals.channels, cid, obj, NULL, unboundop); } if (handle_channel_error(err, self, cid)) { return NULL; @@ -2977,7 +3106,7 @@ channelsmod_send(PyObject *self, PyObject *args, PyObject *kwds) } PyDoc_STRVAR(channelsmod_send_doc, -"channel_send(cid, obj, blocking=True)\n\ +"channel_send(cid, obj, *, blocking=True, timeout=None)\n\ \n\ Add the object's data to the channel's queue.\n\ By default this waits for the object to be received."); @@ -2985,17 +3114,24 @@ By default this waits for the object to be received."); static PyObject * channelsmod_send_buffer(PyObject *self, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"cid", "obj", "blocking", "timeout", NULL}; + static char *kwlist[] = {"cid", "obj", "unboundop", "blocking", "timeout", + NULL}; struct channel_id_converter_data cid_data = { .module = self, }; PyObject *obj; + int unboundop = UNBOUND_REPLACE; int blocking = 1; PyObject *timeout_obj = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwds, - "O&O|$pO:channel_send_buffer", kwlist, + "O&O|i$pO:channel_send_buffer", kwlist, channel_id_converter, &cid_data, &obj, - &blocking, &timeout_obj)) { + &unboundop, &blocking, &timeout_obj)) { + return NULL; + } + if (!check_unbound(unboundop)) { + PyErr_Format(PyExc_ValueError, + "unsupported unboundop %d", unboundop); return NULL; } @@ -3013,10 +3149,11 @@ channelsmod_send_buffer(PyObject *self, PyObject *args, PyObject *kwds) /* Queue up the object. 
*/ int err = 0; if (blocking) { - err = channel_send_wait(&_globals.channels, cid, tempobj, timeout); + err = channel_send_wait( + &_globals.channels, cid, tempobj, unboundop, timeout); } else { - err = channel_send(&_globals.channels, cid, tempobj, NULL); + err = channel_send(&_globals.channels, cid, tempobj, NULL, unboundop); } Py_DECREF(tempobj); if (handle_channel_error(err, self, cid)) { @@ -3027,7 +3164,7 @@ channelsmod_send_buffer(PyObject *self, PyObject *args, PyObject *kwds) } PyDoc_STRVAR(channelsmod_send_buffer_doc, -"channel_send_buffer(cid, obj, blocking=True)\n\ +"channel_send_buffer(cid, obj, *, blocking=True, timeout=None)\n\ \n\ Add the object's buffer to the channel's queue.\n\ By default this waits for the object to be received."); @@ -3048,25 +3185,28 @@ channelsmod_recv(PyObject *self, PyObject *args, PyObject *kwds) cid = cid_data.cid; PyObject *obj = NULL; - int err = channel_recv(&_globals.channels, cid, &obj); - if (handle_channel_error(err, self, cid)) { - return NULL; - } - Py_XINCREF(dflt); - if (obj == NULL) { + int unboundop = 0; + int err = channel_recv(&_globals.channels, cid, &obj, &unboundop); + if (err == ERR_CHANNEL_EMPTY && dflt != NULL) { // Use the default. - if (dflt == NULL) { - (void)handle_channel_error(ERR_CHANNEL_EMPTY, self, cid); - return NULL; - } obj = Py_NewRef(dflt); + err = 0; } - Py_XDECREF(dflt); - return obj; + else if (handle_channel_error(err, self, cid)) { + return NULL; + } + else if (obj == NULL) { + // The item was unbound. + return Py_BuildValue("Oi", Py_None, unboundop); + } + + PyObject *res = Py_BuildValue("OO", obj, Py_None); + Py_DECREF(obj); + return res; } PyDoc_STRVAR(channelsmod_recv_doc, -"channel_recv(cid, [default]) -> obj\n\ +"channel_recv(cid, [default]) -> (obj, unboundop)\n\ \n\ Return a new object from the data at the front of the channel's queue.\n\ \n\ @@ -3167,6 +3307,34 @@ Close the channel for the current interpreter. 'send' and 'recv'\n\ (bool) may be used to indicate the ends to close. By default both\n\ ends are closed. 
Closing an already closed end is a noop."); +static PyObject * +channelsmod_get_count(PyObject *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"cid", NULL}; + struct channel_id_converter_data cid_data = { + .module = self, + }; + if (!PyArg_ParseTupleAndKeywords(args, kwds, + "O&:get_count", kwlist, + channel_id_converter, &cid_data)) { + return NULL; + } + int64_t cid = cid_data.cid; + + Py_ssize_t count = -1; + int err = _channel_get_count(&_globals.channels, cid, &count); + if (handle_channel_error(err, self, cid)) { + return NULL; + } + assert(count >= 0); + return PyLong_FromSsize_t(count); +} + +PyDoc_STRVAR(channelsmod_get_count_doc, +"get_count(cid)\n\ +\n\ +Return the number of items in the channel."); + static PyObject * channelsmod_get_info(PyObject *self, PyObject *args, PyObject *kwds) { @@ -3194,6 +3362,38 @@ PyDoc_STRVAR(channelsmod_get_info_doc, \n\ Return details about the channel."); +static PyObject * +channelsmod_get_channel_defaults(PyObject *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"cid", NULL}; + struct channel_id_converter_data cid_data = { + .module = self, + }; + if (!PyArg_ParseTupleAndKeywords(args, kwds, + "O&:get_channel_defaults", kwlist, + channel_id_converter, &cid_data)) { + return NULL; + } + int64_t cid = cid_data.cid; + + PyThread_type_lock mutex = NULL; + _channel_state *channel = NULL; + int err = _channels_lookup(&_globals.channels, cid, &mutex, &channel); + if (handle_channel_error(err, self, cid)) { + return NULL; + } + int unboundop = channel->defaults.unboundop; + PyThread_release_lock(mutex); + + PyObject *defaults = Py_BuildValue("i", unboundop); + return defaults; +} + +PyDoc_STRVAR(channelsmod_get_channel_defaults_doc, +"get_channel_defaults(cid)\n\ +\n\ +Return the channel's default values, set when it was created."); + static PyObject * channelsmod__channel_id(PyObject *self, PyObject *args, PyObject *kwds) { @@ -3240,8 +3440,8 @@ channelsmod__register_end_types(PyObject *self, PyObject *args, PyObject *kwds) } static PyMethodDef module_functions[] = { - {"create", channelsmod_create, - METH_NOARGS, channelsmod_create_doc}, + {"create", _PyCFunction_CAST(channelsmod_create), + METH_VARARGS | METH_KEYWORDS, channelsmod_create_doc}, {"destroy", _PyCFunction_CAST(channelsmod_destroy), METH_VARARGS | METH_KEYWORDS, channelsmod_destroy_doc}, {"list_all", channelsmod_list_all, @@ -3258,8 +3458,12 @@ static PyMethodDef module_functions[] = { METH_VARARGS | METH_KEYWORDS, channelsmod_close_doc}, {"release", _PyCFunction_CAST(channelsmod_release), METH_VARARGS | METH_KEYWORDS, channelsmod_release_doc}, + {"get_count", _PyCFunction_CAST(channelsmod_get_count), + METH_VARARGS | METH_KEYWORDS, channelsmod_get_count_doc}, {"get_info", _PyCFunction_CAST(channelsmod_get_info), METH_VARARGS | METH_KEYWORDS, channelsmod_get_info_doc}, + {"get_channel_defaults", _PyCFunction_CAST(channelsmod_get_channel_defaults), + METH_VARARGS | METH_KEYWORDS, channelsmod_get_channel_defaults_doc}, {"_channel_id", _PyCFunction_CAST(channelsmod__channel_id), METH_VARARGS | METH_KEYWORDS, NULL}, {"_register_end_types", _PyCFunction_CAST(channelsmod__register_end_types), diff --git a/Modules/_interpqueuesmodule.c b/Modules/_interpqueuesmodule.c index 556953db6b8..5b2d8a44ec7 100644 --- a/Modules/_interpqueuesmodule.c +++ b/Modules/_interpqueuesmodule.c @@ -9,7 +9,9 @@ #include "pycore_crossinterp.h" // struct _xid #define REGISTERS_HEAP_TYPES +#define HAS_UNBOUND_ITEMS #include "_interpreters_common.h" +#undef HAS_UNBOUND_ITEMS 
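The queues module below opts into the same HAS_UNBOUND_ITEMS machinery. Judging from the argument-parsing changes further down ("nii:create", "O&Oii:put", and the three-element tuple built by get()), the corresponding _interpqueues surface would look roughly like the sketch below; the fmt value is a placeholder and the flow is illustrative rather than taken from the patch:

    import _interpqueues as queues

    UNBOUND_REPLACE = 3
    FMT = 0                                    # placeholder format code

    qid = queues.create(maxsize=0, fmt=FMT, unboundop=UNBOUND_REPLACE)
    queues.put(qid, b'spam', FMT, UNBOUND_REPLACE)

    obj, fmt, unboundop = queues.get(qid)      # live item -> (b'spam', FMT, None)
                                               # unbound   -> (None, FMT, <unboundop>)
    print(queues.get_queue_defaults(qid))      # -> (FMT, UNBOUND_REPLACE) per this patch
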
#undef REGISTERS_HEAP_TYPES @@ -58,7 +60,6 @@ _release_xid_data(_PyCrossInterpreterData *data, int flags) return res; } - static PyInterpreterState * _get_current_interp(void) { @@ -394,42 +395,67 @@ handle_queue_error(int err, PyObject *mod, int64_t qid) struct _queueitem; typedef struct _queueitem { + /* The interpreter that added the item to the queue. + The actual bound interpid is found in item->data. + This is necessary because item->data might be NULL, + meaning the interpreter has been destroyed. */ + int64_t interpid; _PyCrossInterpreterData *data; int fmt; + int unboundop; struct _queueitem *next; } _queueitem; static void _queueitem_init(_queueitem *item, - _PyCrossInterpreterData *data, int fmt) + int64_t interpid, _PyCrossInterpreterData *data, + int fmt, int unboundop) { + if (interpid < 0) { + interpid = _get_interpid(data); + } + else { + assert(data == NULL + || _PyCrossInterpreterData_INTERPID(data) < 0 + || interpid == _PyCrossInterpreterData_INTERPID(data)); + } + assert(check_unbound(unboundop)); *item = (_queueitem){ + .interpid = interpid, .data = data, .fmt = fmt, + .unboundop = unboundop, }; } +static void +_queueitem_clear_data(_queueitem *item) +{ + if (item->data == NULL) { + return; + } + // It was allocated in queue_put(). + (void)_release_xid_data(item->data, XID_IGNORE_EXC & XID_FREE); + item->data = NULL; +} + static void _queueitem_clear(_queueitem *item) { item->next = NULL; - - if (item->data != NULL) { - // It was allocated in queue_put(). - (void)_release_xid_data(item->data, XID_IGNORE_EXC & XID_FREE); - item->data = NULL; - } + _queueitem_clear_data(item); } static _queueitem * -_queueitem_new(_PyCrossInterpreterData *data, int fmt) +_queueitem_new(int64_t interpid, _PyCrossInterpreterData *data, + int fmt, int unboundop) { _queueitem *item = GLOBAL_MALLOC(_queueitem); if (item == NULL) { PyErr_NoMemory(); return NULL; } - _queueitem_init(item, data, fmt); + _queueitem_init(item, interpid, data, fmt, unboundop); return item; } @@ -452,15 +478,44 @@ _queueitem_free_all(_queueitem *item) static void _queueitem_popped(_queueitem *item, - _PyCrossInterpreterData **p_data, int *p_fmt) + _PyCrossInterpreterData **p_data, int *p_fmt, int *p_unboundop) { *p_data = item->data; *p_fmt = item->fmt; + *p_unboundop = item->unboundop; // We clear them here, so they won't be released in _queueitem_clear(). item->data = NULL; _queueitem_free(item); } +static int +_queueitem_clear_interpreter(_queueitem *item) +{ + assert(item->interpid >= 0); + if (item->data == NULL) { + // Its interpreter was already cleared (or it was never bound). + // For UNBOUND_REMOVE it should have been freed at that time. + assert(item->unboundop != UNBOUND_REMOVE); + return 0; + } + assert(_PyCrossInterpreterData_INTERPID(item->data) == item->interpid); + + switch (item->unboundop) { + case UNBOUND_REMOVE: + // The caller must free/clear it. + return 1; + case UNBOUND_ERROR: + case UNBOUND_REPLACE: + // We won't need the cross-interpreter data later + // so we completely throw it away. 
+ _queueitem_clear_data(item); + return 0; + default: + Py_FatalError("not reachable"); + return -1; + } +} + /* the queue */ @@ -474,12 +529,16 @@ typedef struct _queue { _queueitem *first; _queueitem *last; } items; - int fmt; + struct { + int fmt; + int unboundop; + } defaults; } _queue; static int -_queue_init(_queue *queue, Py_ssize_t maxsize, int fmt) +_queue_init(_queue *queue, Py_ssize_t maxsize, int fmt, int unboundop) { + assert(check_unbound(unboundop)); PyThread_type_lock mutex = PyThread_allocate_lock(); if (mutex == NULL) { return ERR_QUEUE_ALLOC; @@ -490,7 +549,10 @@ _queue_init(_queue *queue, Py_ssize_t maxsize, int fmt) .items = { .maxsize = maxsize, }, - .fmt = fmt, + .defaults = { + .fmt = fmt, + .unboundop = unboundop, + }, }; return 0; } @@ -571,7 +633,8 @@ _queue_unlock(_queue *queue) } static int -_queue_add(_queue *queue, _PyCrossInterpreterData *data, int fmt) +_queue_add(_queue *queue, int64_t interpid, _PyCrossInterpreterData *data, + int fmt, int unboundop) { int err = _queue_lock(queue); if (err < 0) { @@ -587,7 +650,7 @@ _queue_add(_queue *queue, _PyCrossInterpreterData *data, int fmt) return ERR_QUEUE_FULL; } - _queueitem *item = _queueitem_new(data, fmt); + _queueitem *item = _queueitem_new(interpid, data, fmt, unboundop); if (item == NULL) { _queue_unlock(queue); return -1; @@ -608,7 +671,7 @@ _queue_add(_queue *queue, _PyCrossInterpreterData *data, int fmt) static int _queue_next(_queue *queue, - _PyCrossInterpreterData **p_data, int *p_fmt) + _PyCrossInterpreterData **p_data, int *p_fmt, int *p_unboundop) { int err = _queue_lock(queue); if (err < 0) { @@ -627,7 +690,7 @@ _queue_next(_queue *queue, } queue->items.count -= 1; - _queueitem_popped(item, p_data, p_fmt); + _queueitem_popped(item, p_data, p_fmt, p_unboundop); _queue_unlock(queue); return 0; @@ -692,14 +755,17 @@ _queue_clear_interpreter(_queue *queue, int64_t interpid) while (next != NULL) { _queueitem *item = next; next = item->next; - if (_PyCrossInterpreterData_INTERPID(item->data) == interpid) { + int remove = (item->interpid == interpid) + ? _queueitem_clear_interpreter(item) + : 0; + if (remove) { + _queueitem_free(item); if (prev == NULL) { - queue->items.first = item->next; + queue->items.first = next; } else { - prev->next = item->next; + prev->next = next; } - _queueitem_free(item); queue->items.count -= 1; } else { @@ -966,18 +1032,19 @@ _queues_decref(_queues *queues, int64_t qid) return res; } -struct queue_id_and_fmt { +struct queue_id_and_info { int64_t id; int fmt; + int unboundop; }; -static struct queue_id_and_fmt * -_queues_list_all(_queues *queues, int64_t *count) +static struct queue_id_and_info * +_queues_list_all(_queues *queues, int64_t *p_count) { - struct queue_id_and_fmt *qids = NULL; + struct queue_id_and_info *qids = NULL; PyThread_acquire_lock(queues->mutex, WAIT_LOCK); - struct queue_id_and_fmt *ids = PyMem_NEW(struct queue_id_and_fmt, - (Py_ssize_t)(queues->count)); + struct queue_id_and_info *ids = PyMem_NEW(struct queue_id_and_info, + (Py_ssize_t)(queues->count)); if (ids == NULL) { goto done; } @@ -985,9 +1052,10 @@ _queues_list_all(_queues *queues, int64_t *count) for (int64_t i=0; ref != NULL; ref = ref->next, i++) { ids[i].id = ref->qid; assert(ref->queue != NULL); - ids[i].fmt = ref->queue->fmt; + ids[i].fmt = ref->queue->defaults.fmt; + ids[i].unboundop = ref->queue->defaults.unboundop; } - *count = queues->count; + *p_count = queues->count; qids = ids; done: @@ -1021,13 +1089,13 @@ _queue_free(_queue *queue) // Create a new queue. 
static int64_t -queue_create(_queues *queues, Py_ssize_t maxsize, int fmt) +queue_create(_queues *queues, Py_ssize_t maxsize, int fmt, int unboundop) { _queue *queue = GLOBAL_MALLOC(_queue); if (queue == NULL) { return ERR_QUEUE_ALLOC; } - int err = _queue_init(queue, maxsize, fmt); + int err = _queue_init(queue, maxsize, fmt, unboundop); if (err < 0) { GLOBAL_FREE(queue); return (int64_t)err; @@ -1056,7 +1124,7 @@ queue_destroy(_queues *queues, int64_t qid) // Push an object onto the queue. static int -queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt) +queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt, int unboundop) { // Look up the queue. _queue *queue = NULL; @@ -1077,9 +1145,12 @@ queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt) GLOBAL_FREE(data); return -1; } + assert(_PyCrossInterpreterData_INTERPID(data) == \ + PyInterpreterState_GetID(PyInterpreterState_Get())); // Add the data to the queue. - int res = _queue_add(queue, data, fmt); + int64_t interpid = -1; // _queueitem_init() will set it. + int res = _queue_add(queue, interpid, data, fmt, unboundop); _queue_unmark_waiter(queue, queues->mutex); if (res != 0) { // We may chain an exception here: @@ -1094,7 +1165,8 @@ queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt) // Pop the next object off the queue. Fail if empty. // XXX Support a "wait" mutex? static int -queue_get(_queues *queues, int64_t qid, PyObject **res, int *p_fmt) +queue_get(_queues *queues, int64_t qid, + PyObject **res, int *p_fmt, int *p_unboundop) { int err; *res = NULL; @@ -1110,7 +1182,7 @@ queue_get(_queues *queues, int64_t qid, PyObject **res, int *p_fmt) // Pop off the next item from the queue. _PyCrossInterpreterData *data = NULL; - err = _queue_next(queue, &data, p_fmt); + err = _queue_next(queue, &data, p_fmt, p_unboundop); _queue_unmark_waiter(queue, queues->mutex); if (err != 0) { return err; @@ -1397,15 +1469,22 @@ qidarg_converter(PyObject *arg, void *ptr) static PyObject * queuesmod_create(PyObject *self, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"maxsize", "fmt", NULL}; + static char *kwlist[] = {"maxsize", "fmt", "unboundop", NULL}; Py_ssize_t maxsize; int fmt; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "ni:create", kwlist, - &maxsize, &fmt)) { + int unboundop; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "nii:create", kwlist, + &maxsize, &fmt, &unboundop)) + { + return NULL; + } + if (!check_unbound(unboundop)) { + PyErr_Format(PyExc_ValueError, + "unsupported unboundop %d", unboundop); return NULL; } - int64_t qid = queue_create(&_globals.queues, maxsize, fmt); + int64_t qid = queue_create(&_globals.queues, maxsize, fmt, unboundop); if (qid < 0) { (void)handle_queue_error((int)qid, self, qid); return NULL; @@ -1427,7 +1506,7 @@ queuesmod_create(PyObject *self, PyObject *args, PyObject *kwds) } PyDoc_STRVAR(queuesmod_create_doc, -"create(maxsize, fmt) -> qid\n\ +"create(maxsize, fmt, unboundop) -> qid\n\ \n\ Create a new cross-interpreter queue and return its unique generated ID.\n\ It is a new reference as though bind() had been called on the queue.\n\ @@ -1463,9 +1542,9 @@ static PyObject * queuesmod_list_all(PyObject *self, PyObject *Py_UNUSED(ignored)) { int64_t count = 0; - struct queue_id_and_fmt *qids = _queues_list_all(&_globals.queues, &count); + struct queue_id_and_info *qids = _queues_list_all(&_globals.queues, &count); if (qids == NULL) { - if (count == 0) { + if (!PyErr_Occurred() && count == 0) { return PyList_New(0); } return NULL; @@ -1474,9 +1553,10 @@ 
queuesmod_list_all(PyObject *self, PyObject *Py_UNUSED(ignored)) if (ids == NULL) { goto finally; } - struct queue_id_and_fmt *cur = qids; + struct queue_id_and_info *cur = qids; for (int64_t i=0; i < count; cur++, i++) { - PyObject *item = Py_BuildValue("Li", cur->id, cur->fmt); + PyObject *item = Py_BuildValue("Lii", cur->id, cur->fmt, + cur->unboundop); if (item == NULL) { Py_SETREF(ids, NULL); break; @@ -1498,18 +1578,26 @@ Each corresponding default format is also included."); static PyObject * queuesmod_put(PyObject *self, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"qid", "obj", "fmt", NULL}; + static char *kwlist[] = {"qid", "obj", "fmt", "unboundop", NULL}; qidarg_converter_data qidarg; PyObject *obj; int fmt; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&Oi:put", kwlist, - qidarg_converter, &qidarg, &obj, &fmt)) { + int unboundop; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&Oii:put", kwlist, + qidarg_converter, &qidarg, &obj, &fmt, + &unboundop)) + { return NULL; } int64_t qid = qidarg.id; + if (!check_unbound(unboundop)) { + PyErr_Format(PyExc_ValueError, + "unsupported unboundop %d", unboundop); + return NULL; + } /* Queue up the object. */ - int err = queue_put(&_globals.queues, qid, obj, fmt); + int err = queue_put(&_globals.queues, qid, obj, fmt, unboundop); // This is the only place that raises QueueFull. if (handle_queue_error(err, self, qid)) { return NULL; @@ -1536,13 +1624,17 @@ queuesmod_get(PyObject *self, PyObject *args, PyObject *kwds) PyObject *obj = NULL; int fmt = 0; - int err = queue_get(&_globals.queues, qid, &obj, &fmt); + int unboundop = 0; + int err = queue_get(&_globals.queues, qid, &obj, &fmt, &unboundop); // This is the only place that raises QueueEmpty. if (handle_queue_error(err, self, qid)) { return NULL; } - PyObject *res = Py_BuildValue("Oi", obj, fmt); + if (obj == NULL) { + return Py_BuildValue("Oii", Py_None, fmt, unboundop); + } + PyObject *res = Py_BuildValue("OiO", obj, fmt, Py_None); Py_DECREF(obj); return res; } @@ -1656,17 +1748,12 @@ queuesmod_get_queue_defaults(PyObject *self, PyObject *args, PyObject *kwds) if (handle_queue_error(err, self, qid)) { return NULL; } - int fmt = queue->fmt; + int fmt = queue->defaults.fmt; + int unboundop = queue->defaults.unboundop; _queue_unmark_waiter(queue, _globals.queues.mutex); - PyObject *fmt_obj = PyLong_FromLong(fmt); - if (fmt_obj == NULL) { - return NULL; - } - // For now queues only have one default. 
- PyObject *res = PyTuple_Pack(1, fmt_obj); - Py_DECREF(fmt_obj); - return res; + PyObject *defaults = Py_BuildValue("ii", fmt, unboundop); + return defaults; } PyDoc_STRVAR(queuesmod_get_queue_defaults_doc, diff --git a/Modules/_interpreters_common.h b/Modules/_interpreters_common.h index 07120f6ccc7..0d2e0c9efd3 100644 --- a/Modules/_interpreters_common.h +++ b/Modules/_interpreters_common.h @@ -19,3 +19,48 @@ clear_xid_class(PyTypeObject *cls) return _PyCrossInterpreterData_UnregisterClass(cls); } #endif + + +static inline int64_t +_get_interpid(_PyCrossInterpreterData *data) +{ + int64_t interpid; + if (data != NULL) { + interpid = _PyCrossInterpreterData_INTERPID(data); + assert(!PyErr_Occurred()); + } + else { + interpid = PyInterpreterState_GetID(PyInterpreterState_Get()); + } + return interpid; +} + + +/* unbound items ************************************************************/ + +#ifdef HAS_UNBOUND_ITEMS + +#define UNBOUND_REMOVE 1 +#define UNBOUND_ERROR 2 +#define UNBOUND_REPLACE 3 + +// It would also be possible to add UNBOUND_REPLACE where the replacement +// value is user-provided. There would be some limitations there, though. +// Another possibility would be something like UNBOUND_COPY, where the +// object is released but the underlying data is copied (with the "raw" +// allocator) and used when the item is popped off the queue. + +static int +check_unbound(int unboundop) +{ + switch (unboundop) { + case UNBOUND_REMOVE: + case UNBOUND_ERROR: + case UNBOUND_REPLACE: + return 1; + default: + return 0; + } +} + +#endif diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c index aa52711941d..e45323c93a1 100644 --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -8,7 +8,6 @@ */ #include "Python.h" -#include "pycore_bytesobject.h" // _PyBytes_Join() #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pyerrors.h" // _Py_FatalErrorFormat() diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c index d4923442478..de7395b610e 100644 --- a/Modules/_localemodule.c +++ b/Modules/_localemodule.c @@ -52,7 +52,7 @@ module _locale [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=ed98569b726feada]*/ -/* support functions for formatting floating point numbers */ +/* support functions for formatting floating-point numbers */ /* the grouping is terminated by either 0 or CHAR_MAX */ static PyObject* diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c index 5cf9eba243b..8b6906234bd 100644 --- a/Modules/_lsprof.c +++ b/Modules/_lsprof.c @@ -59,6 +59,7 @@ typedef struct { #define POF_ENABLED 0x001 #define POF_SUBCALLS 0x002 #define POF_BUILTINS 0x004 +#define POF_EXT_TIMER 0x008 #define POF_NOMEMORY 0x100 /*[clinic input] @@ -87,7 +88,14 @@ _lsprof_get_state(PyObject *module) static PyTime_t CallExternalTimer(ProfilerObject *pObj) { - PyObject *o = _PyObject_CallNoArgs(pObj->externalTimer); + PyObject *o = NULL; + + // External timer can do arbitrary things so we need a flag to prevent + // horrible things to happen + pObj->flags |= POF_EXT_TIMER; + o = _PyObject_CallNoArgs(pObj->externalTimer); + pObj->flags &= ~POF_EXT_TIMER; + if (o == NULL) { PyErr_WriteUnraisable(pObj->externalTimer); return 0; @@ -777,6 +785,11 @@ Stop collecting profiling information.\n\ static PyObject* profiler_disable(ProfilerObject *self, PyObject* noarg) { + if (self->flags & POF_EXT_TIMER) { + PyErr_SetString(PyExc_RuntimeError, + "cannot disable profiler in external timer"); 
+ return NULL; + } if (self->flags & POF_ENABLED) { PyObject* result = NULL; PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); @@ -830,6 +843,11 @@ Clear all profiling information collected so far.\n\ static PyObject* profiler_clear(ProfilerObject *pObj, PyObject* noarg) { + if (pObj->flags & POF_EXT_TIMER) { + PyErr_SetString(PyExc_RuntimeError, + "cannot clear profiler in external timer"); + return NULL; + } clearEntries(pObj); Py_RETURN_NONE; } @@ -838,6 +856,7 @@ static int profiler_traverse(ProfilerObject *op, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(op)); + Py_VISIT(op->externalTimer); return 0; } diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 21be88a79d8..add07d558ab 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1817,10 +1817,10 @@ get_dotted_path(PyObject *obj, PyObject *name) if (_PyUnicode_EqualToASCIIString(subpath, "")) { if (obj == NULL) PyErr_Format(PyExc_AttributeError, - "Can't pickle local object %R", name); + "Can't get local object %R", name); else PyErr_Format(PyExc_AttributeError, - "Can't pickle local attribute %R on %R", name, obj); + "Can't get local attribute %R on %R", name, obj); Py_DECREF(dotted_path); return NULL; } @@ -2507,7 +2507,7 @@ save_picklebuffer(PickleState *st, PicklerObject *self, PyObject *obj) { if (self->proto < 5) { PyErr_SetString(st->PicklingError, - "PickleBuffer can only pickled with protocol >= 5"); + "PickleBuffer can only be pickled with protocol >= 5"); return -1; } const Py_buffer* view = PyPickleBuffer_GetBuffer(obj); @@ -3123,6 +3123,7 @@ batch_dict(PickleState *state, PicklerObject *self, PyObject *iter) if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) { PyErr_SetString(PyExc_TypeError, "dict items " "iterator must return 2-tuples"); + Py_DECREF(obj); return -1; } i = save(state, self, PyTuple_GET_ITEM(obj, 0), 0); @@ -3592,7 +3593,6 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj, PyObject *module = NULL; PyObject *parent = NULL; PyObject *dotted_path = NULL; - PyObject *lastname = NULL; PyObject *cls; int status = 0; @@ -3633,10 +3633,7 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj, obj, module_name); goto error; } - lastname = Py_NewRef(PyList_GET_ITEM(dotted_path, - PyList_GET_SIZE(dotted_path) - 1)); cls = get_deep_attribute(module, dotted_path, &parent); - Py_CLEAR(dotted_path); if (cls == NULL) { PyErr_Format(st->PicklingError, "Can't pickle %R: attribute lookup %S on %S failed", @@ -3666,34 +3663,24 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj, if (extension_key == NULL) { goto error; } - code_obj = PyDict_GetItemWithError(st->extension_registry, - extension_key); + if (PyDict_GetItemRef(st->extension_registry, extension_key, &code_obj) < 0) { + Py_DECREF(extension_key); + goto error; + } Py_DECREF(extension_key); - /* The object is not registered in the extension registry. - This is the most likely code path. */ if (code_obj == NULL) { - if (PyErr_Occurred()) { - goto error; - } + /* The object is not registered in the extension registry. + This is the most likely code path. */ goto gen_global; } - /* XXX: pickle.py doesn't check neither the type, nor the range - of the value returned by the extension_registry. It should for - consistency. */ - - /* Verify code_obj has the right type and value. 
*/ - if (!PyLong_Check(code_obj)) { - PyErr_Format(st->PicklingError, - "Can't pickle %R: extension code %R isn't an integer", - obj, code_obj); - goto error; - } - code = PyLong_AS_LONG(code_obj); + code = PyLong_AsLong(code_obj); + Py_DECREF(code_obj); if (code <= 0 || code > 0x7fffffffL) { + /* Should never happen in normal circumstances, since the type and + the value of the code are checked in copyreg.add_extension(). */ if (!PyErr_Occurred()) - PyErr_Format(st->PicklingError, "Can't pickle %R: extension " - "code %ld is out of range", obj, code); + PyErr_Format(PyExc_RuntimeError, "extension code %ld is out of range", code); goto error; } @@ -3724,7 +3711,10 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj, else { gen_global: if (parent == module) { - Py_SETREF(global_name, Py_NewRef(lastname)); + Py_SETREF(global_name, + Py_NewRef(PyList_GET_ITEM(dotted_path, + PyList_GET_SIZE(dotted_path) - 1))); + Py_CLEAR(dotted_path); } if (self->proto >= 4) { const char stack_global_op = STACK_GLOBAL; @@ -3737,20 +3727,30 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj, if (_Pickler_Write(self, &stack_global_op, 1) < 0) goto error; } - else if (parent != module) { - PyObject *reduce_value = Py_BuildValue("(O(OO))", - st->getattr, parent, lastname); - if (reduce_value == NULL) - goto error; - status = save_reduce(st, self, reduce_value, NULL); - Py_DECREF(reduce_value); - if (status < 0) - goto error; - } else { /* Generate a normal global opcode if we are using a pickle protocol < 4, or if the object is not registered in the - extension registry. */ + extension registry. + + Objects with multi-part __qualname__ are represented as + getattr(getattr(..., attrname1), attrname2). */ + const char mark_op = MARK; + const char tupletwo_op = (self->proto < 2) ? TUPLE : TUPLE2; + const char reduce_op = REDUCE; + Py_ssize_t i; + if (dotted_path) { + if (PyList_GET_SIZE(dotted_path) > 1) { + Py_SETREF(global_name, Py_NewRef(PyList_GET_ITEM(dotted_path, 0))); + } + for (i = 1; i < PyList_GET_SIZE(dotted_path); i++) { + if (save(st, self, st->getattr, 0) < 0 || + (self->proto < 2 && _Pickler_Write(self, &mark_op, 1) < 0)) + { + goto error; + } + } + } + PyObject *encoded; PyObject *(*unicode_encoder)(PyObject *); @@ -3812,6 +3812,17 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj, Py_DECREF(encoded); if (_Pickler_Write(self, "\n", 1) < 0) goto error; + + if (dotted_path) { + for (i = 1; i < PyList_GET_SIZE(dotted_path); i++) { + if (save(st, self, PyList_GET_ITEM(dotted_path, i), 0) < 0 || + _Pickler_Write(self, &tupletwo_op, 1) < 0 || + _Pickler_Write(self, &reduce_op, 1) < 0) + { + goto error; + } + } + } } /* Memoize the object. 
*/ if (memo_put(st, self, obj) < 0) @@ -3827,7 +3838,6 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj, Py_XDECREF(module); Py_XDECREF(parent); Py_XDECREF(dotted_path); - Py_XDECREF(lastname); return status; } @@ -6524,11 +6534,13 @@ load_additems(PickleState *state, UnpicklerObject *self) if (result == NULL) { Pdata_clear(self->stack, i + 1); Py_SET_SIZE(self->stack, mark); + Py_DECREF(add_func); return -1; } Py_DECREF(result); } Py_SET_SIZE(self->stack, mark); + Py_DECREF(add_func); } return 0; diff --git a/Modules/_struct.c b/Modules/_struct.c index 905dcbdeedd..e76ed111a5c 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -281,7 +281,7 @@ get_size_t(_structmodulestate *state, PyObject *v, size_t *p) #define RANGE_ERROR(state, f, flag) return _range_error(state, f, flag) -/* Floating point helpers */ +/* Floating-point helpers */ static PyObject * unpack_halffloat(const char *p, /* start of 2-byte string */ @@ -1669,9 +1669,16 @@ s_unpack_internal(PyStructObject *soself, const char *startfrom, if (e->format == 's') { v = PyBytes_FromStringAndSize(res, code->size); } else if (e->format == 'p') { - Py_ssize_t n = *(unsigned char*)res; - if (n >= code->size) - n = code->size - 1; + Py_ssize_t n; + if (code->size == 0) { + n = 0; + } + else { + n = *(unsigned char*)res; + if (n >= code->size) { + n = code->size - 1; + } + } v = PyBytes_FromStringAndSize(res + 1, n); } else { v = e->unpack(state, res, e); @@ -1982,8 +1989,12 @@ s_pack_internal(PyStructObject *soself, PyObject *const *args, int offset, n = PyByteArray_GET_SIZE(v); p = PyByteArray_AS_STRING(v); } - if (n > (code->size - 1)) + if (code->size == 0) { + n = 0; + } + else if (n > (code->size - 1)) { n = code->size - 1; + } if (n > 0) memcpy(res + 1, p, n); if (n > 255) diff --git a/Modules/_testcapi/numbers.c b/Modules/_testcapi/numbers.c index 6f7fa3fa7a4..e16ff737440 100644 --- a/Modules/_testcapi/numbers.c +++ b/Modules/_testcapi/numbers.c @@ -1,7 +1,168 @@ #include "parts.h" #include "util.h" + +static PyObject * +number_check(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyNumber_Check(obj)); +} + +#define BINARYFUNC(funcsuffix, methsuffix) \ + static PyObject * \ + number_##methsuffix(PyObject *Py_UNUSED(module), PyObject *args) \ + { \ + PyObject *o1, *o2; \ + \ + if (!PyArg_ParseTuple(args, "OO", &o1, &o2)) { \ + return NULL; \ + } \ + \ + NULLABLE(o1); \ + NULLABLE(o2); \ + return PyNumber_##funcsuffix(o1, o2); \ + }; + +BINARYFUNC(Add, add) +BINARYFUNC(Subtract, subtract) +BINARYFUNC(Multiply, multiply) +BINARYFUNC(MatrixMultiply, matrixmultiply) +BINARYFUNC(FloorDivide, floordivide) +BINARYFUNC(TrueDivide, truedivide) +BINARYFUNC(Remainder, remainder) +BINARYFUNC(Divmod, divmod) + +#define TERNARYFUNC(funcsuffix, methsuffix) \ + static PyObject * \ + number_##methsuffix(PyObject *Py_UNUSED(module), PyObject *args) \ + { \ + PyObject *o1, *o2, *o3 = Py_None; \ + \ + if (!PyArg_ParseTuple(args, "OO|O", &o1, &o2, &o3)) { \ + return NULL; \ + } \ + \ + NULLABLE(o1); \ + NULLABLE(o2); \ + return PyNumber_##funcsuffix(o1, o2, o3); \ + }; + +TERNARYFUNC(Power, power) + +#define UNARYFUNC(funcsuffix, methsuffix) \ + static PyObject * \ + number_##methsuffix(PyObject *Py_UNUSED(module), PyObject *obj) \ + { \ + NULLABLE(obj); \ + return PyNumber_##funcsuffix(obj); \ + }; + +UNARYFUNC(Negative, negative) +UNARYFUNC(Positive, positive) +UNARYFUNC(Absolute, absolute) +UNARYFUNC(Invert, invert) + +BINARYFUNC(Lshift, lshift) +BINARYFUNC(Rshift, rshift) 
+BINARYFUNC(And, and) +BINARYFUNC(Xor, xor) +BINARYFUNC(Or, or) + +BINARYFUNC(InPlaceAdd, inplaceadd) +BINARYFUNC(InPlaceSubtract, inplacesubtract) +BINARYFUNC(InPlaceMultiply, inplacemultiply) +BINARYFUNC(InPlaceMatrixMultiply, inplacematrixmultiply) +BINARYFUNC(InPlaceFloorDivide, inplacefloordivide) +BINARYFUNC(InPlaceTrueDivide, inplacetruedivide) +BINARYFUNC(InPlaceRemainder, inplaceremainder) + +TERNARYFUNC(InPlacePower, inplacepower) + +BINARYFUNC(InPlaceLshift, inplacelshift) +BINARYFUNC(InPlaceRshift, inplacershift) +BINARYFUNC(InPlaceAnd, inplaceand) +BINARYFUNC(InPlaceXor, inplacexor) +BINARYFUNC(InPlaceOr, inplaceor) + +UNARYFUNC(Long, long) +UNARYFUNC(Float, float) +UNARYFUNC(Index, index) + +static PyObject * +number_tobase(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *n; + int base; + + if (!PyArg_ParseTuple(args, "Oi", &n, &base)) { + return NULL; + } + + NULLABLE(n); + return PyNumber_ToBase(n, base); +} + +static PyObject * +number_asssizet(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *o, *exc; + Py_ssize_t ret; + + if (!PyArg_ParseTuple(args, "OO", &o, &exc)) { + return NULL; + } + + NULLABLE(o); + NULLABLE(exc); + ret = PyNumber_AsSsize_t(o, exc); + + if (ret == (Py_ssize_t)(-1) && PyErr_Occurred()) { + return NULL; + } + + return PyLong_FromSsize_t(ret); +} + + static PyMethodDef test_methods[] = { + {"number_check", number_check, METH_O}, + {"number_add", number_add, METH_VARARGS}, + {"number_subtract", number_subtract, METH_VARARGS}, + {"number_multiply", number_multiply, METH_VARARGS}, + {"number_matrixmultiply", number_matrixmultiply, METH_VARARGS}, + {"number_floordivide", number_floordivide, METH_VARARGS}, + {"number_truedivide", number_truedivide, METH_VARARGS}, + {"number_remainder", number_remainder, METH_VARARGS}, + {"number_divmod", number_divmod, METH_VARARGS}, + {"number_power", number_power, METH_VARARGS}, + {"number_negative", number_negative, METH_O}, + {"number_positive", number_positive, METH_O}, + {"number_absolute", number_absolute, METH_O}, + {"number_invert", number_invert, METH_O}, + {"number_lshift", number_lshift, METH_VARARGS}, + {"number_rshift", number_rshift, METH_VARARGS}, + {"number_and", number_and, METH_VARARGS}, + {"number_xor", number_xor, METH_VARARGS}, + {"number_or", number_or, METH_VARARGS}, + {"number_inplaceadd", number_inplaceadd, METH_VARARGS}, + {"number_inplacesubtract", number_inplacesubtract, METH_VARARGS}, + {"number_inplacemultiply", number_inplacemultiply, METH_VARARGS}, + {"number_inplacematrixmultiply", number_inplacematrixmultiply, METH_VARARGS}, + {"number_inplacefloordivide", number_inplacefloordivide, METH_VARARGS}, + {"number_inplacetruedivide", number_inplacetruedivide, METH_VARARGS}, + {"number_inplaceremainder", number_inplaceremainder, METH_VARARGS}, + {"number_inplacepower", number_inplacepower, METH_VARARGS}, + {"number_inplacelshift", number_inplacelshift, METH_VARARGS}, + {"number_inplacershift", number_inplacershift, METH_VARARGS}, + {"number_inplaceand", number_inplaceand, METH_VARARGS}, + {"number_inplacexor", number_inplacexor, METH_VARARGS}, + {"number_inplaceor", number_inplaceor, METH_VARARGS}, + {"number_long", number_long, METH_O}, + {"number_float", number_float, METH_O}, + {"number_index", number_index, METH_O}, + {"number_tobase", number_tobase, METH_VARARGS}, + {"number_asssizet", number_asssizet, METH_VARARGS}, {NULL}, }; diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c index 4f72844535e..850de6f9c33 100644 --- 
a/Modules/_testcapi/pyatomic.c +++ b/Modules/_testcapi/pyatomic.c @@ -125,6 +125,7 @@ test_atomic_fences(PyObject *self, PyObject *obj) { // Just make sure that the fences compile. We are not // testing any synchronizing ordering. _Py_atomic_fence_seq_cst(); + _Py_atomic_fence_acquire(); _Py_atomic_fence_release(); Py_RETURN_NONE; } diff --git a/Modules/_testcapi/tuple.c b/Modules/_testcapi/tuple.c index 95dde8c0eda..d9c02ba0ff0 100644 --- a/Modules/_testcapi/tuple.c +++ b/Modules/_testcapi/tuple.c @@ -2,14 +2,121 @@ #include "util.h" +static PyObject * +tuple_get_size(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PyTuple_GET_SIZE(obj)); +} + +static PyObject * +tuple_get_item(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "On", &obj, &i)) { + return NULL; + } + NULLABLE(obj); + return Py_XNewRef(PyTuple_GET_ITEM(obj, i)); +} + +static PyObject * +tuple_copy(PyObject *tuple) +{ + Py_ssize_t size = PyTuple_GET_SIZE(tuple); + PyObject *newtuple = PyTuple_New(size); + if (!newtuple) { + return NULL; + } + for (Py_ssize_t n = 0; n < size; n++) { + PyTuple_SET_ITEM(newtuple, n, Py_XNewRef(PyTuple_GET_ITEM(tuple, n))); + } + return newtuple; +} + +static PyObject * +tuple_set_item(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj, *value, *newtuple; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "OnO", &obj, &i, &value)) { + return NULL; + } + NULLABLE(value); + if (PyTuple_CheckExact(obj)) { + newtuple = tuple_copy(obj); + if (!newtuple) { + return NULL; + } + + PyObject *val = PyTuple_GET_ITEM(newtuple, i); + PyTuple_SET_ITEM(newtuple, i, Py_XNewRef(value)); + Py_DECREF(val); + return newtuple; + } + else { + NULLABLE(obj); + + PyObject *val = PyTuple_GET_ITEM(obj, i); + PyTuple_SET_ITEM(obj, i, Py_XNewRef(value)); + Py_DECREF(val); + return Py_XNewRef(obj); + } +} + +static PyObject * +_tuple_resize(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *tup; + Py_ssize_t newsize; + int new = 1; + if (!PyArg_ParseTuple(args, "On|p", &tup, &newsize, &new)) { + return NULL; + } + if (new) { + tup = tuple_copy(tup); + if (!tup) { + return NULL; + } + } + else { + NULLABLE(tup); + Py_XINCREF(tup); + } + int r = _PyTuple_Resize(&tup, newsize); + if (r == -1) { + assert(tup == NULL); + return NULL; + } + return tup; +} + +static PyObject * +_check_tuple_item_is_NULL(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "On", &obj, &i)) { + return NULL; + } + return PyLong_FromLong(PyTuple_GET_ITEM(obj, i) == NULL); +} + + static PyMethodDef test_methods[] = { + {"tuple_get_size", tuple_get_size, METH_O}, + {"tuple_get_item", tuple_get_item, METH_VARARGS}, + {"tuple_set_item", tuple_set_item, METH_VARARGS}, + {"_tuple_resize", _tuple_resize, METH_VARARGS}, + {"_check_tuple_item_is_NULL", _check_tuple_item_is_NULL, METH_VARARGS}, {NULL}, }; int _PyTestCapi_Init_Tuple(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0){ + if (PyModule_AddFunctions(m, test_methods) < 0) { return -1; } diff --git a/Modules/_testcapi/vectorcall.c b/Modules/_testcapi/vectorcall.c index b30c5e8704c..03aaacb328e 100644 --- a/Modules/_testcapi/vectorcall.c +++ b/Modules/_testcapi/vectorcall.c @@ -348,6 +348,9 @@ static PyObject * MethodDescriptor2_new(PyTypeObject* type, PyObject* args, PyObject *kw) { MethodDescriptor2Object *op = PyObject_New(MethodDescriptor2Object, type); + if (op == NULL) { + return NULL; + } op->base.vectorcall = NULL; 
op->vectorcall = MethodDescriptor_vectorcall; return (PyObject *)op; diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 1fa7c378412..01b6bd89d13 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -3332,6 +3332,12 @@ test_critical_sections(PyObject *module, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } +static PyObject * +pyeval_getlocals(PyObject *module, PyObject *Py_UNUSED(args)) +{ + return Py_XNewRef(PyEval_GetLocals()); +} + static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, @@ -3476,6 +3482,7 @@ static PyMethodDef TestMethods[] = { {"test_weakref_capi", test_weakref_capi, METH_NOARGS}, {"function_set_warning", function_set_warning, METH_NOARGS}, {"test_critical_sections", test_critical_sections, METH_NOARGS}, + {"pyeval_getlocals", pyeval_getlocals, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_testclinic.c b/Modules/_testclinic.c index 4187e13231d..2dae8accf01 100644 --- a/Modules/_testclinic.c +++ b/Modules/_testclinic.c @@ -982,54 +982,56 @@ posonly_vararg_impl(PyObject *module, PyObject *a, PyObject *b, /*[clinic input] -vararg_and_posonly +vararg a: object *args: object - / [clinic start generated code]*/ static PyObject * -vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args) -/*[clinic end generated code: output=42792f799465a14d input=defe017b19ba52e8]*/ +vararg_impl(PyObject *module, PyObject *a, PyObject *args) +/*[clinic end generated code: output=91ab7a0efc52dd5e input=02c0f772d05f591e]*/ { return pack_arguments_newref(2, a, args); } /*[clinic input] -vararg +vararg_with_default a: object *args: object + b: bool = False [clinic start generated code]*/ static PyObject * -vararg_impl(PyObject *module, PyObject *a, PyObject *args) -/*[clinic end generated code: output=91ab7a0efc52dd5e input=02c0f772d05f591e]*/ +vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, + int b) +/*[clinic end generated code: output=182c01035958ce92 input=68cafa6a79f89e36]*/ { - return pack_arguments_newref(2, a, args); + PyObject *obj_b = b ? Py_True : Py_False; + return pack_arguments_newref(3, a, args, obj_b); } /*[clinic input] -vararg_with_default +vararg_with_default2 a: object *args: object - b: bool = False + b: object = None + c: object = None [clinic start generated code]*/ static PyObject * -vararg_with_default_impl(PyObject *module, PyObject *a, PyObject *args, - int b) -/*[clinic end generated code: output=182c01035958ce92 input=68cafa6a79f89e36]*/ +vararg_with_default2_impl(PyObject *module, PyObject *a, PyObject *args, + PyObject *b, PyObject *c) +/*[clinic end generated code: output=a0fb7c37796e2129 input=59fb22f5f0a8925f]*/ { - PyObject *obj_b = b ? 
Py_True : Py_False; - return pack_arguments_newref(3, a, args, obj_b); + return pack_arguments_newref(4, a, args, b, c); } @@ -1049,6 +1051,25 @@ vararg_with_only_defaults_impl(PyObject *module, PyObject *args, PyObject *b) } +/*[clinic input] +vararg_kwonly_req_opt + + *args: object + a: object + b: object = None + c: object = None + +[clinic start generated code]*/ + +static PyObject * +vararg_kwonly_req_opt_impl(PyObject *module, PyObject *args, PyObject *a, + PyObject *b, PyObject *c) +/*[clinic end generated code: output=54694a99c3da370a input=b0d8bf09e540d400]*/ +{ + return pack_arguments_newref(4, args, a, b, c); +} + + /*[clinic input] gh_32092_oob @@ -1096,7 +1117,6 @@ gh_32092_kw_pass_impl(PyObject *module, PyObject *pos, PyObject *args, gh_99233_refcount *args: object - / Proof-of-concept of GH-99233 refcount error bug. @@ -1104,7 +1124,7 @@ Proof-of-concept of GH-99233 refcount error bug. static PyObject * gh_99233_refcount_impl(PyObject *module, PyObject *args) -/*[clinic end generated code: output=585855abfbca9a7f input=85f5fb47ac91a626]*/ +/*[clinic end generated code: output=585855abfbca9a7f input=eecfdc2092d90dc3]*/ { Py_RETURN_NONE; } @@ -1904,10 +1924,11 @@ static PyMethodDef tester_methods[] = { POSONLY_OPT_KEYWORDS_OPT_KWONLY_OPT_METHODDEF KEYWORD_ONLY_PARAMETER_METHODDEF POSONLY_VARARG_METHODDEF - VARARG_AND_POSONLY_METHODDEF VARARG_METHODDEF VARARG_WITH_DEFAULT_METHODDEF + VARARG_WITH_DEFAULT2_METHODDEF VARARG_WITH_ONLY_DEFAULTS_METHODDEF + VARARG_KWONLY_REQ_OPT_METHODDEF GH_32092_OOB_METHODDEF GH_32092_KW_PASS_METHODDEF GH_99233_REFCOUNT_METHODDEF diff --git a/Modules/_testlimitedcapi.c b/Modules/_testlimitedcapi.c index fb5cdb6ca9e..ec19da217d8 100644 --- a/Modules/_testlimitedcapi.c +++ b/Modules/_testlimitedcapi.c @@ -68,6 +68,9 @@ PyInit__testlimitedcapi(void) if (_PyTestLimitedCAPI_Init_Sys(mod) < 0) { return NULL; } + if (_PyTestLimitedCAPI_Init_Tuple(mod) < 0) { + return NULL; + } if (_PyTestLimitedCAPI_Init_Unicode(mod) < 0) { return NULL; } diff --git a/Modules/_testlimitedcapi/parts.h b/Modules/_testlimitedcapi/parts.h index d5e590a8dcd..140396d6b99 100644 --- a/Modules/_testlimitedcapi/parts.h +++ b/Modules/_testlimitedcapi/parts.h @@ -35,6 +35,7 @@ int _PyTestLimitedCAPI_Init_Long(PyObject *module); int _PyTestLimitedCAPI_Init_PyOS(PyObject *module); int _PyTestLimitedCAPI_Init_Set(PyObject *module); int _PyTestLimitedCAPI_Init_Sys(PyObject *module); +int _PyTestLimitedCAPI_Init_Tuple(PyObject *module); int _PyTestLimitedCAPI_Init_Unicode(PyObject *module); int _PyTestLimitedCAPI_Init_VectorcallLimited(PyObject *module); diff --git a/Modules/_testlimitedcapi/tuple.c b/Modules/_testlimitedcapi/tuple.c new file mode 100644 index 00000000000..231ec12d517 --- /dev/null +++ b/Modules/_testlimitedcapi/tuple.c @@ -0,0 +1,136 @@ +#include "parts.h" +#include "util.h" + + +static PyObject * +tuple_check(PyObject* Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyTuple_Check(obj)); +} + +static PyObject * +tuple_checkexact(PyObject* Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyTuple_CheckExact(obj)); +} + +static PyObject * +tuple_new(PyObject* Py_UNUSED(module), PyObject *len) +{ + return PyTuple_New(PyLong_AsSsize_t(len)); +} + +static PyObject * +tuple_pack(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *arg1 = NULL, *arg2 = NULL; + Py_ssize_t size; + + if (!PyArg_ParseTuple(args, "n|OO", &size, &arg1, &arg2)) { + return NULL; + } + if (arg1) { + NULLABLE(arg1); + if (arg2) { + 
NULLABLE(arg2); + return PyTuple_Pack(size, arg1, arg2); + } + return PyTuple_Pack(size, arg1); + } + return PyTuple_Pack(size); +} + +static PyObject * +tuple_size(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PyTuple_Size(obj)); +} + +static PyObject * +tuple_getitem(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "On", &obj, &i)) { + return NULL; + } + NULLABLE(obj); + return Py_XNewRef(PyTuple_GetItem(obj, i)); +} + +static PyObject * +tuple_getslice(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t ilow, ihigh; + if (!PyArg_ParseTuple(args, "Onn", &obj, &ilow, &ihigh)) { + return NULL; + } + NULLABLE(obj); + return PyTuple_GetSlice(obj, ilow, ihigh); +} + +static PyObject * +tuple_setitem(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj, *value, *newtuple = NULL; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "OnO", &obj, &i, &value)) { + return NULL; + } + NULLABLE(value); + if (PyTuple_CheckExact(obj)) { + Py_ssize_t size = PyTuple_Size(obj); + newtuple = PyTuple_New(size); + if (!newtuple) { + return NULL; + } + for (Py_ssize_t n = 0; n < size; n++) { + if (PyTuple_SetItem(newtuple, n, + Py_XNewRef(PyTuple_GetItem(obj, n))) == -1) { + Py_DECREF(newtuple); + return NULL; + } + } + + if (PyTuple_SetItem(newtuple, i, Py_XNewRef(value)) == -1) { + Py_DECREF(newtuple); + return NULL; + } + return newtuple; + } + else { + NULLABLE(obj); + + if (PyTuple_SetItem(obj, i, Py_XNewRef(value)) == -1) { + return NULL; + } + return Py_XNewRef(obj); + } +} + + +static PyMethodDef test_methods[] = { + {"tuple_check", tuple_check, METH_O}, + {"tuple_checkexact", tuple_checkexact, METH_O}, + {"tuple_new", tuple_new, METH_O}, + {"tuple_pack", tuple_pack, METH_VARARGS}, + {"tuple_size", tuple_size, METH_O}, + {"tuple_getitem", tuple_getitem, METH_VARARGS}, + {"tuple_getslice", tuple_getslice, METH_VARARGS}, + {"tuple_setitem", tuple_setitem, METH_VARARGS}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Tuple(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testsinglephase.c b/Modules/_testsinglephase.c index 066e0dbfb63..2c59085d15b 100644 --- a/Modules/_testsinglephase.c +++ b/Modules/_testsinglephase.c @@ -1,7 +1,7 @@ /* Testing module for single-phase initialization of extension modules -This file contains 8 distinct modules, meaning each as its own name +This file contains several distinct modules, meaning each as its own name and its own init function (PyInit_...). The default import system will only find the one matching the filename: _testsinglephase. To load the others you must do so manually. For example: @@ -12,9 +12,13 @@ filename = _testsinglephase.__file__ loader = importlib.machinery.ExtensionFileLoader(name, filename) spec = importlib.util.spec_from_file_location(name, filename, loader=loader) mod = importlib._bootstrap._load(spec) +loader.exec_module(module) +sys.modules[modname] = module ``` -Here are the 8 modules: +(The last two lines are just for completeness.) + +Here are the modules: * _testsinglephase * def: _testsinglephase_basic, @@ -163,6 +167,11 @@ Here are the 8 modules: * functions: none * import system: same as _testsinglephase_with_state +* _testsinglephase_circular + Regression test for gh-123880. + Does not have the common attributes & methods. + See test_singlephase_circular test.test_import.SinglephaseInitTests. 
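For reference, the loading recipe in the docstring above can be written with public importlib APIs and consistent variable names; a minimal sketch, assuming the `_testsinglephase` extension has been built and is importable:

```
import importlib.machinery
import importlib.util
import sys

import _testsinglephase

# Load one of the extra modules defined in this file under its own name.
name = '_testsinglephase_with_state'
filename = _testsinglephase.__file__
loader = importlib.machinery.ExtensionFileLoader(name, filename)
spec = importlib.util.spec_from_file_location(name, filename, loader=loader)
mod = importlib.util.module_from_spec(spec)
loader.exec_module(mod)
sys.modules[name] = mod
```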
+ Module state: * fields @@ -740,3 +749,53 @@ PyInit__testsinglephase_with_state_check_cache_first(void) } return PyModule_Create(&_testsinglephase_with_state_check_cache_first); } + + +/****************************************/ +/* the _testsinglephase_circular module */ +/****************************************/ + +static PyObject *static_module_circular; + +static PyObject * +circularmod_clear_static_var(PyObject *self, PyObject *arg) +{ + PyObject *result = static_module_circular; + static_module_circular = NULL; + return result; +} + +static struct PyModuleDef _testsinglephase_circular = { + PyModuleDef_HEAD_INIT, + .m_name = "_testsinglephase_circular", + .m_doc = PyDoc_STR("Test module _testsinglephase_circular"), + .m_methods = (PyMethodDef[]) { + {"clear_static_var", circularmod_clear_static_var, METH_NOARGS, + "Clear the static variable and return its previous value."}, + {NULL, NULL} /* sentinel */ + } +}; + +PyMODINIT_FUNC +PyInit__testsinglephase_circular(void) +{ + if (!static_module_circular) { + static_module_circular = PyModule_Create(&_testsinglephase_circular); + if (!static_module_circular) { + return NULL; + } + } + static const char helper_mod_name[] = ( + "test.test_import.data.circular_imports.singlephase"); + PyObject *helper_mod = PyImport_ImportModule(helper_mod_name); + Py_XDECREF(helper_mod); + if (!helper_mod) { + return NULL; + } + if(PyModule_AddStringConstant(static_module_circular, + "helper_mod_name", + helper_mod_name) < 0) { + return NULL; + } + return Py_NewRef(static_module_circular); +} diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 39d309729d8..d21a37d8866 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -1350,33 +1350,44 @@ newlockobject(PyObject *module) Our implementation uses small "localdummy" objects in order to break the reference chain. These trivial objects are hashable (using the default scheme of identity hashing) and weakrefable. - Each thread-state holds a separate localdummy for each local object - (as a /strong reference/), - and each thread-local object holds a dict mapping /weak references/ - of localdummies to local dicts. + + Each thread-state holds two separate localdummy objects: + + - `threading_local_key` is used as a key to retrieve the locals dictionary + for the thread in any `threading.local` object. + - `threading_local_sentinel` is used to signal when a thread is being + destroyed. Consequently, the associated thread-state must hold the only + reference. + + Each `threading.local` object contains a dict mapping localdummy keys to + locals dicts and a set containing weak references to localdummy + sentinels. Each sentinel weak reference has a callback that removes itself + and the locals dict for the key from the `threading.local` object when + called. 
Therefore: - - only the thread-state dict holds a strong reference to the dummies - - only the thread-local object holds a strong reference to the local dicts - - only outside objects (application- or library-level) hold strong - references to the thread-local objects - - as soon as a thread-state dict is destroyed, the weakref callbacks of all - dummies attached to that thread are called, and destroy the corresponding - local dicts from thread-local objects - - as soon as a thread-local object is destroyed, its local dicts are - destroyed and its dummies are manually removed from all thread states - - the GC can do its work correctly when a thread-local object is dangling, - without any interference from the thread-state dicts - - As an additional optimization, each localdummy holds a borrowed reference - to the corresponding localdict. This borrowed reference is only used - by the thread-local object which has created the localdummy, which should - guarantee that the localdict still exists when accessed. + - The thread-state only holds strong references to localdummy objects, which + cannot participate in cycles. + - Only outside objects (application- or library-level) hold strong + references to the thread-local objects. + - As soon as thread-state's sentinel dummy is destroyed the callbacks for + all weakrefs attached to the sentinel are called, and destroy the + corresponding local dicts from thread-local objects. + - As soon as a thread-local object is destroyed, its local dicts are + destroyed. + - The GC can do its work correctly when a thread-local object is dangling, + without any interference from the thread-state dicts. + + This dual key arrangement is necessary to ensure that `threading.local` + values can be retrieved from finalizers. If we were to only keep a mapping + of localdummy weakrefs to locals dicts it's possible that the weakrefs would + be cleared before finalizers were called (GC currently clears weakrefs that + are garbage before invoking finalizers), causing lookups in finalizers to + fail. */ typedef struct { PyObject_HEAD - PyObject *localdict; /* Borrowed reference! */ PyObject *weakreflist; /* List of weak references to self */ } localdummyobject; @@ -1413,80 +1424,60 @@ static PyType_Spec local_dummy_type_spec = { typedef struct { PyObject_HEAD - PyObject *key; PyObject *args; PyObject *kw; PyObject *weakreflist; /* List of weak references to self */ - /* A {localdummy weakref -> localdict} dict */ - PyObject *dummies; - /* The callback for weakrefs to localdummies */ - PyObject *wr_callback; + /* A {localdummy -> localdict} dict */ + PyObject *localdicts; + /* A set of weakrefs to thread sentinels localdummies*/ + PyObject *thread_watchdogs; } localobject; /* Forward declaration */ -static PyObject *_ldict(localobject *self, thread_module_state *state); -static PyObject *_localdummy_destroyed(PyObject *meth_self, PyObject *dummyweakref); +static int create_localsdict(localobject *self, thread_module_state *state, + PyObject **localsdict, PyObject **sentinel_wr); +static PyObject *clear_locals(PyObject *meth_self, PyObject *dummyweakref); -/* Create and register the dummy for the current thread. 
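The user-visible property this key/sentinel arrangement protects — that a thread's `threading.local` values remain reachable while finalizers run in that thread — can be illustrated from Python; a rough sketch, not part of the patch:

```
import threading

local = threading.local()

class Holder:
    def __del__(self):
        # The per-thread locals dict has not been torn down yet, so this
        # lookup still sees the value set below rather than "<missing>".
        print("seen from finalizer:", getattr(local, "value", "<missing>"))

def worker():
    local.value = 42
    holder = Holder()
    del holder          # drop the last reference; __del__ runs in this thread

t = threading.Thread(target=worker)
t.start()
t.join()
```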
- Returns a borrowed reference of the corresponding local dict */ +/* Create a weakref to the sentinel localdummy for the current thread */ static PyObject * -_local_create_dummy(localobject *self, thread_module_state *state) +create_sentinel_wr(localobject *self) { - PyObject *ldict = NULL, *wr = NULL; - localdummyobject *dummy = NULL; - PyTypeObject *type = state->local_dummy_type; + static PyMethodDef wr_callback_def = { + "clear_locals", (PyCFunction) clear_locals, METH_O + }; - PyObject *tdict = PyThreadState_GetDict(); - if (tdict == NULL) { - PyErr_SetString(PyExc_SystemError, - "Couldn't get thread-state dictionary"); - goto err; - } + PyThreadState *tstate = PyThreadState_Get(); - ldict = PyDict_New(); - if (ldict == NULL) { - goto err; - } - dummy = (localdummyobject *) type->tp_alloc(type, 0); - if (dummy == NULL) { - goto err; - } - dummy->localdict = ldict; - wr = PyWeakref_NewRef((PyObject *) dummy, self->wr_callback); - if (wr == NULL) { - goto err; + /* We use a weak reference to self in the callback closure + in order to avoid spurious reference cycles */ + PyObject *self_wr = PyWeakref_NewRef((PyObject *) self, NULL); + if (self_wr == NULL) { + return NULL; } - /* As a side-effect, this will cache the weakref's hash before the - dummy gets deleted */ - int r = PyDict_SetItem(self->dummies, wr, ldict); - if (r < 0) { - goto err; + PyObject *args = PyTuple_New(2); + if (args == NULL) { + Py_DECREF(self_wr); + return NULL; } - Py_CLEAR(wr); - r = PyDict_SetItem(tdict, self->key, (PyObject *) dummy); - if (r < 0) { - goto err; + PyTuple_SET_ITEM(args, 0, self_wr); + PyTuple_SET_ITEM(args, 1, Py_NewRef(tstate->threading_local_key)); + + PyObject *cb = PyCFunction_New(&wr_callback_def, args); + Py_DECREF(args); + if (cb == NULL) { + return NULL; } - Py_CLEAR(dummy); - Py_DECREF(ldict); - return ldict; + PyObject *wr = PyWeakref_NewRef(tstate->threading_local_sentinel, cb); + Py_DECREF(cb); -err: - Py_XDECREF(ldict); - Py_XDECREF(wr); - Py_XDECREF(dummy); - return NULL; + return wr; } static PyObject * local_new(PyTypeObject *type, PyObject *args, PyObject *kw) { - static PyMethodDef wr_callback_def = { - "_localdummy_destroyed", (PyCFunction) _localdummy_destroyed, METH_O - }; - if (type->tp_init == PyBaseObject_Type.tp_init) { int rc = 0; if (args != NULL) @@ -1513,30 +1504,25 @@ local_new(PyTypeObject *type, PyObject *args, PyObject *kw) self->args = Py_XNewRef(args); self->kw = Py_XNewRef(kw); - self->key = PyUnicode_FromFormat("thread.local.%p", self); - if (self->key == NULL) { - goto err; - } - self->dummies = PyDict_New(); - if (self->dummies == NULL) { + self->localdicts = PyDict_New(); + if (self->localdicts == NULL) { goto err; } - /* We use a weak reference to self in the callback closure - in order to avoid spurious reference cycles */ - PyObject *wr = PyWeakref_NewRef((PyObject *) self, NULL); - if (wr == NULL) { - goto err; - } - self->wr_callback = PyCFunction_NewEx(&wr_callback_def, wr, NULL); - Py_DECREF(wr); - if (self->wr_callback == NULL) { + self->thread_watchdogs = PySet_New(NULL); + if (self->thread_watchdogs == NULL) { goto err; } - if (_local_create_dummy(self, state) == NULL) { + + PyObject *localsdict = NULL; + PyObject *sentinel_wr = NULL; + if (create_localsdict(self, state, &localsdict, &sentinel_wr) < 0) { goto err; } + Py_DECREF(localsdict); + Py_DECREF(sentinel_wr); + return (PyObject *)self; err: @@ -1550,7 +1536,8 @@ local_traverse(localobject *self, visitproc visit, void *arg) Py_VISIT(Py_TYPE(self)); Py_VISIT(self->args); Py_VISIT(self->kw); 
- Py_VISIT(self->dummies); + Py_VISIT(self->localdicts); + Py_VISIT(self->thread_watchdogs); return 0; } @@ -1559,27 +1546,8 @@ local_clear(localobject *self) { Py_CLEAR(self->args); Py_CLEAR(self->kw); - Py_CLEAR(self->dummies); - Py_CLEAR(self->wr_callback); - /* Remove all strong references to dummies from the thread states */ - if (self->key) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - _PyRuntimeState *runtime = &_PyRuntime; - HEAD_LOCK(runtime); - PyThreadState *tstate = PyInterpreterState_ThreadHead(interp); - HEAD_UNLOCK(runtime); - while (tstate) { - if (tstate->dict) { - if (PyDict_Pop(tstate->dict, self->key, NULL) < 0) { - // Silently ignore error - PyErr_Clear(); - } - } - HEAD_LOCK(runtime); - tstate = PyThreadState_Next(tstate); - HEAD_UNLOCK(runtime); - } - } + Py_CLEAR(self->localdicts); + Py_CLEAR(self->thread_watchdogs); return 0; } @@ -1595,48 +1563,142 @@ local_dealloc(localobject *self) PyObject_GC_UnTrack(self); local_clear(self); - Py_XDECREF(self->key); PyTypeObject *tp = Py_TYPE(self); tp->tp_free((PyObject*)self); Py_DECREF(tp); } -/* Returns a borrowed reference to the local dict, creating it if necessary */ +/* Create the TLS key and sentinel if they don't exist */ +static int +create_localdummies(thread_module_state *state) +{ + PyThreadState *tstate = _PyThreadState_GET(); + + if (tstate->threading_local_key != NULL) { + return 0; + } + + PyTypeObject *ld_type = state->local_dummy_type; + tstate->threading_local_key = ld_type->tp_alloc(ld_type, 0); + if (tstate->threading_local_key == NULL) { + return -1; + } + + tstate->threading_local_sentinel = ld_type->tp_alloc(ld_type, 0); + if (tstate->threading_local_sentinel == NULL) { + Py_CLEAR(tstate->threading_local_key); + return -1; + } + + return 0; +} + +/* Insert a localsdict and sentinel weakref for the current thread, placing + strong references in localsdict and sentinel_wr, respectively. +*/ +static int +create_localsdict(localobject *self, thread_module_state *state, + PyObject **localsdict, PyObject **sentinel_wr) +{ + PyThreadState *tstate = _PyThreadState_GET(); + PyObject *ldict = NULL; + PyObject *wr = NULL; + + if (create_localdummies(state) < 0) { + goto err; + } + + /* Create and insert the locals dict and sentinel weakref */ + ldict = PyDict_New(); + if (ldict == NULL) { + goto err; + } + + if (PyDict_SetItem(self->localdicts, tstate->threading_local_key, ldict) < + 0) { + goto err; + } + + wr = create_sentinel_wr(self); + if (wr == NULL) { + PyObject *exc = PyErr_GetRaisedException(); + if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < + 0) { + PyErr_WriteUnraisable((PyObject *)self); + } + PyErr_SetRaisedException(exc); + goto err; + } + + if (PySet_Add(self->thread_watchdogs, wr) < 0) { + PyObject *exc = PyErr_GetRaisedException(); + if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < + 0) { + PyErr_WriteUnraisable((PyObject *)self); + } + PyErr_SetRaisedException(exc); + goto err; + } + + *localsdict = ldict; + *sentinel_wr = wr; + return 0; + +err: + Py_XDECREF(ldict); + Py_XDECREF(wr); + return -1; +} + +/* Return a strong reference to the locals dict for the current thread, + creating it if necessary. 
+*/ static PyObject * _ldict(localobject *self, thread_module_state *state) { - PyObject *tdict = PyThreadState_GetDict(); - if (tdict == NULL) { - PyErr_SetString(PyExc_SystemError, - "Couldn't get thread-state dictionary"); + if (create_localdummies(state) < 0) { return NULL; } + /* Check if a localsdict already exists */ PyObject *ldict; - PyObject *dummy = PyDict_GetItemWithError(tdict, self->key); - if (dummy == NULL) { - if (PyErr_Occurred()) { - return NULL; - } - ldict = _local_create_dummy(self, state); - if (ldict == NULL) - return NULL; + PyThreadState *tstate = _PyThreadState_GET(); + if (PyDict_GetItemRef(self->localdicts, tstate->threading_local_key, + &ldict) < 0) { + return NULL; + } + if (ldict != NULL) { + return ldict; + } - if (Py_TYPE(self)->tp_init != PyBaseObject_Type.tp_init && - Py_TYPE(self)->tp_init((PyObject*)self, - self->args, self->kw) < 0) { - /* we need to get rid of ldict from thread so - we create a new one the next time we do an attr - access */ - PyDict_DelItem(tdict, self->key); - return NULL; - } + /* threading.local hasn't been instantiated for this thread */ + PyObject *wr; + if (create_localsdict(self, state, &ldict, &wr) < 0) { + return NULL; } - else { - assert(Py_IS_TYPE(dummy, state->local_dummy_type)); - ldict = ((localdummyobject *) dummy)->localdict; + + /* run __init__ if we're a subtype of `threading.local` */ + if (Py_TYPE(self)->tp_init != PyBaseObject_Type.tp_init && + Py_TYPE(self)->tp_init((PyObject *)self, self->args, self->kw) < 0) { + /* we need to get rid of ldict from thread so + we create a new one the next time we do an attr + access */ + PyObject *exc = PyErr_GetRaisedException(); + if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < + 0) { + PyErr_WriteUnraisable((PyObject *)self); + PyErr_Clear(); + } + if (PySet_Discard(self->thread_watchdogs, wr) < 0) { + PyErr_WriteUnraisable((PyObject *)self); + } + PyErr_SetRaisedException(exc); + Py_DECREF(ldict); + Py_DECREF(wr); + return NULL; } + Py_DECREF(wr); return ldict; } @@ -1650,21 +1712,28 @@ local_setattro(localobject *self, PyObject *name, PyObject *v) PyObject *ldict = _ldict(self, state); if (ldict == NULL) { - return -1; + goto err; } int r = PyObject_RichCompareBool(name, &_Py_ID(__dict__), Py_EQ); if (r == -1) { - return -1; + goto err; } if (r == 1) { PyErr_Format(PyExc_AttributeError, "'%.100s' object attribute '%U' is read-only", Py_TYPE(self)->tp_name, name); - return -1; + goto err; } - return _PyObject_GenericSetAttrWithDict((PyObject *)self, name, v, ldict); + int st = + _PyObject_GenericSetAttrWithDict((PyObject *)self, name, v, ldict); + Py_DECREF(ldict); + return st; + +err: + Py_XDECREF(ldict); + return -1; } static PyObject *local_getattro(localobject *, PyObject *); @@ -1707,34 +1776,42 @@ local_getattro(localobject *self, PyObject *name) int r = PyObject_RichCompareBool(name, &_Py_ID(__dict__), Py_EQ); if (r == 1) { - return Py_NewRef(ldict); + return ldict; } if (r == -1) { + Py_DECREF(ldict); return NULL; } if (!Py_IS_TYPE(self, state->local_type)) { /* use generic lookup for subtypes */ - return _PyObject_GenericGetAttrWithDict((PyObject *)self, name, - ldict, 0); + PyObject *res = + _PyObject_GenericGetAttrWithDict((PyObject *)self, name, ldict, 0); + Py_DECREF(ldict); + return res; } /* Optimization: just look in dict ourselves */ PyObject *value; if (PyDict_GetItemRef(ldict, name, &value) != 0) { // found or error + Py_DECREF(ldict); return value; } /* Fall back on generic to get __class__ and __dict__ */ - return 
_PyObject_GenericGetAttrWithDict( - (PyObject *)self, name, ldict, 0); + PyObject *res = + _PyObject_GenericGetAttrWithDict((PyObject *)self, name, ldict, 0); + Py_DECREF(ldict); + return res; } -/* Called when a dummy is destroyed. */ +/* Called when a dummy is destroyed, indicating that the owning thread is being + * cleared. */ static PyObject * -_localdummy_destroyed(PyObject *localweakref, PyObject *dummyweakref) +clear_locals(PyObject *locals_and_key, PyObject *dummyweakref) { + PyObject *localweakref = PyTuple_GetItem(locals_and_key, 0); localobject *self = (localobject *)_PyWeakref_GET_REF(localweakref); if (self == NULL) { Py_RETURN_NONE; @@ -1742,11 +1819,18 @@ _localdummy_destroyed(PyObject *localweakref, PyObject *dummyweakref) /* If the thread-local object is still alive and not being cleared, remove the corresponding local dict */ - if (self->dummies != NULL) { - if (PyDict_Pop(self->dummies, dummyweakref, NULL) < 0) { + if (self->localdicts != NULL) { + PyObject *key = PyTuple_GetItem(locals_and_key, 1); + if (PyDict_Pop(self->localdicts, key, NULL) < 0) { PyErr_WriteUnraisable((PyObject*)self); } } + if (self->thread_watchdogs != NULL) { + if (PySet_Discard(self->thread_watchdogs, dummyweakref) < 0) { + PyErr_WriteUnraisable((PyObject *)self); + } + } + Py_DECREF(self); Py_RETURN_NONE; } diff --git a/Modules/_typingmodule.c b/Modules/_typingmodule.c index 37af00f3071..09fbb3c5e8b 100644 --- a/Modules/_typingmodule.c +++ b/Modules/_typingmodule.c @@ -63,9 +63,6 @@ _typing_exec(PyObject *m) if (PyModule_AddObjectRef(m, "TypeAliasType", (PyObject *)&_PyTypeAlias_Type) < 0) { return -1; } - if (PyType_Ready(&_PyNoDefault_Type) < 0) { - return -1; - } if (PyModule_AddObjectRef(m, "NoDefault", (PyObject *)&_Py_NoDefaultStruct) < 0) { return -1; } diff --git a/Modules/_weakref.c b/Modules/_weakref.c index a5c15c0f10b..ecaa08ff60f 100644 --- a/Modules/_weakref.c +++ b/Modules/_weakref.c @@ -31,7 +31,7 @@ _weakref_getweakrefcount_impl(PyObject *module, PyObject *object) static int -is_dead_weakref(PyObject *value) +is_dead_weakref(PyObject *value, void *unused) { if (!PyWeakref_Check(value)) { PyErr_SetString(PyExc_TypeError, "not a weakref"); @@ -56,15 +56,8 @@ _weakref__remove_dead_weakref_impl(PyObject *module, PyObject *dct, PyObject *key) /*[clinic end generated code: output=d9ff53061fcb875c input=19fc91f257f96a1d]*/ { - if (_PyDict_DelItemIf(dct, key, is_dead_weakref) < 0) { - if (PyErr_ExceptionMatches(PyExc_KeyError)) - /* This function is meant to allow safe weak-value dicts - with GC in another thread (see issue #28427), so it's - ok if the key doesn't exist anymore. - */ - PyErr_Clear(); - else - return NULL; + if (_PyDict_DelItemIf(dct, key, is_dead_weakref, NULL) < 0) { + return NULL; } Py_RETURN_NONE; } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 8794d568e92..71bddd2f697 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -2803,7 +2803,7 @@ _winapi__mimetypes_read_windows_registry_impl(PyObject *module, } err = RegOpenKeyExW(hkcr, ext, 0, KEY_READ, &subkey); - if (err == ERROR_FILE_NOT_FOUND) { + if (err == ERROR_FILE_NOT_FOUND || err == ERROR_ACCESS_DENIED) { err = ERROR_SUCCESS; continue; } else if (err != ERROR_SUCCESS) { diff --git a/Modules/_xxtestfuzz/README.rst b/Modules/_xxtestfuzz/README.rst index b951858458c..68d5d589d2a 100644 --- a/Modules/_xxtestfuzz/README.rst +++ b/Modules/_xxtestfuzz/README.rst @@ -23,7 +23,7 @@ Add the test name on a new line in ``fuzz_tests.txt``. 
In ``fuzzer.c``, add a function to be run:: - int $test_name (const char* data, size_t size) { + static int $fuzz_test_name(const char* data, size_t size) { ... return 0; } @@ -31,10 +31,12 @@ In ``fuzzer.c``, add a function to be run:: And invoke it from ``LLVMFuzzerTestOneInput``:: - #if _Py_FUZZ_YES(fuzz_builtin_float) - rv |= _run_fuzz(data, size, fuzz_builtin_float); + #if !defined(_Py_FUZZ_ONE) || defined(_Py_FUZZ_$fuzz_test_name) + rv |= _run_fuzz(data, size, $fuzz_test_name); #endif +Don't forget to replace ``$fuzz_test_name`` with your actual test name. + ``LLVMFuzzerTestOneInput`` will run in oss-fuzz, with each test in ``fuzz_tests.txt`` run separately. diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index a3b833d47cd..679222c3f03 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -2847,7 +2847,7 @@ array_new(PyTypeObject *type, PyObject *args, PyObject *kwds) PyDoc_STRVAR(module_doc, "This module defines an object type which can efficiently represent\n\ -an array of basic values: characters, integers, floating point\n\ +an array of basic values: characters, integers, floating-point\n\ numbers. Arrays are sequence types and behave very much like lists,\n\ except that the type of objects stored in them is constrained.\n"); @@ -2875,8 +2875,8 @@ The following type codes are defined:\n\ 'L' unsigned integer 4\n\ 'q' signed integer 8 (see note)\n\ 'Q' unsigned integer 8 (see note)\n\ - 'f' floating point 4\n\ - 'd' floating point 8\n\ + 'f' floating-point 4\n\ + 'd' floating-point 8\n\ \n\ NOTE: The 'u' typecode corresponds to Python's unicode character. On\n\ narrow builds this is 2-bytes on wide builds this is 4-bytes.\n\ diff --git a/Modules/clinic/_testclinic.c.h b/Modules/clinic/_testclinic.c.h index 451912a8bd1..1255319357f 100644 --- a/Modules/clinic/_testclinic.c.h +++ b/Modules/clinic/_testclinic.c.h @@ -2382,42 +2382,6 @@ posonly_vararg(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje return return_value; } -PyDoc_STRVAR(vararg_and_posonly__doc__, -"vararg_and_posonly($module, a, /, *args)\n" -"--\n" -"\n"); - -#define VARARG_AND_POSONLY_METHODDEF \ - {"vararg_and_posonly", _PyCFunction_CAST(vararg_and_posonly), METH_FASTCALL, vararg_and_posonly__doc__}, - -static PyObject * -vararg_and_posonly_impl(PyObject *module, PyObject *a, PyObject *args); - -static PyObject * -vararg_and_posonly(PyObject *module, PyObject *const *args, Py_ssize_t nargs) -{ - PyObject *return_value = NULL; - PyObject *a; - PyObject *__clinic_args = NULL; - - if (!_PyArg_CheckPositional("vararg_and_posonly", nargs, 1, PY_SSIZE_T_MAX)) { - goto exit; - } - a = args[0]; - __clinic_args = PyTuple_New(nargs - 1); - if (!__clinic_args) { - goto exit; - } - for (Py_ssize_t i = 0; i < nargs - 1; ++i) { - PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[1 + i])); - } - return_value = vararg_and_posonly_impl(module, a, __clinic_args); - -exit: - Py_XDECREF(__clinic_args); - return return_value; -} - PyDoc_STRVAR(vararg__doc__, "vararg($module, /, a, *args)\n" "--\n" @@ -2543,6 +2507,78 @@ vararg_with_default(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P return return_value; } +PyDoc_STRVAR(vararg_with_default2__doc__, +"vararg_with_default2($module, /, a, *args, b=None, c=None)\n" +"--\n" +"\n"); + +#define VARARG_WITH_DEFAULT2_METHODDEF \ + {"vararg_with_default2", _PyCFunction_CAST(vararg_with_default2), METH_FASTCALL|METH_KEYWORDS, vararg_with_default2__doc__}, + +static PyObject * +vararg_with_default2_impl(PyObject *module, PyObject *a, PyObject 
*args, + PyObject *b, PyObject *c); + +static PyObject * +vararg_with_default2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { _Py_LATIN1_CHR('a'), _Py_LATIN1_CHR('b'), _Py_LATIN1_CHR('c'), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg_with_default2", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = Py_MIN(nargs, 1) + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *__clinic_args = NULL; + PyObject *b = Py_None; + PyObject *c = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + __clinic_args = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + b = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + c = args[3]; +skip_optional_kwonly: + return_value = vararg_with_default2_impl(module, a, __clinic_args, b, c); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + PyDoc_STRVAR(vararg_with_only_defaults__doc__, "vararg_with_only_defaults($module, /, *args, b=None)\n" "--\n" @@ -2605,6 +2641,78 @@ vararg_with_only_defaults(PyObject *module, PyObject *const *args, Py_ssize_t na return return_value; } +PyDoc_STRVAR(vararg_kwonly_req_opt__doc__, +"vararg_kwonly_req_opt($module, /, *args, a, b=None, c=None)\n" +"--\n" +"\n"); + +#define VARARG_KWONLY_REQ_OPT_METHODDEF \ + {"vararg_kwonly_req_opt", _PyCFunction_CAST(vararg_kwonly_req_opt), METH_FASTCALL|METH_KEYWORDS, vararg_kwonly_req_opt__doc__}, + +static PyObject * +vararg_kwonly_req_opt_impl(PyObject *module, PyObject *args, PyObject *a, + PyObject *b, PyObject *c); + +static PyObject * +vararg_kwonly_req_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { _Py_LATIN1_CHR('a'), _Py_LATIN1_CHR('b'), _Py_LATIN1_CHR('c'), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "vararg_kwonly_req_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = 0 + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *__clinic_args = NULL; + PyObject *a; + PyObject *b = Py_None; + PyObject *c = Py_None; + + args = _PyArg_UnpackKeywordsWithVararg(args, nargs, NULL, kwnames, &_parser, 0, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + __clinic_args = args[0]; + a = args[1]; + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[2]) { + b = args[2]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + c = args[3]; +skip_optional_kwonly: + return_value = vararg_kwonly_req_opt_impl(module, __clinic_args, a, b, c); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + PyDoc_STRVAR(gh_32092_oob__doc__, "gh_32092_oob($module, /, pos1, pos2, *varargs, kw1=None, kw2=None)\n" "--\n" @@ -3219,4 +3327,4 @@ _testclinic_TestClass_get_defining_class_arg(PyObject *self, PyTypeObject *cls, exit: return return_value; } -/*[clinic end generated code: output=62aebbac06ec6588 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=52b0a0d6e5c291f1 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/mathmodule.c.h b/Modules/clinic/mathmodule.c.h index d16db722a74..81eec310ddb 100644 --- a/Modules/clinic/mathmodule.c.h +++ b/Modules/clinic/mathmodule.c.h @@ -34,9 +34,9 @@ PyDoc_STRVAR(math_fsum__doc__, "fsum($module, seq, /)\n" "--\n" "\n" -"Return an accurate floating point sum of values in the iterable seq.\n" +"Return an accurate floating-point sum of values in the iterable seq.\n" "\n" -"Assumes IEEE-754 floating point arithmetic."); +"Assumes IEEE-754 floating-point arithmetic."); #define MATH_FSUM_METHODDEF \ {"fsum", (PyCFunction)math_fsum, METH_O, math_fsum__doc__}, @@ -610,7 +610,7 @@ PyDoc_STRVAR(math_isclose__doc__, "isclose($module, /, a, b, *, rel_tol=1e-09, abs_tol=0.0)\n" "--\n" "\n" -"Determine whether two floating point numbers are close in value.\n" +"Determine whether two floating-point numbers are close in value.\n" "\n" " rel_tol\n" " maximum difference for being considered \"close\", relative to the\n" @@ -1011,4 +1011,4 @@ math_ulp(PyObject *module, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=7d03f84f77342496 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=755da3b1dbd9e45f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index c5f27a5c9ed..14a6efb9ac9 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -5954,7 +5954,7 @@ os_wait(PyObject *module, PyObject *Py_UNUSED(ignored)) #endif /* defined(HAVE_WAIT) */ -#if (defined(__linux__) && defined(__NR_pidfd_open)) +#if (defined(__linux__) && defined(__NR_pidfd_open) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) PyDoc_STRVAR(os_pidfd_open__doc__, "pidfd_open($module, /, pid, flags=0)\n" @@ -6013,7 +6013,7 @@ os_pidfd_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec return return_value; } -#endif /* (defined(__linux__) && defined(__NR_pidfd_open)) */ +#endif /* (defined(__linux__) && defined(__NR_pidfd_open) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) */ #if defined(HAVE_SETNS) @@ -6348,7 +6348,7 @@ PyDoc_STRVAR(os_times__doc__, "\n" "The object returned behaves like a named tuple with these fields:\n" " (utime, stime, cutime, cstime, elapsed_time)\n" -"All fields are floating point numbers."); +"All fields are floating-point numbers."); #define OS_TIMES_METHODDEF \ {"times", (PyCFunction)os_times, METH_NOARGS, os_times__doc__}, @@ -12819,4 +12819,4 @@ os__is_inputhook_installed(PyObject *module, 
PyObject *Py_UNUSED(ignored)) #ifndef OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #define OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #endif /* !defined(OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF) */ -/*[clinic end generated code: output=cebab1ef718b4878 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b2ffb856bcada7c9 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index dc7d3fb8143..49c0e48d2e0 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -6,6 +6,7 @@ preserve # include "pycore_gc.h" // PyGC_Head # include "pycore_runtime.h" // _Py_ID() #endif +#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() #include "pycore_long.h" // _PyLong_UnsignedShort_Converter() #include "pycore_modsupport.h" // _PyArg_CheckPositional() @@ -25,7 +26,7 @@ PyDoc_STRVAR(select_select__doc__, "gotten from a fileno() method call on one of those.\n" "\n" "The optional 4th argument specifies a timeout in seconds; it may be\n" -"a floating point number to specify fractions of seconds. If it is absent\n" +"a floating-point number to specify fractions of seconds. If it is absent\n" "or None, the call will never time out.\n" "\n" "The return value is a tuple of three lists corresponding to the first three\n" @@ -110,7 +111,9 @@ select_poll_register(pollObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_register_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -155,7 +158,9 @@ select_poll_modify(pollObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyLong_UnsignedShort_Converter(args[1], &eventmask)) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_modify_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -187,7 +192,9 @@ select_poll_unregister(pollObject *self, PyObject *arg) if (fd < 0) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_unregister_impl(self, fd); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -230,7 +237,9 @@ select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) } timeout_obj = args[0]; skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_poll_impl(self, timeout_obj); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -281,7 +290,9 @@ select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_register_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -332,7 +343,9 @@ select_devpoll_modify(devpollObject *self, PyObject *const *args, Py_ssize_t nar goto exit; } skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_modify_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -364,7 +377,9 @@ select_devpoll_unregister(devpollObject *self, PyObject *arg) if (fd < 0) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_unregister_impl(self, fd); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -407,7 +422,9 @@ select_devpoll_poll(devpollObject *self, PyObject *const *args, Py_ssize_t nargs } timeout_obj = args[0]; skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_poll_impl(self, timeout_obj); + Py_END_CRITICAL_SECTION(); exit: 
return return_value; @@ -434,7 +451,13 @@ select_devpoll_close_impl(devpollObject *self); static PyObject * select_devpoll_close(devpollObject *self, PyObject *Py_UNUSED(ignored)) { - return select_devpoll_close_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_devpoll_close_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) */ @@ -456,7 +479,13 @@ select_devpoll_fileno_impl(devpollObject *self); static PyObject * select_devpoll_fileno(devpollObject *self, PyObject *Py_UNUSED(ignored)) { - return select_devpoll_fileno_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_devpoll_fileno_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) */ @@ -615,7 +644,13 @@ select_epoll_close_impl(pyEpoll_Object *self); static PyObject * select_epoll_close(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) { - return select_epoll_close_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_epoll_close_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* defined(HAVE_EPOLL) */ @@ -1108,7 +1143,13 @@ select_kqueue_close_impl(kqueue_queue_Object *self); static PyObject * select_kqueue_close(kqueue_queue_Object *self, PyObject *Py_UNUSED(ignored)) { - return select_kqueue_close_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_kqueue_close_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* defined(HAVE_KQUEUE) */ @@ -1319,4 +1360,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=4fc17ae9b6cfdc86 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f99427b75cbe6d44 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h index d074cc30d1e..986c0289f2b 100644 --- a/Modules/clinic/signalmodule.c.h +++ b/Modules/clinic/signalmodule.c.h @@ -597,7 +597,7 @@ PyDoc_STRVAR(signal_sigtimedwait__doc__, "\n" "Like sigwaitinfo(), but with a timeout.\n" "\n" -"The timeout is specified in seconds, with floating point numbers allowed."); +"The timeout is specified in seconds, with floating-point numbers allowed."); #define SIGNAL_SIGTIMEDWAIT_METHODDEF \ {"sigtimedwait", _PyCFunction_CAST(signal_sigtimedwait), METH_FASTCALL, signal_sigtimedwait__doc__}, @@ -670,7 +670,7 @@ signal_pthread_kill(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #endif /* defined(HAVE_PTHREAD_KILL) */ -#if (defined(__linux__) && defined(__NR_pidfd_send_signal)) +#if (defined(__linux__) && defined(__NR_pidfd_send_signal) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) PyDoc_STRVAR(signal_pidfd_send_signal__doc__, "pidfd_send_signal($module, pidfd, signalnum, siginfo=None, flags=0, /)\n" @@ -723,7 +723,7 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar return return_value; } -#endif /* (defined(__linux__) && defined(__NR_pidfd_send_signal)) */ +#endif /* (defined(__linux__) && defined(__NR_pidfd_send_signal) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) 
*/ #ifndef SIGNAL_ALARM_METHODDEF #define SIGNAL_ALARM_METHODDEF @@ -776,4 +776,4 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar #ifndef SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */ -/*[clinic end generated code: output=1c11c1b6f12f26be input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c57b4b98fad6f4b8 input=a9049054013a1b77]*/ diff --git a/Modules/expat/expat.h b/Modules/expat/expat.h index c2770be3897..d0d6015a662 100644 --- a/Modules/expat/expat.h +++ b/Modules/expat/expat.h @@ -1066,7 +1066,7 @@ XML_SetReparseDeferralEnabled(XML_Parser parser, XML_Bool enabled); */ #define XML_MAJOR_VERSION 2 #define XML_MINOR_VERSION 6 -#define XML_MICRO_VERSION 2 +#define XML_MICRO_VERSION 3 #ifdef __cplusplus } diff --git a/Modules/expat/siphash.h b/Modules/expat/siphash.h index a1ed99e687b..04f6f74585b 100644 --- a/Modules/expat/siphash.h +++ b/Modules/expat/siphash.h @@ -126,8 +126,7 @@ | ((uint64_t)((p)[4]) << 32) | ((uint64_t)((p)[5]) << 40) \ | ((uint64_t)((p)[6]) << 48) | ((uint64_t)((p)[7]) << 56)) -#define SIPHASH_INITIALIZER \ - { 0, 0, 0, 0, {0}, 0, 0 } +#define SIPHASH_INITIALIZER {0, 0, 0, 0, {0}, 0, 0} struct siphash { uint64_t v0, v1, v2, v3; diff --git a/Modules/expat/xmlparse.c b/Modules/expat/xmlparse.c index 2951fec70c5..d9285b213b3 100644 --- a/Modules/expat/xmlparse.c +++ b/Modules/expat/xmlparse.c @@ -1,4 +1,4 @@ -/* 2a14271ad4d35e82bde8ba210b4edb7998794bcbae54deab114046a300f9639a (2.6.2+) +/* ba4cdf9bdb534f355a9def4c9e25d20ee8e72f95b0a4d930be52e563f5080196 (2.6.3+) __ __ _ ___\ \/ /_ __ __ _| |_ / _ \\ /| '_ \ / _` | __| @@ -39,6 +39,7 @@ Copyright (c) 2022 Sean McBride Copyright (c) 2023 Owain Davies Copyright (c) 2023-2024 Sony Corporation / Snild Dolkow + Copyright (c) 2024 Berkay Eren Ürün Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -294,7 +295,7 @@ typedef struct { The name of the element is stored in both the document and API encodings. The memory buffer 'buf' is a separately-allocated memory area which stores the name. During the XML_Parse()/ - XMLParseBuffer() when the element is open, the memory for the 'raw' + XML_ParseBuffer() when the element is open, the memory for the 'raw' version of the name (in the document encoding) is shared with the document buffer. 
If the element is open across calls to XML_Parse()/XML_ParseBuffer(), the buffer is re-allocated to @@ -2038,6 +2039,12 @@ XML_ParseBuffer(XML_Parser parser, int len, int isFinal) { if (parser == NULL) return XML_STATUS_ERROR; + + if (len < 0) { + parser->m_errorCode = XML_ERROR_INVALID_ARGUMENT; + return XML_STATUS_ERROR; + } + switch (parser->m_parsingStatus.parsing) { case XML_SUSPENDED: parser->m_errorCode = XML_ERROR_SUSPENDED; @@ -5846,18 +5853,17 @@ processInternalEntity(XML_Parser parser, ENTITY *entity, XML_Bool betweenDecl) { /* Set a safe default value in case 'next' does not get set */ next = textStart; -#ifdef XML_DTD if (entity->is_param) { int tok = XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next); result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd, tok, next, &next, XML_FALSE, XML_FALSE, XML_ACCOUNT_ENTITY_EXPANSION); - } else -#endif /* XML_DTD */ + } else { result = doContent(parser, parser->m_tagLevel, parser->m_internalEncoding, textStart, textEnd, &next, XML_FALSE, XML_ACCOUNT_ENTITY_EXPANSION); + } if (result == XML_ERROR_NONE) { if (textEnd != next && parser->m_parsingStatus.parsing == XML_SUSPENDED) { @@ -5894,18 +5900,17 @@ internalEntityProcessor(XML_Parser parser, const char *s, const char *end, /* Set a safe default value in case 'next' does not get set */ next = textStart; -#ifdef XML_DTD if (entity->is_param) { int tok = XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next); result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd, tok, next, &next, XML_FALSE, XML_TRUE, XML_ACCOUNT_ENTITY_EXPANSION); - } else -#endif /* XML_DTD */ + } else { result = doContent(parser, openEntity->startTagLevel, parser->m_internalEncoding, textStart, textEnd, &next, XML_FALSE, XML_ACCOUNT_ENTITY_EXPANSION); + } if (result != XML_ERROR_NONE) return result; @@ -5932,7 +5937,6 @@ internalEntityProcessor(XML_Parser parser, const char *s, const char *end, return XML_ERROR_NONE; } -#ifdef XML_DTD if (entity->is_param) { int tok; parser->m_processor = prologProcessor; @@ -5940,9 +5944,7 @@ internalEntityProcessor(XML_Parser parser, const char *s, const char *end, return doProlog(parser, parser->m_encoding, s, end, tok, next, nextPtr, (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE, XML_ACCOUNT_DIRECT); - } else -#endif /* XML_DTD */ - { + } else { parser->m_processor = contentProcessor; /* see externalEntityContentProcessor vs contentProcessor */ result = doContent(parser, parser->m_parentParser ? 1 : 0, @@ -7016,6 +7018,16 @@ dtdCopy(XML_Parser oldParser, DTD *newDtd, const DTD *oldDtd, if (! newE) return 0; if (oldE->nDefaultAtts) { + /* Detect and prevent integer overflow. + * The preprocessor guard addresses the "always false" warning + * from -Wtype-limits on platforms where + * sizeof(int) < sizeof(size_t), e.g. on x86_64. */ +#if UINT_MAX >= SIZE_MAX + if ((size_t)oldE->nDefaultAtts + > ((size_t)(-1) / sizeof(DEFAULT_ATTRIBUTE))) { + return 0; + } +#endif newE->defaultAtts = ms->malloc_fcn(oldE->nDefaultAtts * sizeof(DEFAULT_ATTRIBUTE)); if (! newE->defaultAtts) { @@ -7558,6 +7570,15 @@ nextScaffoldPart(XML_Parser parser) { int next; if (! dtd->scaffIndex) { + /* Detect and prevent integer overflow. + * The preprocessor guard addresses the "always false" warning + * from -Wtype-limits on platforms where + * sizeof(unsigned int) < sizeof(size_t), e.g. on x86_64. 
*/ +#if UINT_MAX >= SIZE_MAX + if (parser->m_groupSize > ((size_t)(-1) / sizeof(int))) { + return -1; + } +#endif dtd->scaffIndex = (int *)MALLOC(parser, parser->m_groupSize * sizeof(int)); if (! dtd->scaffIndex) return -1; diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index cfa3cbdc34b..b62362f2777 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -75,7 +75,7 @@ static fault_handler_t faulthandler_handlers[] = { #ifdef SIGILL {SIGILL, 0, "Illegal instruction", }, #endif - {SIGFPE, 0, "Floating point exception", }, + {SIGFPE, 0, "Floating-point exception", }, {SIGABRT, 0, "Aborted", }, /* define SIGSEGV at the end to make it the default choice if searching the handler fails in faulthandler_fatal_error() */ diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c index b6eeec2c66f..17ecf4c6731 100644 --- a/Modules/fcntlmodule.c +++ b/Modules/fcntlmodule.c @@ -580,12 +580,17 @@ all_ins(PyObject* m) #ifdef F_GETPIPE_SZ if (PyModule_AddIntMacro(m, F_GETPIPE_SZ)) return -1; #endif + +/* On Android, FICLONE is blocked by SELinux. */ +#ifndef __ANDROID__ #ifdef FICLONE if (PyModule_AddIntMacro(m, FICLONE)) return -1; #endif #ifdef FICLONERANGE if (PyModule_AddIntMacro(m, FICLONERANGE)) return -1; #endif +#endif + #ifdef F_GETOWN_EX // since Linux 2.6.32 if (PyModule_AddIntMacro(m, F_GETOWN_EX)) return -1; diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 57e4aae9ed5..b57a1c90723 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -158,12 +158,17 @@ gc_set_threshold_impl(PyObject *module, int threshold0, int group_right_1, { GCState *gcstate = get_gc_state(); - gcstate->young.threshold = threshold0; + gcstate->generations[0].threshold = threshold0; if (group_right_1) { - gcstate->old[0].threshold = threshold1; + gcstate->generations[1].threshold = threshold1; } if (group_right_2) { - gcstate->old[1].threshold = threshold2; + gcstate->generations[2].threshold = threshold2; + + /* generations higher than 2 get the same threshold */ + for (int i = 3; i < NUM_GENERATIONS; i++) { + gcstate->generations[i].threshold = gcstate->generations[2].threshold; + } } Py_RETURN_NONE; } @@ -180,9 +185,9 @@ gc_get_threshold_impl(PyObject *module) { GCState *gcstate = get_gc_state(); return Py_BuildValue("(iii)", - gcstate->young.threshold, - gcstate->old[0].threshold, - 0); + gcstate->generations[0].threshold, + gcstate->generations[1].threshold, + gcstate->generations[2].threshold); } /*[clinic input] @@ -202,14 +207,14 @@ gc_get_count_impl(PyObject *module) struct _gc_thread_state *gc = &tstate->gc; // Flush the local allocation count to the global count - _Py_atomic_add_int(&gcstate->young.count, (int)gc->alloc_count); + _Py_atomic_add_int(&gcstate->generations[0].count, (int)gc->alloc_count); gc->alloc_count = 0; #endif return Py_BuildValue("(iii)", - gcstate->young.count, - gcstate->old[gcstate->visited_space].count, - gcstate->old[gcstate->visited_space^1].count); + gcstate->generations[0].count, + gcstate->generations[1].count, + gcstate->generations[2].count); } /*[clinic input] diff --git a/Modules/getpath.c b/Modules/getpath.c index abed1390282..d0128b20fae 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -951,6 +951,11 @@ _PyConfig_InitPathConfig(PyConfig *config, int compute_path_config) !wchar_to_dict(dict, "executable_dir", NULL) || !wchar_to_dict(dict, "py_setpath", _PyPathConfig_GetGlobalModuleSearchPath()) || !funcs_to_dict(dict, config->pathconfig_warnings) || +#ifdef Py_GIL_DISABLED + !decode_to_dict(dict, "ABI_THREAD", "t") || +#else + 
!decode_to_dict(dict, "ABI_THREAD", "") || +#endif #ifndef MS_WINDOWS PyDict_SetItemString(dict, "winreg", Py_None) < 0 || #endif diff --git a/Modules/getpath.py b/Modules/getpath.py index bc7053224aa..1f1bfcb4f64 100644 --- a/Modules/getpath.py +++ b/Modules/getpath.py @@ -40,6 +40,7 @@ # EXE_SUFFIX -- [in, opt] '.exe' on Windows/Cygwin/similar # VERSION_MAJOR -- [in] sys.version_info.major # VERSION_MINOR -- [in] sys.version_info.minor +# ABI_THREAD -- [in] either 't' for free-threaded builds or '' # PYWINVER -- [in] the Windows platform-specific version (e.g. 3.8-32) # ** Values read from the environment ** @@ -172,17 +173,18 @@ # ****************************************************************************** platlibdir = config.get('platlibdir') or PLATLIBDIR +ABI_THREAD = ABI_THREAD or '' if os_name == 'posix' or os_name == 'darwin': BUILDDIR_TXT = 'pybuilddir.txt' BUILD_LANDMARK = 'Modules/Setup.local' DEFAULT_PROGRAM_NAME = f'python{VERSION_MAJOR}' - STDLIB_SUBDIR = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}' + STDLIB_SUBDIR = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}{ABI_THREAD}' STDLIB_LANDMARKS = [f'{STDLIB_SUBDIR}/os.py', f'{STDLIB_SUBDIR}/os.pyc'] - PLATSTDLIB_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}/lib-dynload' + PLATSTDLIB_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}{ABI_THREAD}/lib-dynload' BUILDSTDLIB_LANDMARKS = ['Lib/os.py'] VENV_LANDMARK = 'pyvenv.cfg' - ZIP_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}{VERSION_MINOR}.zip' + ZIP_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}{VERSION_MINOR}{ABI_THREAD}.zip' DELIM = ':' SEP = '/' diff --git a/Modules/main.c b/Modules/main.c index 1a70b300b6a..15ea49a1bad 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -4,6 +4,7 @@ #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_initconfig.h" // _PyArgv #include "pycore_interp.h" // _PyInterpreterState.sysdict +#include "pycore_long.h" // _PyLong_GetOne() #include "pycore_pathconfig.h" // _PyPathConfig_ComputeSysPath0() #include "pycore_pylifecycle.h" // _Py_PreInitializeFromPyArgv() #include "pycore_pystate.h" // _PyInterpreterState_GET() @@ -259,6 +260,57 @@ pymain_run_command(wchar_t *command) } +static int +pymain_start_pyrepl_no_main(void) +{ + int res = 0; + PyObject *console = NULL; + PyObject *empty_tuple = NULL; + PyObject *kwargs = NULL; + PyObject *console_result = NULL; + + PyObject *pyrepl = PyImport_ImportModule("_pyrepl.main"); + if (pyrepl == NULL) { + fprintf(stderr, "Could not import _pyrepl.main\n"); + res = pymain_exit_err_print(); + goto done; + } + console = PyObject_GetAttrString(pyrepl, "interactive_console"); + if (console == NULL) { + fprintf(stderr, "Could not access _pyrepl.main.interactive_console\n"); + res = pymain_exit_err_print(); + goto done; + } + empty_tuple = PyTuple_New(0); + if (empty_tuple == NULL) { + res = pymain_exit_err_print(); + goto done; + } + kwargs = PyDict_New(); + if (kwargs == NULL) { + res = pymain_exit_err_print(); + goto done; + } + if (!PyDict_SetItemString(kwargs, "pythonstartup", _PyLong_GetOne())) { + _PyRuntime.signals.unhandled_keyboard_interrupt = 0; + console_result = PyObject_Call(console, empty_tuple, kwargs); + if (!console_result && PyErr_Occurred() == PyExc_KeyboardInterrupt) { + _PyRuntime.signals.unhandled_keyboard_interrupt = 1; + } + if (console_result == NULL) { + res = pymain_exit_err_print(); + } + } +done: + Py_XDECREF(console_result); + Py_XDECREF(kwargs); + Py_XDECREF(empty_tuple); + Py_XDECREF(console); + 
Py_XDECREF(pyrepl); + return res; +} + + static int pymain_run_module(const wchar_t *modname, int set_argv0) { @@ -542,6 +594,10 @@ pymain_repl(PyConfig *config, int *exitcode) return; } + if (PySys_Audit("cpython.run_stdin", NULL) < 0) { + return; + } + if (!isatty(fileno(stdin)) || _Py_GetEnv(config->use_environment, "PYTHON_BASIC_REPL")) { PyCompilerFlags cf = _PyCompilerFlags_INIT; @@ -549,7 +605,7 @@ pymain_repl(PyConfig *config, int *exitcode) *exitcode = (run != 0); return; } - int run = pymain_run_module(L"_pyrepl", 0); + int run = pymain_start_pyrepl_no_main(); *exitcode = (run != 0); return; } diff --git a/Modules/makesetup b/Modules/makesetup index d41b6640bb5..8bb971b152a 100755 --- a/Modules/makesetup +++ b/Modules/makesetup @@ -274,7 +274,7 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' | ;; esac rule="$file: $objs" - rule="$rule; \$(BLDSHARED) $objs $libs \$(MODULE_LDFLAGS) -o $file" + rule="$rule; \$(BLDSHARED) $objs $libs \$(LIBPYTHON) -o $file" echo "$rule" >>$rulesf done done diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index a79694730a8..64a497306b2 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -107,7 +107,7 @@ typedef struct{ double hi; double lo; } DoubleLength; static DoubleLength dl_fast_sum(double a, double b) { - /* Algorithm 1.1. Compensated summation of two floating point numbers. */ + /* Algorithm 1.1. Compensated summation of two floating-point numbers. */ assert(fabs(a) >= fabs(b)); double x = a + b; double y = (a - x) + b; @@ -1354,14 +1354,14 @@ math.fsum seq: object / -Return an accurate floating point sum of values in the iterable seq. +Return an accurate floating-point sum of values in the iterable seq. -Assumes IEEE-754 floating point arithmetic. +Assumes IEEE-754 floating-point arithmetic. [clinic start generated code]*/ static PyObject * math_fsum(PyObject *module, PyObject *seq) -/*[clinic end generated code: output=ba5c672b87fe34fc input=c51b7d8caf6f6e82]*/ +/*[clinic end generated code: output=ba5c672b87fe34fc input=4506244ded6057dc]*/ { PyObject *item, *iter, *sum = NULL; Py_ssize_t i, j, n = 0, m = NUM_PARTIALS; @@ -2385,6 +2385,15 @@ math_fmod_impl(PyObject *module, double x, double y) return PyFloat_FromDouble(x); errno = 0; r = fmod(x, y); +#ifdef _MSC_VER + /* Windows (e.g. Windows 10 with MSC v.1916) loose sign + for zero result. But C99+ says: "if y is nonzero, the result + has the same sign as x". + */ + if (r == 0.0 && y != 0.0) { + r = copysign(r, x); + } +#endif if (Py_IS_NAN(r)) { if (!Py_IS_NAN(x) && !Py_IS_NAN(y)) errno = EDOM; @@ -2453,7 +2462,7 @@ Since lo**2 is less than 1/2 ulp(csum), we have csum+lo*lo == csum. To minimize loss of information during the accumulation of fractional values, each term has a separate accumulator. This also breaks up sequential dependencies in the inner loop so the CPU can maximize -floating point throughput. [4] On an Apple M1 Max, hypot(*vec) +floating-point throughput. [4] On an Apple M1 Max, hypot(*vec) takes only 3.33 µsec when len(vec) == 1000. The square root differential correction is needed because a @@ -3136,7 +3145,7 @@ math.isclose -> bool maximum difference for being considered "close", regardless of the magnitude of the input values -Determine whether two floating point numbers are close in value. +Determine whether two floating-point numbers are close in value. Return True if a is close in value to b, and False otherwise. @@ -3151,7 +3160,7 @@ only close to themselves. 
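/*
 * Editor's note -- illustrative sketch, not part of the patch: the MSVC
 * workaround added to math_fmod_impl() above relies on the C99 rule that
 * fmod(x, y) keeps the sign of x even when the result is zero.  A
 * standalone check of that rule, using only standard C:
 */
#include <math.h>
#include <stdio.h>

int main(void)
{
    double r = fmod(-4.0, 2.0);        /* mathematically zero; the sign of x is negative */
    r = copysign(r, -4.0);             /* what the workaround restores when MSVC drops the sign */
    printf("r=%g signbit=%d\n", r, signbit(r) ? 1 : 0);   /* expected: r=-0 signbit=1 */
    return 0;
}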
static int math_isclose_impl(PyObject *module, double a, double b, double rel_tol, double abs_tol) -/*[clinic end generated code: output=b73070207511952d input=f28671871ea5bfba]*/ +/*[clinic end generated code: output=b73070207511952d input=12d41764468bfdb8]*/ { double diff = 0.0; diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index f85fab31096..51e34b5f4b7 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -125,6 +125,7 @@ # define HAVE_PWRITEV_RUNTIME __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) # define HAVE_MKFIFOAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) # define HAVE_MKNODAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) +# define HAVE_PTSNAME_R_RUNTIME __builtin_available(macOS 10.13.4, iOS 11.3, tvOS 11.3, watchOS 4.3, *) # define HAVE_POSIX_SPAWN_SETSID_RUNTIME __builtin_available(macOS 10.15, *) @@ -206,6 +207,10 @@ # define HAVE_MKNODAT_RUNTIME (mknodat != NULL) # endif +# ifdef HAVE_PTSNAME_R +# define HAVE_PTSNAME_R_RUNTIME (ptsname_r != NULL) +# endif + #endif #ifdef HAVE_FUTIMESAT @@ -231,6 +236,7 @@ # define HAVE_PWRITEV_RUNTIME 1 # define HAVE_MKFIFOAT_RUNTIME 1 # define HAVE_MKNODAT_RUNTIME 1 +# define HAVE_PTSNAME_R_RUNTIME 1 #endif @@ -8635,6 +8641,19 @@ os_unlockpt_impl(PyObject *module, int fd) #endif /* HAVE_UNLOCKPT */ #if defined(HAVE_PTSNAME) || defined(HAVE_PTSNAME_R) +static PyObject * +py_ptsname(int fd) +{ + // POSIX manpage: Upon failure, ptsname() shall return a null pointer + // and may set errno. Always initialize errno to avoid undefined behavior. + errno = 0; + char *name = ptsname(fd); + if (name == NULL) { + return posix_error(); + } + return PyUnicode_DecodeFSDefault(name); +} + /*[clinic input] os.ptsname @@ -8656,22 +8675,22 @@ os_ptsname_impl(PyObject *module, int fd) int ret; char name[MAXPATHLEN+1]; - ret = ptsname_r(fd, name, sizeof(name)); + if (HAVE_PTSNAME_R_RUNTIME) { + ret = ptsname_r(fd, name, sizeof(name)); + } + else { + // fallback to ptsname() if ptsname_r() is not available in runtime. + return py_ptsname(fd); + } if (ret != 0) { errno = ret; return posix_error(); } -#else - char *name; - - name = ptsname(fd); - /* POSIX manpage: Upon failure, ptsname() shall return a null pointer and may set errno. - *MAY* set errno? Hmm... */ - if (name == NULL) - return posix_error(); -#endif /* HAVE_PTSNAME_R */ return PyUnicode_DecodeFSDefault(name); +#else + return py_ptsname(fd); +#endif /* HAVE_PTSNAME_R */ } #endif /* defined(HAVE_PTSNAME) || defined(HAVE_PTSNAME_R) */ @@ -10102,7 +10121,10 @@ os_wait_impl(PyObject *module) } #endif /* HAVE_WAIT */ -#if defined(__linux__) && defined(__NR_pidfd_open) + +// This system call always crashes on older Android versions. +#if defined(__linux__) && defined(__NR_pidfd_open) && \ + !(defined(__ANDROID__) && __ANDROID_API__ < 31) /*[clinic input] os.pidfd_open pid: pid_t @@ -10585,12 +10607,12 @@ Return a collection containing process timing information. The object returned behaves like a named tuple with these fields: (utime, stime, cutime, cstime, elapsed_time) -All fields are floating point numbers. +All fields are floating-point numbers. 
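/*
 * Editor's note -- illustrative sketch, not part of the patch: py_ptsname()
 * above clears errno before calling ptsname() because POSIX only says the
 * call "may" set errno on failure; without the reset, a stale errno could be
 * reported as the cause.  The same idiom in plain POSIX C (the fd value is
 * hypothetical):
 */
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>     /* ptsname() */

static int print_pty_name(int master_fd)
{
    errno = 0;                          /* avoid reporting a stale error */
    char *name = ptsname(master_fd);
    if (name == NULL) {
        perror("ptsname");
        return -1;
    }
    printf("%s\n", name);
    return 0;
}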
[clinic start generated code]*/ static PyObject * os_times_impl(PyObject *module) -/*[clinic end generated code: output=35f640503557d32a input=2bf9df3d6ab2e48b]*/ +/*[clinic end generated code: output=35f640503557d32a input=8dbfe33a2dcc3df3]*/ { #ifdef MS_WINDOWS FILETIME create, exit, kernel, user; @@ -17736,6 +17758,9 @@ PROBE(probe_futimens, HAVE_FUTIMENS_RUNTIME) PROBE(probe_utimensat, HAVE_UTIMENSAT_RUNTIME) #endif +#ifdef HAVE_PTSNAME_R +PROBE(probe_ptsname_r, HAVE_PTSNAME_R_RUNTIME) +#endif @@ -17876,6 +17901,10 @@ static const struct have_function { { "HAVE_UTIMENSAT", probe_utimensat }, #endif +#ifdef HAVE_PTSNAME_R + { "HAVE_PTSNAME_R", probe_ptsname_r }, +#endif + #ifdef MS_WINDOWS { "MS_WINDOWS", NULL }, #endif diff --git a/Modules/rotatingtree.c b/Modules/rotatingtree.c index 217e495b3d2..5910e25bed6 100644 --- a/Modules/rotatingtree.c +++ b/Modules/rotatingtree.c @@ -1,9 +1,4 @@ -#ifndef Py_BUILD_CORE_BUILTIN -# define Py_BUILD_CORE_MODULE 1 -#endif - #include "Python.h" -#include "pycore_lock.h" #include "rotatingtree.h" #define KEY_LOWER_THAN(key1, key2) ((char*)(key1) < (char*)(key2)) diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 3eaee22c652..5bd9b7732a4 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -262,7 +262,7 @@ A file descriptor is either a socket or file object, or a small integer gotten from a fileno() method call on one of those. The optional 4th argument specifies a timeout in seconds; it may be -a floating point number to specify fractions of seconds. If it is absent +a floating-point number to specify fractions of seconds. If it is absent or None, the call will never time out. The return value is a tuple of three lists corresponding to the first three @@ -277,7 +277,7 @@ descriptors can be used. static PyObject * select_select_impl(PyObject *module, PyObject *rlist, PyObject *wlist, PyObject *xlist, PyObject *timeout_obj) -/*[clinic end generated code: output=2b3cfa824f7ae4cf input=e467f5d68033de00]*/ +/*[clinic end generated code: output=2b3cfa824f7ae4cf input=1199d5e101abca4a]*/ { #ifdef SELECT_USES_HEAP pylist *rfd2obj, *wfd2obj, *efd2obj; @@ -473,6 +473,7 @@ update_ufd_array(pollObject *self) } /*[clinic input] +@critical_section select.poll.register fd: fildes @@ -486,7 +487,7 @@ Register a file descriptor with the polling object. static PyObject * select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=0dc7173c800a4a65 input=34e16cfb28d3c900]*/ +/*[clinic end generated code: output=0dc7173c800a4a65 input=c475e029ce6c2830]*/ { PyObject *key, *value; int err; @@ -514,6 +515,7 @@ select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask) /*[clinic input] +@critical_section select.poll.modify fd: fildes @@ -528,7 +530,7 @@ Modify an already registered file descriptor. static PyObject * select_poll_modify_impl(pollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=1a7b88bf079eff17 input=a8e383df075c32cf]*/ +/*[clinic end generated code: output=1a7b88bf079eff17 input=38c9db5346711872]*/ { PyObject *key, *value; int err; @@ -566,6 +568,7 @@ select_poll_modify_impl(pollObject *self, int fd, unsigned short eventmask) /*[clinic input] +@critical_section select.poll.unregister fd: fildes @@ -576,7 +579,7 @@ Remove a file descriptor being tracked by the polling object. 
static PyObject * select_poll_unregister_impl(pollObject *self, int fd) -/*[clinic end generated code: output=8c9f42e75e7d291b input=4b4fccc1040e79cb]*/ +/*[clinic end generated code: output=8c9f42e75e7d291b input=ae6315d7f5243704]*/ { PyObject *key; @@ -599,6 +602,7 @@ select_poll_unregister_impl(pollObject *self, int fd) } /*[clinic input] +@critical_section select.poll.poll timeout as timeout_obj: object = None @@ -614,7 +618,7 @@ report, as a list of (fd, event) 2-tuples. static PyObject * select_poll_poll_impl(pollObject *self, PyObject *timeout_obj) -/*[clinic end generated code: output=876e837d193ed7e4 input=c2f6953ec45e5622]*/ +/*[clinic end generated code: output=876e837d193ed7e4 input=54310631457efdec]*/ { PyObject *result_list = NULL; int poll_result, i, j; @@ -857,6 +861,7 @@ internal_devpoll_register(devpollObject *self, int fd, } /*[clinic input] +@critical_section select.devpoll.register fd: fildes @@ -872,12 +877,13 @@ Register a file descriptor with the polling object. static PyObject * select_devpoll_register_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=6e07fe8b74abba0c input=22006fabe9567522]*/ +/*[clinic end generated code: output=6e07fe8b74abba0c input=8d48bd2653a61c42]*/ { return internal_devpoll_register(self, fd, eventmask, 0); } /*[clinic input] +@critical_section select.devpoll.modify fd: fildes @@ -893,12 +899,13 @@ Modify a possible already registered file descriptor. static PyObject * select_devpoll_modify_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=bc2e6d23aaff98b4 input=09fa335db7cdc09e]*/ +/*[clinic end generated code: output=bc2e6d23aaff98b4 input=773b37e9abca2460]*/ { return internal_devpoll_register(self, fd, eventmask, 1); } /*[clinic input] +@critical_section select.devpoll.unregister fd: fildes @@ -909,7 +916,7 @@ Remove a file descriptor being tracked by the polling object. static PyObject * select_devpoll_unregister_impl(devpollObject *self, int fd) -/*[clinic end generated code: output=95519ffa0c7d43fe input=b4ea42a4442fd467]*/ +/*[clinic end generated code: output=95519ffa0c7d43fe input=6052d368368d4d05]*/ { if (self->fd_devpoll < 0) return devpoll_err_closed(); @@ -926,6 +933,7 @@ select_devpoll_unregister_impl(devpollObject *self, int fd) } /*[clinic input] +@critical_section select.devpoll.poll timeout as timeout_obj: object = None The maximum time to wait in milliseconds, or else None (or a negative @@ -940,7 +948,7 @@ report, as a list of (fd, event) 2-tuples. static PyObject * select_devpoll_poll_impl(devpollObject *self, PyObject *timeout_obj) -/*[clinic end generated code: output=2654e5457cca0b3c input=3c3f0a355ec2bedb]*/ +/*[clinic end generated code: output=2654e5457cca0b3c input=fe7a3f6dcbc118c5]*/ { struct dvpoll dvp; PyObject *result_list = NULL; @@ -1059,6 +1067,7 @@ devpoll_internal_close(devpollObject *self) } /*[clinic input] +@critical_section select.devpoll.close Close the devpoll file descriptor. @@ -1068,7 +1077,7 @@ Further operations on the devpoll object will raise an exception. 
static PyObject * select_devpoll_close_impl(devpollObject *self) -/*[clinic end generated code: output=26b355bd6429f21b input=6273c30f5560a99b]*/ +/*[clinic end generated code: output=26b355bd6429f21b input=408fde21a377ccfb]*/ { errno = devpoll_internal_close(self); if (errno < 0) { @@ -1088,6 +1097,7 @@ devpoll_get_closed(devpollObject *self, void *Py_UNUSED(ignored)) } /*[clinic input] +@critical_section select.devpoll.fileno Return the file descriptor. @@ -1095,7 +1105,7 @@ Return the file descriptor. static PyObject * select_devpoll_fileno_impl(devpollObject *self) -/*[clinic end generated code: output=26920929f8d292f4 input=ef15331ebde6c368]*/ +/*[clinic end generated code: output=26920929f8d292f4 input=8c9db2efa1ade538]*/ { if (self->fd_devpoll < 0) return devpoll_err_closed(); @@ -1378,6 +1388,7 @@ pyepoll_dealloc(pyEpoll_Object *self) } /*[clinic input] +@critical_section select.epoll.close Close the epoll control file descriptor. @@ -1387,7 +1398,7 @@ Further operations on the epoll object will raise an exception. static PyObject * select_epoll_close_impl(pyEpoll_Object *self) -/*[clinic end generated code: output=ee2144c446a1a435 input=ca6c66ba5a736bfd]*/ +/*[clinic end generated code: output=ee2144c446a1a435 input=f626a769192e1dbe]*/ { errno = pyepoll_internal_close(self); if (errno < 0) { @@ -2023,10 +2034,8 @@ kqueue_tracking_init(PyObject *module) { } static int -kqueue_tracking_add(_selectstate *state, kqueue_queue_Object *self) { - if (!state->kqueue_tracking_initialized) { - kqueue_tracking_init(PyType_GetModule(Py_TYPE(self))); - } +kqueue_tracking_add_lock_held(_selectstate *state, kqueue_queue_Object *self) +{ assert(self->kqfd >= 0); _kqueue_list_item *item = PyMem_New(_kqueue_list_item, 1); if (item == NULL) { @@ -2039,8 +2048,23 @@ kqueue_tracking_add(_selectstate *state, kqueue_queue_Object *self) { return 0; } +static int +kqueue_tracking_add(_selectstate *state, kqueue_queue_Object *self) +{ + int ret; + PyObject *module = PyType_GetModule(Py_TYPE(self)); + Py_BEGIN_CRITICAL_SECTION(module); + if (!state->kqueue_tracking_initialized) { + kqueue_tracking_init(module); + } + ret = kqueue_tracking_add_lock_held(state, self); + Py_END_CRITICAL_SECTION(); + return ret; +} + static void -kqueue_tracking_remove(_selectstate *state, kqueue_queue_Object *self) { +kqueue_tracking_remove_lock_held(_selectstate *state, kqueue_queue_Object *self) +{ _kqueue_list *listptr = &state->kqueue_open_list; while (*listptr != NULL) { _kqueue_list_item *item = *listptr; @@ -2056,6 +2080,14 @@ kqueue_tracking_remove(_selectstate *state, kqueue_queue_Object *self) { assert(0); } +static void +kqueue_tracking_remove(_selectstate *state, kqueue_queue_Object *self) +{ + Py_BEGIN_CRITICAL_SECTION(PyType_GetModule(Py_TYPE(self))); + kqueue_tracking_remove_lock_held(state, self); + Py_END_CRITICAL_SECTION(); +} + static int kqueue_queue_internal_close(kqueue_queue_Object *self) { @@ -2150,6 +2182,7 @@ kqueue_queue_finalize(kqueue_queue_Object *self) } /*[clinic input] +@critical_section select.kqueue.close Close the kqueue control file descriptor. @@ -2159,7 +2192,7 @@ Further operations on the kqueue object will raise an exception. 
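/*
 * Editor's note -- assumed shape of the generated code, not part of the
 * patch: the @critical_section directive added to these clinic blocks makes
 * the generated wrapper hold a per-object lock (a no-op under the GIL) for
 * the duration of the call, roughly like this for select.kqueue.close:
 */
static PyObject *
select_kqueue_close_sketch(PyObject *self, PyObject *Py_UNUSED(ignored))
{
    PyObject *return_value;
    Py_BEGIN_CRITICAL_SECTION(self);
    return_value = select_kqueue_close_impl((kqueue_queue_Object *)self);
    Py_END_CRITICAL_SECTION();
    return return_value;
}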
static PyObject * select_kqueue_close_impl(kqueue_queue_Object *self) -/*[clinic end generated code: output=d1c7df0b407a4bc1 input=0b12d95430e0634c]*/ +/*[clinic end generated code: output=d1c7df0b407a4bc1 input=6d763c858b17b690]*/ { errno = kqueue_queue_internal_close(self); if (errno < 0) { diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 7de5ebe0899..0e53a36bca5 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -638,7 +638,7 @@ signal_strsignal_impl(PyObject *module, int signalnum) res = "Aborted"; break; case SIGFPE: - res = "Floating point exception"; + res = "Floating-point exception"; break; case SIGSEGV: res = "Segmentation fault"; @@ -1199,13 +1199,13 @@ signal.sigtimedwait Like sigwaitinfo(), but with a timeout. -The timeout is specified in seconds, with floating point numbers allowed. +The timeout is specified in seconds, with floating-point numbers allowed. [clinic start generated code]*/ static PyObject * signal_sigtimedwait_impl(PyObject *module, sigset_t sigset, PyObject *timeout_obj) -/*[clinic end generated code: output=59c8971e8ae18a64 input=87fd39237cf0b7ba]*/ +/*[clinic end generated code: output=59c8971e8ae18a64 input=955773219c1596cd]*/ { PyTime_t timeout; if (_PyTime_FromSecondsObject(&timeout, @@ -1299,7 +1299,9 @@ signal_pthread_kill_impl(PyObject *module, unsigned long thread_id, #endif /* #if defined(HAVE_PTHREAD_KILL) */ -#if defined(__linux__) && defined(__NR_pidfd_send_signal) +// This system call always crashes on older Android versions. +#if defined(__linux__) && defined(__NR_pidfd_send_signal) && \ + !(defined(__ANDROID__) && __ANDROID_API__ < 31) /*[clinic input] signal.pidfd_send_signal diff --git a/Modules/timemodule.c b/Modules/timemodule.c index ed2d32688ec..f74f7625b53 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -116,7 +116,7 @@ time_time(PyObject *self, PyObject *unused) PyDoc_STRVAR(time_doc, -"time() -> floating point number\n\ +"time() -> floating-point number\n\ \n\ Return the current time in seconds since the Epoch.\n\ Fractions of a second may be present if the system clock provides them."); @@ -350,7 +350,7 @@ time_clock_getres(PyObject *self, PyObject *args) } PyDoc_STRVAR(clock_getres_doc, -"clock_getres(clk_id) -> floating point number\n\ +"clock_getres(clk_id) -> floating-point number\n\ \n\ Return the resolution (precision) of the specified clock clk_id."); @@ -413,7 +413,7 @@ PyDoc_STRVAR(sleep_doc, "sleep(seconds)\n\ \n\ Delay execution for a given number of seconds. The argument may be\n\ -a floating point number for subsecond precision."); +a floating-point number for subsecond precision."); static PyStructSequence_Field struct_time_type_fields[] = { {"tm_year", "year, for example, 1993"}, @@ -813,7 +813,12 @@ time_strftime(PyObject *module, PyObject *args) return NULL; } -#if defined(_MSC_VER) || (defined(__sun) && defined(__SVR4)) || defined(_AIX) || defined(__VXWORKS__) +// Some platforms only support a limited range of years. +// +// Android works with negative years on the emulator, but fails on some +// physical devices (#123017). 
+#if defined(_MSC_VER) || (defined(__sun) && defined(__SVR4)) || defined(_AIX) \ + || defined(__VXWORKS__) || defined(__ANDROID__) if (buf.tm_year + 1900 < 1 || 9999 < buf.tm_year + 1900) { PyErr_SetString(PyExc_ValueError, "strftime() requires year in [1; 9999]"); @@ -1104,7 +1109,7 @@ time_mktime(PyObject *module, PyObject *tm_tuple) } PyDoc_STRVAR(mktime_doc, -"mktime(tuple) -> floating point number\n\ +"mktime(tuple) -> floating-point number\n\ \n\ Convert a time tuple in local time to seconds since the Epoch.\n\ Note that mktime(gmtime(0)) will not generally return zero for most\n\ @@ -1902,7 +1907,7 @@ PyDoc_STRVAR(module_doc, \n\ There are two standard representations of time. One is the number\n\ of seconds since the Epoch, in UTC (a.k.a. GMT). It may be an integer\n\ -or a floating point number (to represent fractions of seconds).\n\ +or a floating-point number (to represent fractions of seconds).\n\ The epoch is the point where the time starts, the return value of time.gmtime(0).\n\ It is January 1, 1970, 00:00:00 (UTC) on all platforms.\n\ \n\ diff --git a/Objects/boolobject.c b/Objects/boolobject.c index fb48dcbeca7..a88a8ad0cfd 100644 --- a/Objects/boolobject.c +++ b/Objects/boolobject.c @@ -71,8 +71,8 @@ static PyObject * bool_invert(PyObject *v) { if (PyErr_WarnEx(PyExc_DeprecationWarning, - "Bitwise inversion '~' on bool is deprecated. This " - "returns the bitwise inversion of the underlying int " + "Bitwise inversion '~' on bool is deprecated and will be removed in " + "Python 3.16. This returns the bitwise inversion of the underlying int " "object and is usually not what you expect from negating " "a bool. Use the 'not' operator for boolean negation or " "~int(x) if you really want the bitwise inversion of the " diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index cd799a926ae..b8bcef27cf1 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -46,7 +46,7 @@ Py_LOCAL_INLINE(Py_ssize_t) _PyBytesWriter_GetSize(_PyBytesWriter *writer, static inline PyObject* bytes_get_empty(void) { PyObject *empty = &EMPTY->ob_base.ob_base; - assert(_Py_IsImmortal(empty)); + assert(_Py_IsImmortalLoose(empty)); return empty; } @@ -119,7 +119,7 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) } if (size == 1 && str != NULL) { op = CHARACTER(*str & 255); - assert(_Py_IsImmortal(op)); + assert(_Py_IsImmortalLoose(op)); return (PyObject *)op; } if (size == 0) { @@ -155,7 +155,7 @@ PyBytes_FromString(const char *str) } else if (size == 1) { op = CHARACTER(*str & 255); - assert(_Py_IsImmortal(op)); + assert(_Py_IsImmortalLoose(op)); return (PyObject *)op; } diff --git a/Objects/capsule.c b/Objects/capsule.c index 555979dab2b..28965e0f21b 100644 --- a/Objects/capsule.c +++ b/Objects/capsule.c @@ -317,10 +317,14 @@ static int capsule_traverse(PyCapsule *capsule, visitproc visit, void *arg) { // Capsule object is only tracked by the GC - // if _PyCapsule_SetTraverse() is called - assert(capsule->traverse_func != NULL); + // if _PyCapsule_SetTraverse() is called, but + // this can still be manually triggered by gc.get_referents() + + if (capsule->traverse_func != NULL) { + return capsule->traverse_func((PyObject*)capsule, visit, arg); + } - return capsule->traverse_func((PyObject*)capsule, visit, arg); + return 0; } diff --git a/Objects/clinic/floatobject.c.h b/Objects/clinic/floatobject.c.h index 10f6149cc88..d104b071890 100644 --- a/Objects/clinic/floatobject.c.h +++ b/Objects/clinic/floatobject.c.h @@ -197,7 +197,7 @@ PyDoc_STRVAR(float_new__doc__, "float(x=0, 
/)\n" "--\n" "\n" -"Convert a string or number to a floating point number, if possible."); +"Convert a string or number to a floating-point number, if possible."); static PyObject * float_new_impl(PyTypeObject *type, PyObject *x); @@ -256,7 +256,7 @@ PyDoc_STRVAR(float___getformat____doc__, "It exists mainly to be used in Python\'s test suite.\n" "\n" "This function returns whichever of \'unknown\', \'IEEE, big-endian\' or \'IEEE,\n" -"little-endian\' best describes the format of floating point numbers used by the\n" +"little-endian\' best describes the format of floating-point numbers used by the\n" "C type named by typestr."); #define FLOAT___GETFORMAT___METHODDEF \ @@ -318,4 +318,4 @@ float___format__(PyObject *self, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=c79743c8551c30d9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d8bbcd83977d516f input=a9049054013a1b77]*/ diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 7b1244a8d5f..fbc1439d30c 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -147,7 +147,7 @@ intern_strings(PyObject *tuple) "non-string found in code slot"); return -1; } - _PyUnicode_InternMortal(interp, &_PyTuple_ITEMS(tuple)[i]); + _PyUnicode_InternImmortal(interp, &_PyTuple_ITEMS(tuple)[i]); } return 0; } diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 27da631a574..a5a5bce3965 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -154,22 +154,30 @@ ASSERT_DICT_LOCKED(PyObject *op) _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op); } #define ASSERT_DICT_LOCKED(op) ASSERT_DICT_LOCKED(_Py_CAST(PyObject*, op)) +#define ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op) \ + if (!_PyInterpreterState_GET()->stoptheworld.world_stopped) { \ + ASSERT_DICT_LOCKED(op); \ + } +#define ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(op) \ + if (!_PyInterpreterState_GET()->stoptheworld.world_stopped) { \ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op); \ + } + #define IS_DICT_SHARED(mp) _PyObject_GC_IS_SHARED(mp) #define SET_DICT_SHARED(mp) _PyObject_GC_SET_SHARED(mp) #define LOAD_INDEX(keys, size, idx) _Py_atomic_load_int##size##_relaxed(&((const int##size##_t*)keys->dk_indices)[idx]); #define STORE_INDEX(keys, size, idx, value) _Py_atomic_store_int##size##_relaxed(&((int##size##_t*)keys->dk_indices)[idx], (int##size##_t)value); #define ASSERT_OWNED_OR_SHARED(mp) \ assert(_Py_IsOwnedByCurrentThread((PyObject *)mp) || IS_DICT_SHARED(mp)); -#define LOAD_KEYS_NENTRIES(d) #define LOCK_KEYS_IF_SPLIT(keys, kind) \ if (kind == DICT_KEYS_SPLIT) { \ - LOCK_KEYS(dk); \ + LOCK_KEYS(keys); \ } #define UNLOCK_KEYS_IF_SPLIT(keys, kind) \ if (kind == DICT_KEYS_SPLIT) { \ - UNLOCK_KEYS(dk); \ + UNLOCK_KEYS(keys); \ } static inline Py_ssize_t @@ -203,7 +211,7 @@ set_values(PyDictObject *mp, PyDictValues *values) #define INCREF_KEYS(dk) _Py_atomic_add_ssize(&dk->dk_refcnt, 1) // Dec refs the keys object, giving the previous value #define DECREF_KEYS(dk) _Py_atomic_add_ssize(&dk->dk_refcnt, -1) -#define LOAD_KEYS_NENTIRES(keys) _Py_atomic_load_ssize_relaxed(&keys->dk_nentries) +#define LOAD_KEYS_NENTRIES(keys) _Py_atomic_load_ssize_relaxed(&keys->dk_nentries) #define INCREF_KEYS_FT(dk) dictkeys_incref(dk) #define DECREF_KEYS_FT(dk, shared) dictkeys_decref(_PyInterpreterState_GET(), dk, shared) @@ -221,6 +229,8 @@ static inline void split_keys_entry_added(PyDictKeysObject *keys) #else /* Py_GIL_DISABLED */ #define ASSERT_DICT_LOCKED(op) +#define ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op) +#define 
ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(op) #define LOCK_KEYS(keys) #define UNLOCK_KEYS(keys) #define ASSERT_KEYS_LOCKED(keys) @@ -228,7 +238,7 @@ static inline void split_keys_entry_added(PyDictKeysObject *keys) #define STORE_SHARED_KEY(key, value) key = value #define INCREF_KEYS(dk) dk->dk_refcnt++ #define DECREF_KEYS(dk) dk->dk_refcnt-- -#define LOAD_KEYS_NENTIRES(keys) keys->dk_nentries +#define LOAD_KEYS_NENTRIES(keys) keys->dk_nentries #define INCREF_KEYS_FT(dk) #define DECREF_KEYS_FT(dk, shared) #define LOCK_KEYS_IF_SPLIT(keys, kind) @@ -427,7 +437,7 @@ static inline Py_hash_t unicode_get_hash(PyObject *o) { assert(PyUnicode_CheckExact(o)); - return _PyASCIIObject_CAST(o)->hash; + return FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyASCIIObject_CAST(o)->hash); } /* Print summary info about the state of the optimized allocator */ @@ -473,7 +483,7 @@ dictkeys_decref(PyInterpreterState *interp, PyDictKeysObject *dk, bool use_qsbr) if (FT_ATOMIC_LOAD_SSIZE_RELAXED(dk->dk_refcnt) == _Py_IMMORTAL_REFCNT) { return; } - assert(dk->dk_refcnt > 0); + assert(FT_ATOMIC_LOAD_SSIZE(dk->dk_refcnt) > 0); #ifdef Py_REF_DEBUG _Py_DecRefTotal(_PyThreadState_GET()); #endif @@ -670,6 +680,8 @@ dump_entries(PyDictKeysObject *dk) int _PyDict_CheckConsistency(PyObject *op, int check_content) { + ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op); + #define CHECK(expr) \ do { if (!(expr)) { _PyObject_ASSERT_FAILED_MSG(op, Py_STRINGIFY(expr)); } } while (0) @@ -681,10 +693,15 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) int splitted = _PyDict_HasSplitTable(mp); Py_ssize_t usable = USABLE_FRACTION(DK_SIZE(keys)); + // In the free-threaded build, shared keys may be concurrently modified, + // so use atomic loads. + Py_ssize_t dk_usable = FT_ATOMIC_LOAD_SSIZE_ACQUIRE(keys->dk_usable); + Py_ssize_t dk_nentries = FT_ATOMIC_LOAD_SSIZE_ACQUIRE(keys->dk_nentries); + CHECK(0 <= mp->ma_used && mp->ma_used <= usable); - CHECK(0 <= keys->dk_usable && keys->dk_usable <= usable); - CHECK(0 <= keys->dk_nentries && keys->dk_nentries <= usable); - CHECK(keys->dk_usable + keys->dk_nentries <= usable); + CHECK(0 <= dk_usable && dk_usable <= usable); + CHECK(0 <= dk_nentries && dk_nentries <= usable); + CHECK(dk_usable + dk_nentries <= usable); if (!splitted) { /* combined table */ @@ -701,6 +718,7 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) } if (check_content) { + LOCK_KEYS_IF_SPLIT(keys, keys->dk_kind); for (Py_ssize_t i=0; i < DK_SIZE(keys); i++) { Py_ssize_t ix = dictkeys_get_index(keys, i); CHECK(DKIX_DUMMY <= ix && ix <= usable); @@ -756,6 +774,7 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) CHECK(mp->ma_values->values[index] != NULL); } } + UNLOCK_KEYS_IF_SPLIT(keys, keys->dk_kind); } return 1; @@ -1031,7 +1050,7 @@ lookdict_index(PyDictKeysObject *k, Py_hash_t hash, Py_ssize_t index) static inline Py_ALWAYS_INLINE Py_ssize_t do_lookup(PyDictObject *mp, PyDictKeysObject *dk, PyObject *key, Py_hash_t hash, - Py_ssize_t (*check_lookup)(PyDictObject *, PyDictKeysObject *, void *, Py_ssize_t ix, PyObject *key, Py_hash_t)) + int (*check_lookup)(PyDictObject *, PyDictKeysObject *, void *, Py_ssize_t ix, PyObject *key, Py_hash_t)) { void *ep0 = _DK_ENTRIES(dk); size_t mask = DK_MASK(dk); @@ -1041,7 +1060,7 @@ do_lookup(PyDictObject *mp, PyDictKeysObject *dk, PyObject *key, Py_hash_t hash, for (;;) { ix = dictkeys_get_index(dk, i); if (ix >= 0) { - Py_ssize_t cmp = check_lookup(mp, dk, ep0, ix, key, hash); + int cmp = check_lookup(mp, dk, ep0, ix, key, hash); if (cmp < 0) { return cmp; } else if (cmp) { @@ 
-1057,7 +1076,7 @@ do_lookup(PyDictObject *mp, PyDictKeysObject *dk, PyObject *key, Py_hash_t hash, // Manual loop unrolling ix = dictkeys_get_index(dk, i); if (ix >= 0) { - Py_ssize_t cmp = check_lookup(mp, dk, ep0, ix, key, hash); + int cmp = check_lookup(mp, dk, ep0, ix, key, hash); if (cmp < 0) { return cmp; } else if (cmp) { @@ -1073,7 +1092,7 @@ do_lookup(PyDictObject *mp, PyDictKeysObject *dk, PyObject *key, Py_hash_t hash, Py_UNREACHABLE(); } -static inline Py_ALWAYS_INLINE Py_ssize_t +static inline int compare_unicode_generic(PyDictObject *mp, PyDictKeysObject *dk, void *ep0, Py_ssize_t ix, PyObject *key, Py_hash_t hash) { @@ -1108,7 +1127,7 @@ unicodekeys_lookup_generic(PyDictObject *mp, PyDictKeysObject* dk, PyObject *key return do_lookup(mp, dk, key, hash, compare_unicode_generic); } -static inline Py_ALWAYS_INLINE Py_ssize_t +static inline int compare_unicode_unicode(PyDictObject *mp, PyDictKeysObject *dk, void *ep0, Py_ssize_t ix, PyObject *key, Py_hash_t hash) { @@ -1129,7 +1148,7 @@ unicodekeys_lookup_unicode(PyDictKeysObject* dk, PyObject *key, Py_hash_t hash) return do_lookup(NULL, dk, key, hash, compare_unicode_unicode); } -static inline Py_ALWAYS_INLINE Py_ssize_t +static inline int compare_generic(PyDictObject *mp, PyDictKeysObject *dk, void *ep0, Py_ssize_t ix, PyObject *key, Py_hash_t hash) { @@ -1324,8 +1343,8 @@ ensure_shared_on_resize(PyDictObject *mp) #ifdef Py_GIL_DISABLED -static inline Py_ALWAYS_INLINE -Py_ssize_t compare_unicode_generic_threadsafe(PyDictObject *mp, PyDictKeysObject *dk, +static inline Py_ALWAYS_INLINE int +compare_unicode_generic_threadsafe(PyDictObject *mp, PyDictKeysObject *dk, void *ep0, Py_ssize_t ix, PyObject *key, Py_hash_t hash) { PyDictUnicodeEntry *ep = &((PyDictUnicodeEntry *)ep0)[ix]; @@ -1367,7 +1386,7 @@ unicodekeys_lookup_generic_threadsafe(PyDictObject *mp, PyDictKeysObject* dk, Py return do_lookup(mp, dk, key, hash, compare_unicode_generic_threadsafe); } -static inline Py_ALWAYS_INLINE Py_ssize_t +static inline Py_ALWAYS_INLINE int compare_unicode_unicode_threadsafe(PyDictObject *mp, PyDictKeysObject *dk, void *ep0, Py_ssize_t ix, PyObject *key, Py_hash_t hash) { @@ -1401,8 +1420,8 @@ unicodekeys_lookup_unicode_threadsafe(PyDictKeysObject* dk, PyObject *key, Py_ha return do_lookup(NULL, dk, key, hash, compare_unicode_unicode_threadsafe); } -static inline Py_ALWAYS_INLINE -Py_ssize_t compare_generic_threadsafe(PyDictObject *mp, PyDictKeysObject *dk, +static inline Py_ALWAYS_INLINE int +compare_generic_threadsafe(PyDictObject *mp, PyDictKeysObject *dk, void *ep0, Py_ssize_t ix, PyObject *key, Py_hash_t hash) { PyDictKeyEntry *ep = &((PyDictKeyEntry *)ep0)[ix]; @@ -1530,7 +1549,7 @@ _Py_dict_lookup_threadsafe(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyOb *value_addr = value; if (value != NULL) { assert(ix >= 0); - Py_INCREF(value); + _Py_NewRefWithLock(value); } Py_END_CRITICAL_SECTION(); return ix; @@ -1580,6 +1599,8 @@ _PyDict_MaybeUntrack(PyObject *op) PyObject *value; Py_ssize_t i, numentries; + ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op); + if (!PyDict_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op)) return; @@ -1665,6 +1686,10 @@ insert_combined_dict(PyInterpreterState *interp, PyDictObject *mp, } } + uint64_t new_version = _PyDict_NotifyEvent( + interp, PyDict_EVENT_ADDED, mp, key, value); + mp->ma_keys->dk_version = 0; + Py_ssize_t hashpos = find_empty_slot(mp->ma_keys, hash); dictkeys_set_index(mp->ma_keys, hashpos, mp->ma_keys->dk_nentries); @@ -1681,6 +1706,7 @@ insert_combined_dict(PyInterpreterState *interp, 
PyDictObject *mp, STORE_VALUE(ep, value); STORE_HASH(ep, hash); } + mp->ma_version_tag = new_version; STORE_KEYS_USABLE(mp->ma_keys, mp->ma_keys->dk_usable - 1); STORE_KEYS_NENTRIES(mp->ma_keys, mp->ma_keys->dk_nentries + 1); assert(mp->ma_keys->dk_usable >= 0); @@ -1722,19 +1748,22 @@ static void insert_split_value(PyInterpreterState *interp, PyDictObject *mp, PyObject *key, PyObject *value, Py_ssize_t ix) { assert(PyUnicode_CheckExact(key)); + ASSERT_DICT_LOCKED(mp); MAINTAIN_TRACKING(mp, key, value); PyObject *old_value = mp->ma_values->values[ix]; if (old_value == NULL) { uint64_t new_version = _PyDict_NotifyEvent(interp, PyDict_EVENT_ADDED, mp, key, value); STORE_SPLIT_VALUE(mp, ix, Py_NewRef(value)); _PyDictValues_AddToInsertionOrder(mp->ma_values, ix); - mp->ma_used++; + STORE_USED(mp, mp->ma_used + 1); mp->ma_version_tag = new_version; } else { uint64_t new_version = _PyDict_NotifyEvent(interp, PyDict_EVENT_MODIFIED, mp, key, value); STORE_SPLIT_VALUE(mp, ix, Py_NewRef(value)); mp->ma_version_tag = new_version; + // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, + // when dict only holds the strong reference to value in ep->me_value. Py_DECREF(old_value); } ASSERT_CONSISTENT(mp); @@ -1783,16 +1812,12 @@ insertdict(PyInterpreterState *interp, PyDictObject *mp, if (ix == DKIX_EMPTY) { assert(!_PyDict_HasSplitTable(mp)); - uint64_t new_version = _PyDict_NotifyEvent( - interp, PyDict_EVENT_ADDED, mp, key, value); /* Insert into new slot. */ - mp->ma_keys->dk_version = 0; assert(old_value == NULL); if (insert_combined_dict(interp, mp, hash, key, value) < 0) { goto Fail; } - mp->ma_version_tag = new_version; - mp->ma_used++; + STORE_USED(mp, mp->ma_used + 1); ASSERT_CONSISTENT(mp); return 0; } @@ -1832,9 +1857,6 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp, assert(mp->ma_keys == Py_EMPTY_KEYS); ASSERT_DICT_LOCKED(mp); - uint64_t new_version = _PyDict_NotifyEvent( - interp, PyDict_EVENT_ADDED, mp, key, value); - int unicode = PyUnicode_CheckExact(key); PyDictKeysObject *newkeys = new_keys_object( interp, PyDict_LOG_MINSIZE, unicode); @@ -1843,6 +1865,9 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp, Py_DECREF(value); return -1; } + uint64_t new_version = _PyDict_NotifyEvent( + interp, PyDict_EVENT_ADDED, mp, key, value); + /* We don't decref Py_EMPTY_KEYS here because it is immortal. */ assert(mp->ma_values == NULL); @@ -1861,7 +1886,7 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp, ep->me_hash = hash; STORE_VALUE(ep, value); } - FT_ATOMIC_STORE_SSIZE_RELAXED(mp->ma_used, FT_ATOMIC_LOAD_SSIZE_RELAXED(mp->ma_used) + 1); + STORE_USED(mp, mp->ma_used + 1); mp->ma_version_tag = new_version; newkeys->dk_usable--; newkeys->dk_nentries++; @@ -1870,11 +1895,7 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp, // the case where we're inserting from the non-owner thread. We don't use // set_keys here because the transition from empty to non-empty is safe // as the empty keys will never be freed. 
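/*
 * Editor's note -- illustrative sketch, not part of the patch: the store of
 * mp->ma_keys that follows is a release store, so a thread that reads the
 * keys pointer without holding the dict's lock observes fully initialised
 * keys.  The same publish pattern in portable C11:
 */
#include <stdatomic.h>

struct keys_sketch { int nentries; };
static _Atomic(struct keys_sketch *) shared_keys;

static void publish(struct keys_sketch *fresh)
{
    fresh->nentries = 0;                /* finish all initialisation first */
    atomic_store_explicit(&shared_keys, fresh, memory_order_release);
}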
-#ifdef Py_GIL_DISABLED - _Py_atomic_store_ptr_release(&mp->ma_keys, newkeys); -#else - mp->ma_keys = newkeys; -#endif + FT_ATOMIC_STORE_PTR_RELEASE(mp->ma_keys, newkeys); return 0; } @@ -2170,13 +2191,10 @@ dict_getitem(PyObject *op, PyObject *key, const char *warnmsg) } PyDictObject *mp = (PyDictObject *)op; - Py_hash_t hash; - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) { - PyErr_FormatUnraisable(warnmsg); - return NULL; - } + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + PyErr_FormatUnraisable(warnmsg); + return NULL; } PyThreadState *tstate = _PyThreadState_GET(); @@ -2225,12 +2243,9 @@ _PyDict_LookupIndex(PyDictObject *mp, PyObject *key) assert(PyDict_CheckExact((PyObject*)mp)); assert(PyUnicode_CheckExact(key)); - Py_hash_t hash = unicode_get_hash(key); + Py_hash_t hash = _PyObject_HashFast(key); if (hash == -1) { - hash = PyObject_Hash(key); - if (hash == -1) { - return -1; - } + return -1; } return _Py_dict_lookup(mp, key, hash, &value); @@ -2262,6 +2277,29 @@ _PyDict_GetItem_KnownHash(PyObject *op, PyObject *key, Py_hash_t hash) return value; // borrowed reference } +/* Gets an item and provides a new reference if the value is present. + * Returns 1 if the key is present, 0 if the key is missing, and -1 if an + * exception occurred. +*/ +int +_PyDict_GetItemRef_KnownHash_LockHeld(PyDictObject *op, PyObject *key, + Py_hash_t hash, PyObject **result) +{ + PyObject *value; + Py_ssize_t ix = _Py_dict_lookup(op, key, hash, &value); + assert(ix >= 0 || value == NULL); + if (ix == DKIX_ERROR) { + *result = NULL; + return -1; + } + if (value == NULL) { + *result = NULL; + return 0; // missing key + } + *result = Py_NewRef(value); + return 1; // key is present +} + /* Gets an item and provides a new reference if the value is present. * Returns 1 if the key is present, 0 if the key is missing, and -1 if an * exception occurred. 
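/*
 * Editor's note -- assumed shape, not part of the patch: the call sites in
 * this file switch from the open-coded "cached unicode hash, else
 * PyObject_Hash" dance to _PyObject_HashFast().  Its definition is not in
 * this hunk; based on the code it replaces, it presumably behaves like:
 */
static inline Py_hash_t
object_hash_fast_sketch(PyObject *key)
{
    if (PyUnicode_CheckExact(key)) {
        Py_hash_t hash = _PyASCIIObject_CAST(key)->hash;  /* cached; -1 if not yet computed */
        if (hash != -1) {
            return hash;
        }
    }
    return PyObject_Hash(key);          /* returns -1 with an exception set on failure */
}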
@@ -2301,14 +2339,10 @@ PyDict_GetItemRef(PyObject *op, PyObject *key, PyObject **result) return -1; } - Py_hash_t hash; - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) - { - hash = PyObject_Hash(key); - if (hash == -1) { - *result = NULL; - return -1; - } + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + *result = NULL; + return -1; } return _PyDict_GetItemRef_KnownHash((PyDictObject *)op, key, hash, result); @@ -2320,13 +2354,10 @@ _PyDict_GetItemRef_Unicode_LockHeld(PyDictObject *op, PyObject *key, PyObject ** ASSERT_DICT_LOCKED(op); assert(PyUnicode_CheckExact(key)); - Py_hash_t hash; - if ((hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) { - *result = NULL; - return -1; - } + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + *result = NULL; + return -1; } PyObject *value; @@ -2360,12 +2391,9 @@ PyDict_GetItemWithError(PyObject *op, PyObject *key) PyErr_BadInternalCall(); return NULL; } - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) - { - hash = PyObject_Hash(key); - if (hash == -1) { - return NULL; - } + hash = _PyObject_HashFast(key); + if (hash == -1) { + return NULL; } #ifdef Py_GIL_DISABLED @@ -2433,10 +2461,9 @@ _PyDict_LoadGlobal(PyDictObject *globals, PyDictObject *builtins, PyObject *key) Py_hash_t hash; PyObject *value; - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return NULL; + hash = _PyObject_HashFast(key); + if (hash == -1) { + return NULL; } /* namespace 1: globals */ @@ -2461,14 +2488,11 @@ setitem_take2_lock_held(PyDictObject *mp, PyObject *key, PyObject *value) assert(key); assert(value); assert(PyDict_Check(mp)); - Py_hash_t hash; - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) { - Py_DECREF(key); - Py_DECREF(value); - return -1; - } + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + Py_DECREF(key); + Py_DECREF(value); + return -1; } PyInterpreterState *interp = _PyInterpreterState_GET(); @@ -2520,11 +2544,21 @@ setitem_lock_held(PyDictObject *mp, PyObject *key, PyObject *value) int -_PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, - Py_hash_t hash) +_PyDict_SetItem_KnownHash_LockHeld(PyDictObject *mp, PyObject *key, PyObject *value, + Py_hash_t hash) { - PyDictObject *mp; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (mp->ma_keys == Py_EMPTY_KEYS) { + return insert_to_emptydict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); + } + /* insertdict() handles any resizing that might be necessary */ + return insertdict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); +} +int +_PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, + Py_hash_t hash) +{ if (!PyDict_Check(op)) { PyErr_BadInternalCall(); return -1; @@ -2532,21 +2566,10 @@ _PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, assert(key); assert(value); assert(hash != -1); - mp = (PyDictObject *)op; int res; - PyInterpreterState *interp = _PyInterpreterState_GET(); - - Py_BEGIN_CRITICAL_SECTION(mp); - - if (mp->ma_keys == Py_EMPTY_KEYS) { - res = insert_to_emptydict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); - } - else { - /* insertdict() handles any resizing that might be necessary */ - res = insertdict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); - } - + Py_BEGIN_CRITICAL_SECTION(op); + res = 
_PyDict_SetItem_KnownHash_LockHeld((PyDictObject *)op, key, value, hash); Py_END_CRITICAL_SECTION(); return res; } @@ -2569,7 +2592,7 @@ delete_index_from_values(PyDictValues *values, Py_ssize_t ix) values->size = size; } -static int +static void delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, PyObject *old_value, uint64_t new_version) { @@ -2580,7 +2603,7 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, Py_ssize_t hashpos = lookdict_index(mp->ma_keys, hash, ix); assert(hashpos >= 0); - FT_ATOMIC_STORE_SSIZE_RELAXED(mp->ma_used, FT_ATOMIC_LOAD_SSIZE(mp->ma_used) - 1); + STORE_USED(mp, mp->ma_used - 1); mp->ma_version_tag = new_version; if (_PyDict_HasSplitTable(mp)) { assert(old_value == mp->ma_values->values[ix]); @@ -2611,18 +2634,15 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, Py_DECREF(old_value); ASSERT_CONSISTENT(mp); - return 0; } int PyDict_DelItem(PyObject *op, PyObject *key) { - Py_hash_t hash; assert(key); - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return -1; + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + return -1; } return _PyDict_DelItem_KnownHash(op, key, hash); @@ -2656,7 +2676,8 @@ delitem_knownhash_lock_held(PyObject *op, PyObject *key, Py_hash_t hash) PyInterpreterState *interp = _PyInterpreterState_GET(); uint64_t new_version = _PyDict_NotifyEvent( interp, PyDict_EVENT_DELETED, mp, key, NULL); - return delitem_common(mp, hash, ix, old_value, new_version); + delitem_common(mp, hash, ix, old_value, new_version); + return 0; } int @@ -2671,7 +2692,8 @@ _PyDict_DelItem_KnownHash(PyObject *op, PyObject *key, Py_hash_t hash) static int delitemif_lock_held(PyObject *op, PyObject *key, - int (*predicate)(PyObject *value)) + int (*predicate)(PyObject *value, void *arg), + void *arg) { Py_ssize_t ix; PyDictObject *mp; @@ -2681,24 +2703,20 @@ delitemif_lock_held(PyObject *op, PyObject *key, ASSERT_DICT_LOCKED(op); - if (!PyDict_Check(op)) { - PyErr_BadInternalCall(); - return -1; - } assert(key); hash = PyObject_Hash(key); if (hash == -1) return -1; mp = (PyDictObject *)op; ix = _Py_dict_lookup(mp, key, hash, &old_value); - if (ix == DKIX_ERROR) + if (ix == DKIX_ERROR) { return -1; + } if (ix == DKIX_EMPTY || old_value == NULL) { - _PyErr_SetKeyError(key); - return -1; + return 0; } - res = predicate(old_value); + res = predicate(old_value, arg); if (res == -1) return -1; @@ -2706,7 +2724,8 @@ delitemif_lock_held(PyObject *op, PyObject *key, PyInterpreterState *interp = _PyInterpreterState_GET(); uint64_t new_version = _PyDict_NotifyEvent( interp, PyDict_EVENT_DELETED, mp, key, NULL); - return delitem_common(mp, hash, ix, old_value, new_version); + delitem_common(mp, hash, ix, old_value, new_version); + return 1; } else { return 0; } @@ -2718,11 +2737,13 @@ delitemif_lock_held(PyObject *op, PyObject *key, */ int _PyDict_DelItemIf(PyObject *op, PyObject *key, - int (*predicate)(PyObject *value)) + int (*predicate)(PyObject *value, void *arg), + void *arg) { + assert(PyDict_Check(op)); int res; Py_BEGIN_CRITICAL_SECTION(op); - res = delitemif_lock_held(op, key, predicate); + res = delitemif_lock_held(op, key, predicate, arg); Py_END_CRITICAL_SECTION(); return res; } @@ -2752,7 +2773,7 @@ clear_lock_held(PyObject *op) // We don't inc ref empty keys because they're immortal ensure_shared_on_resize(mp); mp->ma_version_tag = new_version; - mp->ma_used = 0; + STORE_USED(mp, 0); if (oldvalues == NULL) { set_keys(mp, Py_EMPTY_KEYS); 
assert(oldkeys->dk_refcnt == 1); @@ -2946,15 +2967,12 @@ pop_lock_held(PyObject *op, PyObject *key, PyObject **result) return 0; } - Py_hash_t hash; - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) { - if (result) { - *result = NULL; - } - return -1; + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + if (result) { + *result = NULL; } + return -1; } return _PyDict_Pop_KnownHash(dict, key, hash, result); } @@ -3185,6 +3203,8 @@ dict_repr_lock_held(PyObject *self) _PyUnicodeWriter writer; int first; + ASSERT_DICT_LOCKED(mp); + i = Py_ReprEnter((PyObject *)mp); if (i != 0) { return i > 0 ? PyUnicode_FromString("{...}") : NULL; @@ -3273,8 +3293,7 @@ dict_repr(PyObject *self) static Py_ssize_t dict_length(PyObject *self) { - PyDictObject *mp = (PyDictObject *)self; - return _Py_atomic_load_ssize_relaxed(&mp->ma_used); + return FT_ATOMIC_LOAD_SSIZE_RELAXED(((PyDictObject *)self)->ma_used); } static PyObject * @@ -3285,10 +3304,9 @@ dict_subscript(PyObject *self, PyObject *key) Py_hash_t hash; PyObject *value; - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return NULL; + hash = _PyObject_HashFast(key); + if (hash == -1) { + return NULL; } ix = _Py_dict_lookup_threadsafe(mp, key, hash, &value); if (ix == DKIX_ERROR) @@ -3666,6 +3684,9 @@ PyDict_MergeFromSeq2(PyObject *d, PyObject *seq2, int override) static int dict_dict_merge(PyInterpreterState *interp, PyDictObject *mp, PyDictObject *other, int override) { + ASSERT_DICT_LOCKED(mp); + ASSERT_DICT_LOCKED(other); + if (other == mp || other->ma_used == 0) /* a.update(a) or a.update({}); nothing to do */ return 0; @@ -3693,7 +3714,7 @@ dict_dict_merge(PyInterpreterState *interp, PyDictObject *mp, PyDictObject *othe ensure_shared_on_resize(mp); dictkeys_decref(interp, mp->ma_keys, IS_DICT_SHARED(mp)); mp->ma_keys = keys; - mp->ma_used = other->ma_used; + STORE_USED(mp, other->ma_used); mp->ma_version_tag = new_version; ASSERT_CONSISTENT(mp); @@ -3897,13 +3918,13 @@ dict_copy_impl(PyDictObject *self) } /* Copies the values, but does not change the reference - * counts of the objects in the array. */ + * counts of the objects in the array. + * Return NULL, but does *not* set an exception on failure */ static PyDictValues * copy_values(PyDictValues *values) { PyDictValues *newvalues = new_values(values->capacity); if (newvalues == NULL) { - PyErr_NoMemory(); return NULL; } newvalues->size = values->size; @@ -4028,7 +4049,7 @@ PyDict_Size(PyObject *mp) PyErr_BadInternalCall(); return -1; } - return ((PyDictObject *)mp)->ma_used; + return FT_ATOMIC_LOAD_SSIZE_RELAXED(((PyDictObject *)mp)->ma_used); } /* Return 1 if dicts equal, 0 if not, -1 if error. @@ -4047,7 +4068,7 @@ dict_equal_lock_held(PyDictObject *a, PyDictObject *b) /* can't be equal if # of entries differ */ return 0; /* Same # of entries -- check all of 'em. Exit early on any diff. 
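/*
 * Editor's note -- hypothetical usage sketch, not part of the patch: with
 * the _PyDict_DelItemIf() change above, the predicate now receives a
 * caller-supplied argument and a missing key no longer raises KeyError.
 * The return value is -1 on error, 0 if nothing was deleted, and 1 if the
 * entry was removed:
 */
static int
value_equals(PyObject *value, void *arg)
{
    return PyObject_RichCompareBool(value, (PyObject *)arg, Py_EQ);
}

static int
delete_if_expected(PyObject *dict, PyObject *key, PyObject *expected)
{
    int r = _PyDict_DelItemIf(dict, key, value_equals, expected);
    if (r < 0) {
        return -1;                      /* exception already set */
    }
    return r;                           /* 0: kept (or absent), 1: deleted */
}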
*/ - for (i = 0; i < LOAD_KEYS_NENTIRES(a->ma_keys); i++) { + for (i = 0; i < LOAD_KEYS_NENTRIES(a->ma_keys); i++) { PyObject *key, *aval; Py_hash_t hash; if (DK_IS_UNICODE(a->ma_keys)) { @@ -4172,10 +4193,9 @@ dict_get_impl(PyDictObject *self, PyObject *key, PyObject *default_value) Py_hash_t hash; Py_ssize_t ix; - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return NULL; + hash = _PyObject_HashFast(key); + if (hash == -1) { + return NULL; } ix = _Py_dict_lookup_threadsafe(self, key, hash, &val); if (ix == DKIX_ERROR) @@ -4205,14 +4225,12 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu return -1; } - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) { - if (result) { - *result = NULL; - } - return -1; + hash = _PyObject_HashFast(key); + if (hash == -1) { + if (result) { + *result = NULL; } + return -1; } if (mp->ma_keys == Py_EMPTY_KEYS) { @@ -4271,9 +4289,6 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu if (ix == DKIX_EMPTY) { assert(!_PyDict_HasSplitTable(mp)); - uint64_t new_version = _PyDict_NotifyEvent( - interp, PyDict_EVENT_ADDED, mp, key, default_value); - mp->ma_keys->dk_version = 0; value = default_value; if (insert_combined_dict(interp, mp, hash, Py_NewRef(key), Py_NewRef(value)) < 0) { @@ -4285,8 +4300,7 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu } MAINTAIN_TRACKING(mp, key, value); - mp->ma_used++; - mp->ma_version_tag = new_version; + STORE_USED(mp, mp->ma_used + 1); assert(mp->ma_keys->dk_usable >= 0); ASSERT_CONSISTENT(mp); if (result) { @@ -4407,6 +4421,8 @@ dict_popitem_impl(PyDictObject *self) uint64_t new_version; PyInterpreterState *interp = _PyInterpreterState_GET(); + ASSERT_DICT_LOCKED(self); + /* Allocate the result tuple before checking the size. 
Believe it * or not, this allocation could trigger a garbage collection which * could empty the dict, so if we checked the size first and that @@ -4642,12 +4658,10 @@ static PyMethodDef mapp_methods[] = { int PyDict_Contains(PyObject *op, PyObject *key) { - Py_hash_t hash; + Py_hash_t hash = _PyObject_HashFast(key); - if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return -1; + if (hash == -1) { + return -1; } return _PyDict_Contains_KnownHash(op, key, hash); @@ -4947,19 +4961,21 @@ typedef struct { static PyObject * dictiter_new(PyDictObject *dict, PyTypeObject *itertype) { + Py_ssize_t used; dictiterobject *di; di = PyObject_GC_New(dictiterobject, itertype); if (di == NULL) { return NULL; } di->di_dict = (PyDictObject*)Py_NewRef(dict); - di->di_used = dict->ma_used; - di->len = dict->ma_used; + used = FT_ATOMIC_LOAD_SSIZE_RELAXED(dict->ma_used); + di->di_used = used; + di->len = used; if (itertype == &PyDictRevIterKey_Type || itertype == &PyDictRevIterItem_Type || itertype == &PyDictRevIterValue_Type) { if (_PyDict_HasSplitTable(dict)) { - di->di_pos = dict->ma_used - 1; + di->di_pos = used - 1; } else { di->di_pos = load_keys_nentries(dict) - 1; @@ -5008,8 +5024,8 @@ dictiter_len(PyObject *self, PyObject *Py_UNUSED(ignored)) { dictiterobject *di = (dictiterobject *)self; Py_ssize_t len = 0; - if (di->di_dict != NULL && di->di_used == di->di_dict->ma_used) - len = di->len; + if (di->di_dict != NULL && di->di_used == FT_ATOMIC_LOAD_SSIZE_RELAXED(di->di_dict->ma_used)) + len = FT_ATOMIC_LOAD_SSIZE_RELAXED(di->len); return PyLong_FromSize_t(len); } @@ -5292,6 +5308,7 @@ dictiter_iternextitem_lock_held(PyDictObject *d, PyObject *self, Py_ssize_t i; assert (PyDict_Check(d)); + ASSERT_DICT_LOCKED(d); if (di->di_used != d->ma_used) { PyErr_SetString(PyExc_RuntimeError, @@ -5806,7 +5823,7 @@ dictview_len(PyObject *self) _PyDictViewObject *dv = (_PyDictViewObject *)self; Py_ssize_t len = 0; if (dv->dv_dict != NULL) - len = dv->dv_dict->ma_used; + len = FT_ATOMIC_LOAD_SSIZE_RELAXED(dv->dv_dict->ma_used); return len; } @@ -6682,18 +6699,25 @@ make_dict_from_instance_attributes(PyInterpreterState *interp, return res; } -static PyDictObject * -materialize_managed_dict_lock_held(PyObject *obj) +PyDictObject * +_PyObject_MaterializeManagedDict_LockHeld(PyObject *obj) { - _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(obj); - PyDictValues *values = _PyObject_InlineValues(obj); - PyInterpreterState *interp = _PyInterpreterState_GET(); - PyDictKeysObject *keys = CACHED_KEYS(Py_TYPE(obj)); OBJECT_STAT_INC(dict_materialized_on_request); - PyDictObject *dict = make_dict_from_instance_attributes(interp, keys, values); + + PyDictValues *values = _PyObject_InlineValues(obj); + PyDictObject *dict; + if (values->valid) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + PyDictKeysObject *keys = CACHED_KEYS(Py_TYPE(obj)); + dict = make_dict_from_instance_attributes(interp, keys, values); + } + else { + dict = (PyDictObject *)PyDict_New(); + } FT_ATOMIC_STORE_PTR_RELEASE(_PyObject_ManagedDictPointer(obj)->dict, - (PyDictObject *)dict); + dict); return dict; } @@ -6714,7 +6738,7 @@ _PyObject_MaterializeManagedDict(PyObject *obj) goto exit; } #endif - dict = materialize_managed_dict_lock_held(obj); + dict = _PyObject_MaterializeManagedDict_LockHeld(obj); #ifdef Py_GIL_DISABLED exit: @@ -6727,11 +6751,9 @@ int _PyDict_SetItem_LockHeld(PyDictObject *dict, PyObject *name, PyObject *value) { if 
(value == NULL) { - Py_hash_t hash; - if (!PyUnicode_CheckExact(name) || (hash = unicode_get_hash(name)) == -1) { - hash = PyObject_Hash(name); - if (hash == -1) - return -1; + Py_hash_t hash = _PyObject_HashFast(name); + if (hash == -1) { + return -1; } return delitem_knownhash_lock_held((PyObject *)dict, name, hash); } else { @@ -6815,7 +6837,7 @@ store_instance_attr_lock_held(PyObject *obj, PyDictValues *values, _PyDictValues_AddToInsertionOrder(values, ix); if (dict) { assert(dict->ma_values == values); - dict->ma_used++; + STORE_USED(dict, dict->ma_used + 1); } } else { @@ -6823,7 +6845,7 @@ store_instance_attr_lock_held(PyObject *obj, PyDictValues *values, delete_index_from_values(values, ix); if (dict) { assert(dict->ma_values == values); - dict->ma_used--; + STORE_USED(dict, dict->ma_used - 1); } } Py_DECREF(old_value); @@ -6966,7 +6988,7 @@ _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name, PyObject **attr // Still no dict, we can read from the values assert(values->valid); value = values->values[ix]; - *attr = Py_XNewRef(value); + *attr = _Py_XNewRefWithLock(value); success = true; } @@ -6986,7 +7008,7 @@ _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name, PyObject **attr if (dict->ma_values == values && FT_ATOMIC_LOAD_UINT8(values->valid)) { value = _Py_atomic_load_ptr_relaxed(&values->values[ix]); - *attr = Py_XNewRef(value); + *attr = _Py_XNewRefWithLock(value); success = true; } else { // Caller needs to lookup from the dictionary @@ -7034,7 +7056,7 @@ _PyObject_IsInstanceDictEmpty(PyObject *obj) if (dict == NULL) { return 1; } - return ((PyDictObject *)dict)->ma_used == 0; + return FT_ATOMIC_LOAD_SSIZE_RELAXED(((PyDictObject *)dict)->ma_used) == 0; } int @@ -7149,7 +7171,7 @@ PyObject_ClearManagedDict(PyObject *obj) int _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) { - _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(obj); assert(_PyObject_ManagedDictPointer(obj)->dict == mp); assert(_PyObject_InlineValuesConsistencyCheck(obj)); @@ -7167,6 +7189,13 @@ _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) PyDictValues *values = copy_values(mp->ma_values); if (values == NULL) { + /* Out of memory. Clear the dict */ + PyInterpreterState *interp = _PyInterpreterState_GET(); + PyDictKeysObject *oldkeys = mp->ma_keys; + set_keys(mp, Py_EMPTY_KEYS); + dictkeys_decref(interp, oldkeys, IS_DICT_SHARED(mp)); + STORE_USED(mp, 0); + PyErr_NoMemory(); return -1; } mp->ma_values = values; diff --git a/Objects/exceptions.c b/Objects/exceptions.c index fbc8c6c49ab..da500c30621 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -3252,7 +3252,7 @@ SimpleExtendsException(PyExc_Exception, ArithmeticError, * FloatingPointError extends ArithmeticError */ SimpleExtendsException(PyExc_ArithmeticError, FloatingPointError, - "Floating point operation failed."); + "Floating-point operation failed."); /* diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 96227f2cf7d..14a98f542fa 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -1633,12 +1633,12 @@ float.__new__ as float_new x: object(c_default="NULL") = 0 / -Convert a string or number to a floating point number, if possible. +Convert a string or number to a floating-point number, if possible. 
[clinic start generated code]*/ static PyObject * float_new_impl(PyTypeObject *type, PyObject *x) -/*[clinic end generated code: output=ccf1e8dc460ba6ba input=f43661b7de03e9d8]*/ +/*[clinic end generated code: output=ccf1e8dc460ba6ba input=55909f888aa0c8a6]*/ { if (type != &PyFloat_Type) { if (x == NULL) { @@ -1734,13 +1734,13 @@ You probably don't want to use this function. It exists mainly to be used in Python's test suite. This function returns whichever of 'unknown', 'IEEE, big-endian' or 'IEEE, -little-endian' best describes the format of floating point numbers used by the +little-endian' best describes the format of floating-point numbers used by the C type named by typestr. [clinic start generated code]*/ static PyObject * float___getformat___impl(PyTypeObject *type, const char *typestr) -/*[clinic end generated code: output=2bfb987228cc9628 input=d5a52600f835ad67]*/ +/*[clinic end generated code: output=2bfb987228cc9628 input=90d5e246409a246e]*/ { float_format_type r; @@ -1926,7 +1926,7 @@ _init_global_state(void) float_format_type detected_double_format, detected_float_format; /* We attempt to determine if this machine is using IEEE - floating point formats by peering at the bits of some + floating-point formats by peering at the bits of some carefully chosen values. If it looks like we are on an IEEE platform, the float packing/unpacking routines can just copy bits, if not they resort to arithmetic & shifts diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 0465aaa01bb..23a82ae6736 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -53,22 +53,27 @@ static int framelocalsproxy_getkeyindex(PyFrameObject *frame, PyObject* key, bool read) { /* - * Returns the fast locals index of the key + * Returns -2 (!) if an error occurred; exception will be set. + * Returns the fast locals index of the key on success: * - if read == true, returns the index if the value is not NULL * - if read == false, returns the index if the value is not hidden + * Otherwise returns -1. */ - assert(PyUnicode_CheckExact(key)); - PyCodeObject *co = _PyFrame_GetCode(frame->f_frame); - int found_key = false; + + // Ensure that the key is hashable. + Py_hash_t key_hash = PyObject_Hash(key); + if (key_hash == -1) { + return -2; + } + bool found = false; // We do 2 loops here because it's highly possible the key is interned // and we can do a pointer comparison. for (int i = 0; i < co->co_nlocalsplus; i++) { PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); if (name == key) { - found_key = true; if (read) { if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) { return i; @@ -78,23 +83,35 @@ framelocalsproxy_getkeyindex(PyFrameObject *frame, PyObject* key, bool read) return i; } } + found = true; } } - - if (!found_key) { - // This is unlikely, but we need to make sure. This means the key - // is not interned. - for (int i = 0; i < co->co_nlocalsplus; i++) { - PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); - if (_PyUnicode_EQ(name, key)) { - if (read) { - if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) { - return i; - } - } else { - if (!(_PyLocals_GetKind(co->co_localspluskinds, i) & CO_FAST_HIDDEN)) { - return i; - } + if (found) { + // This is an attempt to read an unset local variable or + // write to a variable that is hidden from regular write operations + return -1; + } + // This is unlikely, but we need to make sure. This means the key + // is not interned. 
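/*
 * framelocalsproxy_getkeyindex() now reports three outcomes instead of
 * assuming an exact-unicode key: -2 means an error occurred (typically an
 * unhashable key; an exception is set), -1 means "not a fast local", and a
 * non-negative value is the fast-locals index.  A sketch of how the proxy
 * methods in this patch consume that convention (the f_extra_locals
 * fallback is reduced to a plain KeyError here for brevity).
 */
static PyObject *
proxy_getitem_sketch(PyFrameObject *frame, PyObject *key)
{
    int i = framelocalsproxy_getkeyindex(frame, key, true);
    if (i == -2) {
        return NULL;                          /* propagate the error */
    }
    if (i >= 0) {
        PyCodeObject *co = _PyFrame_GetCode(frame->f_frame);
        PyObject *value = framelocalsproxy_getval(frame->f_frame, co, i);
        assert(value != NULL);                /* guaranteed when read == true */
        return Py_NewRef(value);
    }
    /* i == -1: not a fast local; the real code consults f_extra_locals. */
    PyErr_SetObject(PyExc_KeyError, key);
    return NULL;
}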
+ for (int i = 0; i < co->co_nlocalsplus; i++) { + PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); + Py_hash_t name_hash = PyObject_Hash(name); + assert(name_hash != -1); // keys are exact unicode + if (name_hash != key_hash) { + continue; + } + int same = PyObject_RichCompareBool(name, key, Py_EQ); + if (same < 0) { + return -2; + } + if (same) { + if (read) { + if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) { + return i; + } + } else { + if (!(_PyLocals_GetKind(co->co_localspluskinds, i) & CO_FAST_HIDDEN)) { + return i; } } } @@ -109,13 +126,14 @@ framelocalsproxy_getitem(PyObject *self, PyObject *key) PyFrameObject* frame = ((PyFrameLocalsProxyObject*)self)->frame; PyCodeObject* co = _PyFrame_GetCode(frame->f_frame); - if (PyUnicode_CheckExact(key)) { - int i = framelocalsproxy_getkeyindex(frame, key, true); - if (i >= 0) { - PyObject *value = framelocalsproxy_getval(frame->f_frame, co, i); - assert(value != NULL); - return Py_NewRef(value); - } + int i = framelocalsproxy_getkeyindex(frame, key, true); + if (i == -2) { + return NULL; + } + if (i >= 0) { + PyObject *value = framelocalsproxy_getval(frame->f_frame, co, i); + assert(value != NULL); + return Py_NewRef(value); } // Okay not in the fast locals, try extra locals @@ -145,35 +163,36 @@ framelocalsproxy_setitem(PyObject *self, PyObject *key, PyObject *value) return -1; } - if (PyUnicode_CheckExact(key)) { - int i = framelocalsproxy_getkeyindex(frame, key, false); - if (i >= 0) { - _Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1); + int i = framelocalsproxy_getkeyindex(frame, key, false); + if (i == -2) { + return -1; + } + if (i >= 0) { + _Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1); - _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); - PyObject *oldvalue = fast[i]; - PyObject *cell = NULL; - if (kind == CO_FAST_FREE) { - // The cell was set when the frame was created from - // the function's closure. - assert(oldvalue != NULL && PyCell_Check(oldvalue)); + _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); + PyObject *oldvalue = fast[i]; + PyObject *cell = NULL; + if (kind == CO_FAST_FREE) { + // The cell was set when the frame was created from + // the function's closure. 
+ assert(oldvalue != NULL && PyCell_Check(oldvalue)); + cell = oldvalue; + } else if (kind & CO_FAST_CELL && oldvalue != NULL) { + if (PyCell_Check(oldvalue)) { cell = oldvalue; - } else if (kind & CO_FAST_CELL && oldvalue != NULL) { - if (PyCell_Check(oldvalue)) { - cell = oldvalue; - } } - if (cell != NULL) { - oldvalue = PyCell_GET(cell); - if (value != oldvalue) { - PyCell_SET(cell, Py_XNewRef(value)); - Py_XDECREF(oldvalue); - } - } else if (value != oldvalue) { - Py_XSETREF(fast[i], Py_NewRef(value)); + } + if (cell != NULL) { + oldvalue = PyCell_GET(cell); + if (value != oldvalue) { + PyCell_SET(cell, Py_XNewRef(value)); + Py_XDECREF(oldvalue); } - return 0; + } else if (value != oldvalue) { + Py_XSETREF(fast[i], Py_NewRef(value)); } + return 0; } // Okay not in the fast locals, try extra locals @@ -289,14 +308,31 @@ framelocalsproxy_dealloc(PyObject *self) static PyObject * framelocalsproxy_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { + if (PyTuple_GET_SIZE(args) != 1) { + PyErr_Format(PyExc_TypeError, + "FrameLocalsProxy expected 1 argument, got %zd", + PyTuple_GET_SIZE(args)); + return NULL; + } + PyObject *item = PyTuple_GET_ITEM(args, 0); + + if (!PyFrame_Check(item)) { + PyErr_Format(PyExc_TypeError, "expect frame, not %T", item); + return NULL; + } + PyFrameObject *frame = (PyFrameObject*)item; + + if (kwds != NULL && PyDict_Size(kwds) != 0) { + PyErr_SetString(PyExc_TypeError, + "FrameLocalsProxy takes no keyword arguments"); + return 0; + } + PyFrameLocalsProxyObject *self = (PyFrameLocalsProxyObject *)type->tp_alloc(type, 0); if (self == NULL) { return NULL; } - PyFrameObject *frame = (PyFrameObject*)PyTuple_GET_ITEM(args, 0); - assert(PyFrame_Check(frame)); - ((PyFrameLocalsProxyObject*)self)->frame = (PyFrameObject*)Py_NewRef(frame); return (PyObject *)self; @@ -543,11 +579,12 @@ framelocalsproxy_contains(PyObject *self, PyObject *key) { PyFrameObject *frame = ((PyFrameLocalsProxyObject*)self)->frame; - if (PyUnicode_CheckExact(key)) { - int i = framelocalsproxy_getkeyindex(frame, key, true); - if (i >= 0) { - return 1; - } + int i = framelocalsproxy_getkeyindex(frame, key, true); + if (i == -2) { + return -1; + } + if (i >= 0) { + return 1; } PyObject *extra = ((PyFrameObject*)frame)->f_extra_locals; @@ -720,7 +757,7 @@ PyTypeObject PyFrameLocalsProxy_Type = { .tp_as_mapping = &framelocalsproxy_as_mapping, .tp_getattro = PyObject_GenericGetAttr, .tp_setattro = PyObject_GenericSetAttr, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, + .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_MAPPING, .tp_traverse = framelocalsproxy_visit, .tp_clear = framelocalsproxy_tp_clear, .tp_richcompare = framelocalsproxy_richcompare, @@ -1626,6 +1663,7 @@ frame_dealloc(PyFrameObject *f) Py_CLEAR(f->f_back); Py_CLEAR(f->f_trace); Py_CLEAR(f->f_extra_locals); + Py_CLEAR(f->f_locals_cache); PyObject_GC_Del(f); Py_XDECREF(co); Py_TRASHCAN_END; @@ -1637,6 +1675,7 @@ frame_traverse(PyFrameObject *f, visitproc visit, void *arg) Py_VISIT(f->f_back); Py_VISIT(f->f_trace); Py_VISIT(f->f_extra_locals); + Py_VISIT(f->f_locals_cache); if (f->f_frame->owner != FRAME_OWNED_BY_FRAME_OBJECT) { return 0; } @@ -1649,6 +1688,7 @@ frame_tp_clear(PyFrameObject *f) { Py_CLEAR(f->f_trace); Py_CLEAR(f->f_extra_locals); + Py_CLEAR(f->f_locals_cache); /* locals and stack */ PyObject **locals = _PyFrame_GetLocalsArray(f->f_frame); @@ -1786,6 +1826,7 @@ _PyFrame_New_NoTrack(PyCodeObject *code) f->f_trace_opcodes = 0; f->f_lineno = 0; f->f_extra_locals = NULL; + f->f_locals_cache = 
NULL; return f; } diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c index 2779baf0bd1..3c64e75fca8 100644 --- a/Objects/genericaliasobject.c +++ b/Objects/genericaliasobject.c @@ -561,6 +561,10 @@ ga_getitem(PyObject *self, PyObject *item) } PyObject *res = Py_GenericAlias(alias->origin, newargs); + if (res == NULL) { + Py_DECREF(newargs); + return NULL; + } ((gaobject *)res)->starred = alias->starred; Py_DECREF(newargs); diff --git a/Objects/longobject.c b/Objects/longobject.c index ce3fd6b711d..03d618aeedc 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -483,11 +483,18 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) do_decref = 1; } if (_PyLong_IsCompact(v)) { -#if SIZEOF_LONG < SIZEOF_VOID_P - intptr_t tmp = _PyLong_CompactValue(v); - res = (long)tmp; - if (res != tmp) { - *overflow = tmp < 0 ? -1 : 1; +#if SIZEOF_LONG < SIZEOF_SIZE_T + Py_ssize_t tmp = _PyLong_CompactValue(v); + if (tmp < LONG_MIN) { + *overflow = -1; + res = -1; + } + else if (tmp > LONG_MAX) { + *overflow = 1; + res = -1; + } + else { + res = (long)tmp; } #else res = _PyLong_CompactValue(v); @@ -632,14 +639,15 @@ PyLong_AsUnsignedLong(PyObject *vv) v = (PyLongObject *)vv; if (_PyLong_IsNonNegativeCompact(v)) { -#if SIZEOF_LONG < SIZEOF_VOID_P - intptr_t tmp = _PyLong_CompactValue(v); +#if SIZEOF_LONG < SIZEOF_SIZE_T + size_t tmp = (size_t)_PyLong_CompactValue(v); unsigned long res = (unsigned long)tmp; if (res != tmp) { goto overflow; } + return res; #else - return _PyLong_CompactValue(v); + return (unsigned long)(size_t)_PyLong_CompactValue(v); #endif } if (_PyLong_IsNegative(v)) { @@ -685,7 +693,7 @@ PyLong_AsSize_t(PyObject *vv) v = (PyLongObject *)vv; if (_PyLong_IsNonNegativeCompact(v)) { - return _PyLong_CompactValue(v); + return (size_t)_PyLong_CompactValue(v); } if (_PyLong_IsNegative(v)) { PyErr_SetString(PyExc_OverflowError, @@ -722,7 +730,11 @@ _PyLong_AsUnsignedLongMask(PyObject *vv) } v = (PyLongObject *)vv; if (_PyLong_IsCompact(v)) { - return (unsigned long)_PyLong_CompactValue(v); +#if SIZEOF_LONG < SIZEOF_SIZE_T + return (unsigned long)(size_t)_PyLong_CompactValue(v); +#else + return (unsigned long)(long)_PyLong_CompactValue(v); +#endif } i = _PyLong_DigitCount(v); int sign = _PyLong_NonCompactSign(v); @@ -1116,13 +1128,17 @@ PyLong_AsNativeBytes(PyObject* vv, void* buffer, Py_ssize_t n, int flags) if (PyLong_Check(vv)) { v = (PyLongObject *)vv; } - else { + else if (flags != -1 && (flags & Py_ASNATIVEBYTES_ALLOW_INDEX)) { v = (PyLongObject *)_PyNumber_Index(vv); if (v == NULL) { return -1; } do_decref = 1; } + else { + PyErr_Format(PyExc_TypeError, "expect int, got %T", vv); + return -1; + } if ((flags != -1 && (flags & Py_ASNATIVEBYTES_REJECT_NEGATIVE)) && _PyLong_IsNegative(v)) { @@ -1524,7 +1540,18 @@ PyLong_AsUnsignedLongLong(PyObject *vv) v = (PyLongObject*)vv; if (_PyLong_IsNonNegativeCompact(v)) { res = 0; - bytes = _PyLong_CompactValue(v); +#if SIZEOF_LONG_LONG < SIZEOF_SIZE_T + size_t tmp = (size_t)_PyLong_CompactValue(v); + bytes = (unsigned long long)tmp; + if (bytes != tmp) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert " + "to C unsigned long long"); + res = -1; + } +#else + bytes = (unsigned long long)(size_t)_PyLong_CompactValue(v); +#endif } else { res = _PyLong_AsByteArray((PyLongObject *)vv, (unsigned char *)&bytes, @@ -1555,7 +1582,11 @@ _PyLong_AsUnsignedLongLongMask(PyObject *vv) } v = (PyLongObject *)vv; if (_PyLong_IsCompact(v)) { - return (unsigned long long)(signed long long)_PyLong_CompactValue(v); 
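/*
 * The longobject.c hunks in this area all follow the same shape whenever the
 * destination type is narrower than size_t: read the compact value at full
 * Py_ssize_t width, range-check it, and only then narrow.  Distilled into an
 * isolated helper (hypothetical name; mirrors the PyLong_AsLongAndOverflow()
 * change above).
 */
static long
compact_to_long_sketch(Py_ssize_t compact, int *overflow)
{
    *overflow = 0;
#if SIZEOF_LONG < SIZEOF_SIZE_T
    if (compact < LONG_MIN) {
        *overflow = -1;
        return -1;
    }
    if (compact > LONG_MAX) {
        *overflow = 1;
        return -1;
    }
#endif
    return (long)compact;
}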
+#if SIZEOF_LONG_LONG < SIZEOF_SIZE_T + return (unsigned long long)(size_t)_PyLong_CompactValue(v); +#else + return (unsigned long long)(long long)_PyLong_CompactValue(v); +#endif } i = _PyLong_DigitCount(v); sign = _PyLong_NonCompactSign(v); @@ -1627,7 +1658,22 @@ PyLong_AsLongLongAndOverflow(PyObject *vv, int *overflow) do_decref = 1; } if (_PyLong_IsCompact(v)) { +#if SIZEOF_LONG_LONG < SIZEOF_SIZE_T + Py_ssize_t tmp = _PyLong_CompactValue(v); + if (tmp < LLONG_MIN) { + *overflow = -1; + res = -1; + } + else if (tmp > LLONG_MAX) { + *overflow = 1; + res = -1; + } + else { + res = (long long)tmp; + } +#else res = _PyLong_CompactValue(v); +#endif } else { i = _PyLong_DigitCount(v); @@ -3564,7 +3610,7 @@ long_hash(PyLongObject *v) int sign; if (_PyLong_IsCompact(v)) { - x = _PyLong_CompactValue(v); + x = (Py_uhash_t)_PyLong_CompactValue(v); if (x == (Py_uhash_t)-1) { x = (Py_uhash_t)-2; } @@ -6488,7 +6534,7 @@ PyDoc_STRVAR(long_doc, int(x, base=10) -> integer\n\ \n\ Convert a number or string to an integer, or return 0 if no arguments\n\ -are given. If x is a number, return x.__int__(). For floating point\n\ +are given. If x is a number, return x.__int__(). For floating-point\n\ numbers, this truncates towards zero.\n\ \n\ If x is not a number or if base is given, then x must be a string,\n\ diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index 226bd6defde..4f872361117 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -109,8 +109,6 @@ mbuf_release(_PyManagedBufferObject *self) if (self->flags&_Py_MANAGED_BUFFER_RELEASED) return; - /* NOTE: at this point self->exports can still be > 0 if this function - is called from mbuf_clear() to break up a reference cycle. */ self->flags |= _Py_MANAGED_BUFFER_RELEASED; /* PyBuffer_Release() decrements master->obj and sets it to NULL. */ @@ -1096,32 +1094,19 @@ PyBuffer_ToContiguous(void *buf, const Py_buffer *src, Py_ssize_t len, char orde /* Inform the managed buffer that this particular memoryview will not access the underlying buffer again. If no other memoryviews are registered with the managed buffer, the underlying buffer is released instantly and - marked as inaccessible for both the memoryview and the managed buffer. - - This function fails if the memoryview itself has exported buffers. */ -static int + marked as inaccessible for both the memoryview and the managed buffer. */ +static void _memory_release(PyMemoryViewObject *self) { + assert(self->exports == 0); if (self->flags & _Py_MEMORYVIEW_RELEASED) - return 0; + return; - if (self->exports == 0) { - self->flags |= _Py_MEMORYVIEW_RELEASED; - assert(self->mbuf->exports > 0); - if (--self->mbuf->exports == 0) - mbuf_release(self->mbuf); - return 0; + self->flags |= _Py_MEMORYVIEW_RELEASED; + assert(self->mbuf->exports > 0); + if (--self->mbuf->exports == 0) { + mbuf_release(self->mbuf); } - if (self->exports > 0) { - PyErr_Format(PyExc_BufferError, - "memoryview has %zd exported buffer%s", self->exports, - self->exports==1 ? 
"" : "s"); - return -1; - } - - PyErr_SetString(PyExc_SystemError, - "_memory_release(): negative export count"); - return -1; } /*[clinic input] @@ -1134,9 +1119,21 @@ static PyObject * memoryview_release_impl(PyMemoryViewObject *self) /*[clinic end generated code: output=d0b7e3ba95b7fcb9 input=bc71d1d51f4a52f0]*/ { - if (_memory_release(self) < 0) + if (self->exports == 0) { + _memory_release(self); + Py_RETURN_NONE; + } + + if (self->exports > 0) { + PyErr_Format(PyExc_BufferError, + "memoryview has %zd exported buffer%s", self->exports, + self->exports==1 ? "" : "s"); return NULL; - Py_RETURN_NONE; + } + + PyErr_SetString(PyExc_SystemError, + "memoryview: negative export count"); + return NULL; } static void @@ -1145,7 +1142,7 @@ memory_dealloc(PyObject *_self) PyMemoryViewObject *self = (PyMemoryViewObject *)_self; assert(self->exports == 0); _PyObject_GC_UNTRACK(self); - (void)_memory_release(self); + _memory_release(self); Py_CLEAR(self->mbuf); if (self->weakreflist != NULL) PyObject_ClearWeakRefs((PyObject *) self); @@ -1164,8 +1161,10 @@ static int memory_clear(PyObject *_self) { PyMemoryViewObject *self = (PyMemoryViewObject *)_self; - (void)_memory_release(self); - Py_CLEAR(self->mbuf); + if (self->exports == 0) { + _memory_release(self); + Py_CLEAR(self->mbuf); + } return 0; } diff --git a/Objects/mimalloc/os.c b/Objects/mimalloc/os.c index f3bc7184c41..c9103168c12 100644 --- a/Objects/mimalloc/os.c +++ b/Objects/mimalloc/os.c @@ -115,8 +115,12 @@ void* _mi_os_get_aligned_hint(size_t try_alignment, size_t size) if (hint == 0 || hint > MI_HINT_MAX) { // wrap or initialize uintptr_t init = MI_HINT_BASE; #if (MI_SECURE>0 || MI_DEBUG==0) // security: randomize start of aligned allocations unless in debug mode - uintptr_t r = _mi_heap_random_next(mi_prim_get_default_heap()); - init = init + ((MI_SEGMENT_SIZE * ((r>>17) & 0xFFFFF)) % MI_HINT_AREA); // (randomly 20 bits)*4MiB == 0 to 4TiB + mi_heap_t* heap = mi_prim_get_default_heap(); + // gh-123022: default heap may not be initialized in CPython in background threads + if (mi_heap_is_initialized(heap)) { + uintptr_t r = _mi_heap_random_next(heap); + init = init + ((MI_SEGMENT_SIZE * ((r>>17) & 0xFFFFF)) % MI_HINT_AREA); // (randomly 20 bits)*4MiB == 0 to 4TiB + } #endif uintptr_t expected = hint + size; mi_atomic_cas_strong_acq_rel(&aligned_base, &expected, init); @@ -553,8 +557,12 @@ static uint8_t* mi_os_claim_huge_pages(size_t pages, size_t* total_size) { // Initialize the start address after the 32TiB area start = ((uintptr_t)32 << 40); // 32TiB virtual start address #if (MI_SECURE>0 || MI_DEBUG==0) // security: randomize start of huge pages unless in debug mode - uintptr_t r = _mi_heap_random_next(mi_prim_get_default_heap()); - start = start + ((uintptr_t)MI_HUGE_OS_PAGE_SIZE * ((r>>17) & 0x0FFF)); // (randomly 12bits)*1GiB == between 0 to 4TiB + mi_heap_t* heap = mi_prim_get_default_heap(); + // gh-123022: default heap may not be initialized in CPython in background threads + if (mi_heap_is_initialized(heap)) { + uintptr_t r = _mi_heap_random_next(heap); + start = start + ((uintptr_t)MI_HUGE_OS_PAGE_SIZE * ((r>>17) & 0x0FFF)); // (randomly 12bits)*1GiB == between 0 to 4TiB + } #endif } end = start + size; diff --git a/Objects/mimalloc/prim/unix/prim.c b/Objects/mimalloc/prim/unix/prim.c index c6ea05bbe7a..4d3fb65e954 100644 --- a/Objects/mimalloc/prim/unix/prim.c +++ b/Objects/mimalloc/prim/unix/prim.c @@ -27,6 +27,7 @@ terms of the MIT license. 
A copy of the license can be found in the file #include // mmap #include // sysconf +#include // open, close, read, access #if defined(__linux__) #include @@ -50,7 +51,7 @@ terms of the MIT license. A copy of the license can be found in the file #include #endif -#if !defined(__HAIKU__) && !defined(__APPLE__) && !defined(__CYGWIN__) && !defined(_AIX) && !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__sun) +#if !defined(__HAIKU__) && !defined(__APPLE__) && !defined(__CYGWIN__) && !defined(_AIX) && !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__sun) && !defined(__NetBSD__) #define MI_HAS_SYSCALL_H #include #endif @@ -76,7 +77,7 @@ static int mi_prim_access(const char *fpath, int mode) { return syscall(SYS_access,fpath,mode); } -#elif !defined(__APPLE__) && !defined(_AIX) && !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__sun) // avoid unused warnings +#elif !defined(__APPLE__) && !defined(_AIX) && !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__sun) && !defined(__NetBSD__) // avoid unused warnings static int mi_prim_open(const char* fpath, int open_flags) { return open(fpath,open_flags); diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index 46995b948a2..92554de055c 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -250,7 +250,7 @@ _PyModule_CreateInitialized(PyModuleDef* module, int module_api_version) } } m->md_def = module; -#ifdef Py_GIL_DISABLE +#ifdef Py_GIL_DISABLED m->md_gil = Py_MOD_GIL_USED; #endif return (PyObject*)m; diff --git a/Objects/object.c b/Objects/object.c index fcd81b8309b..cbf576d5e5a 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -375,7 +375,6 @@ _Py_MergeZeroLocalRefcount(PyObject *op) { assert(op->ob_ref_local == 0); - _Py_atomic_store_uintptr_relaxed(&op->ob_tid, 0); Py_ssize_t shared = _Py_atomic_load_ssize_acquire(&op->ob_ref_shared); if (shared == 0) { // Fast-path: shared refcount is zero (including flags) @@ -383,6 +382,11 @@ _Py_MergeZeroLocalRefcount(PyObject *op) return; } + // gh-121794: This must be before the store to `ob_ref_shared` (gh-119999), + // but should outside the fast-path to maintain the invariant that + // a zero `ob_tid` implies a merged refcount. + _Py_atomic_store_uintptr_relaxed(&op->ob_tid, 0); + // Slow-path: atomically set the flags (low two bits) to _Py_REF_MERGED. Py_ssize_t new_shared; do { @@ -531,6 +535,7 @@ int PyObject_Print(PyObject *op, FILE *fp, int flags) { int ret = 0; + int write_error = 0; if (PyErr_CheckSignals()) return -1; #ifdef USE_STACKCHECK @@ -569,14 +574,20 @@ PyObject_Print(PyObject *op, FILE *fp, int flags) ret = -1; } else { - fwrite(t, 1, len, fp); + /* Versions of Android and OpenBSD from before 2023 fail to + set the `ferror` indicator when writing to a read-only + stream, so we need to check the return value. 
+ (https://github.com/openbsd/src/commit/fc99cf9338942ecd9adc94ea08bf6188f0428c15) */ + if (fwrite(t, 1, len, fp) != (size_t)len) { + write_error = 1; + } } Py_DECREF(s); } } } if (ret == 0) { - if (ferror(fp)) { + if (write_error || ferror(fp)) { PyErr_SetFromErrno(PyExc_OSError); clearerr(fp); ret = -1; @@ -2313,6 +2324,7 @@ static PyTypeObject* static_types[] = { &_PyWeakref_ProxyType, &_PyWeakref_RefType, &_PyTypeAlias_Type, + &_PyNoDefault_Type, // subclasses: _PyTypes_FiniTypes() deallocates them before their base // class @@ -2739,7 +2751,6 @@ _PyTrash_thread_deposit_object(PyThreadState *tstate, PyObject *op) _PyObject_ASSERT(op, !_PyObject_GC_IS_TRACKED(op)); _PyObject_ASSERT(op, Py_REFCNT(op) == 0); #ifdef Py_GIL_DISABLED - _PyObject_ASSERT(op, op->ob_tid == 0); op->ob_tid = (uintptr_t)tstate->delete_later; #else _PyGCHead_SET_PREV(_Py_AS_GC(op), (PyGC_Head*)tstate->delete_later); @@ -2772,6 +2783,7 @@ _PyTrash_thread_destroy_chain(PyThreadState *tstate) #ifdef Py_GIL_DISABLED tstate->delete_later = (PyObject*) op->ob_tid; op->ob_tid = 0; + _Py_atomic_store_ssize_relaxed(&op->ob_ref_shared, _Py_REF_MERGED); #else tstate->delete_later = (PyObject*) _PyGCHead_PREV(_Py_AS_GC(op)); #endif diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index 4fe195b6316..a9decc5dc1b 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -386,8 +386,16 @@ _PyMem_ArenaFree(void *Py_UNUSED(ctx), void *ptr, ) { #ifdef MS_WINDOWS + /* Unlike free(), VirtualFree() does not special-case NULL to noop. */ + if (ptr == NULL) { + return; + } VirtualFree(ptr, 0, MEM_RELEASE); #elif defined(ARENAS_USE_MMAP) + /* Unlike free(), munmap() does not special-case NULL to noop. */ + if (ptr == NULL) { + return; + } munmap(ptr, size); #else free(ptr); diff --git a/Objects/odictobject.c b/Objects/odictobject.c index 53f64fc81e7..858f0cbaf9e 100644 --- a/Objects/odictobject.c +++ b/Objects/odictobject.c @@ -796,6 +796,7 @@ _odict_clear_nodes(PyODictObject *od) _odictnode_DEALLOC(node); node = next; } + od->od_state++; } /* There isn't any memory management of nodes past this point. */ @@ -806,24 +807,40 @@ _odict_keys_equal(PyODictObject *a, PyODictObject *b) { _ODictNode *node_a, *node_b; + // keep operands' state to detect undesired mutations + const size_t state_a = a->od_state; + const size_t state_b = b->od_state; + node_a = _odict_FIRST(a); node_b = _odict_FIRST(b); while (1) { - if (node_a == NULL && node_b == NULL) + if (node_a == NULL && node_b == NULL) { /* success: hit the end of each at the same time */ return 1; - else if (node_a == NULL || node_b == NULL) + } + else if (node_a == NULL || node_b == NULL) { /* unequal length */ return 0; + } else { - int res = PyObject_RichCompareBool( - (PyObject *)_odictnode_KEY(node_a), - (PyObject *)_odictnode_KEY(node_b), - Py_EQ); - if (res < 0) + PyObject *key_a = Py_NewRef(_odictnode_KEY(node_a)); + PyObject *key_b = Py_NewRef(_odictnode_KEY(node_b)); + int res = PyObject_RichCompareBool(key_a, key_b, Py_EQ); + Py_DECREF(key_a); + Py_DECREF(key_b); + if (res < 0) { return res; - else if (res == 0) + } + else if (a->od_state != state_a || b->od_state != state_b) { + PyErr_SetString(PyExc_RuntimeError, + "OrderedDict mutated during iteration"); + return -1; + } + else if (res == 0) { + // This check comes after the check on the state + // in order for the exception to be set correctly. 
return 0; + } /* otherwise it must match, so move on to the next one */ node_a = _odictnode_NEXT(node_a); diff --git a/Objects/setobject.c b/Objects/setobject.c index 68986bb6a6b..e85b37f4086 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -184,14 +184,14 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) found_unused_or_dummy: if (freeslot == NULL) goto found_unused; - so->used++; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used + 1); freeslot->key = key; freeslot->hash = hash; return 0; found_unused: so->fill++; - so->used++; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used + 1); entry->key = key; entry->hash = hash; if ((size_t)so->fill*5 < mask*3) @@ -357,7 +357,7 @@ set_discard_entry(PySetObject *so, PyObject *key, Py_hash_t hash) old_key = entry->key; entry->key = dummy; entry->hash = -1; - so->used--; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used - 1); Py_DECREF(old_key); return DISCARD_FOUND; } @@ -365,13 +365,9 @@ set_discard_entry(PySetObject *so, PyObject *key, Py_hash_t hash) static int set_add_key(PySetObject *so, PyObject *key) { - Py_hash_t hash; - - if (!PyUnicode_CheckExact(key) || - (hash = _PyASCIIObject_CAST(key)->hash) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return -1; + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + return -1; } return set_add_entry(so, key, hash); } @@ -379,13 +375,9 @@ set_add_key(PySetObject *so, PyObject *key) static int set_contains_key(PySetObject *so, PyObject *key) { - Py_hash_t hash; - - if (!PyUnicode_CheckExact(key) || - (hash = _PyASCIIObject_CAST(key)->hash) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return -1; + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + return -1; } return set_contains_entry(so, key, hash); } @@ -393,13 +385,9 @@ set_contains_key(PySetObject *so, PyObject *key) static int set_discard_key(PySetObject *so, PyObject *key) { - Py_hash_t hash; - - if (!PyUnicode_CheckExact(key) || - (hash = _PyASCIIObject_CAST(key)->hash) == -1) { - hash = PyObject_Hash(key); - if (hash == -1) - return -1; + Py_hash_t hash = _PyObject_HashFast(key); + if (hash == -1) { + return -1; } return set_discard_entry(so, key, hash); } @@ -409,7 +397,7 @@ set_empty_to_minsize(PySetObject *so) { memset(so->smalltable, 0, sizeof(so->smalltable)); so->fill = 0; - so->used = 0; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, 0); so->mask = PySet_MINSIZE - 1; so->table = so->smalltable; so->hash = -1; @@ -627,7 +615,7 @@ set_merge_lock_held(PySetObject *so, PyObject *otherset) } } so->fill = other->fill; - so->used = other->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, other->used); return 0; } @@ -636,7 +624,7 @@ set_merge_lock_held(PySetObject *so, PyObject *otherset) setentry *newtable = so->table; size_t newmask = (size_t)so->mask; so->fill = other->used; - so->used = other->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, other->used); for (i = other->mask + 1; i > 0 ; i--, other_entry++) { key = other_entry->key; if (key != NULL && key != dummy) { @@ -690,7 +678,7 @@ set_pop_impl(PySetObject *so) key = entry->key; entry->key = dummy; entry->hash = -1; - so->used--; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used - 1); so->finger = entry - so->table + 1; /* next place to start */ return key; } @@ -1045,14 +1033,13 @@ set_update_internal(PySetObject *so, PyObject *other) set.update so: setobject *others as args: object - / Update the set, adding elements from all others. 
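/*
 * The setobject.c hunks above funnel every update of so->used through
 * FT_ATOMIC_STORE_SSIZE_RELAXED(), matching the relaxed loads introduced for
 * dict->ma_used earlier in this patch, so that free-threaded readers never
 * see a torn length.  Under the GIL these macros are assumed to collapse to
 * plain stores and loads.  A minimal sketch of the intended writer/reader
 * pairing (illustrative only; not the macro definitions):
 */
static void
set_used_incref_sketch(PySetObject *so)
{
    /* writer: runs with the per-object lock (or the GIL) held */
    FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used + 1);
}

static Py_ssize_t
set_len_sketch(PySetObject *so)
{
    /* reader: may run without the lock in free-threaded builds */
    return FT_ATOMIC_LOAD_SSIZE_RELAXED(so->used);
}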
[clinic start generated code]*/ static PyObject * set_update_impl(PySetObject *so, PyObject *args) -/*[clinic end generated code: output=34f6371704974c8a input=eb47c4fbaeb3286e]*/ +/*[clinic end generated code: output=34f6371704974c8a input=df4fe486e38cd337]*/ { Py_ssize_t i; @@ -1185,7 +1172,9 @@ set_swap_bodies(PySetObject *a, PySetObject *b) Py_hash_t h; t = a->fill; a->fill = b->fill; b->fill = t; - t = a->used; a->used = b->used; b->used = t; + t = a->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(a->used, b->used); + FT_ATOMIC_STORE_SSIZE_RELAXED(b->used, t); t = a->mask; a->mask = b->mask; b->mask = t; u = a->table; @@ -1273,14 +1262,13 @@ set_clear_impl(PySetObject *so) set.union so: setobject *others as args: object - / Return a new set with elements from the set and all others. [clinic start generated code]*/ static PyObject * set_union_impl(PySetObject *so, PyObject *args) -/*[clinic end generated code: output=2c83d05a446a1477 input=2e2024fa1e40ac84]*/ +/*[clinic end generated code: output=2c83d05a446a1477 input=ddf088706e9577b2]*/ { PySetObject *result; PyObject *other; @@ -1423,14 +1411,13 @@ set_intersection(PySetObject *so, PyObject *other) set.intersection as set_intersection_multi so: setobject *others as args: object - / Return a new set with elements common to the set and all others. [clinic start generated code]*/ static PyObject * set_intersection_multi_impl(PySetObject *so, PyObject *args) -/*[clinic end generated code: output=2406ef3387adbe2f input=04108ea6d7f0532b]*/ +/*[clinic end generated code: output=2406ef3387adbe2f input=0d9f3805ccbba6a4]*/ { Py_ssize_t i; @@ -1471,14 +1458,13 @@ set_intersection_update(PySetObject *so, PyObject *other) set.intersection_update as set_intersection_update_multi so: setobject *others as args: object - / Update the set, keeping only elements found in it and all others. [clinic start generated code]*/ static PyObject * set_intersection_update_multi_impl(PySetObject *so, PyObject *args) -/*[clinic end generated code: output=251c1f729063609d input=ff8f119f97458d16]*/ +/*[clinic end generated code: output=251c1f729063609d input=223c1e086aa669a9]*/ { PyObject *tmp; @@ -1659,14 +1645,13 @@ set_difference_update_internal(PySetObject *so, PyObject *other) set.difference_update so: setobject *others as args: object - / Update the set, removing elements found in others. [clinic start generated code]*/ static PyObject * set_difference_update_impl(PySetObject *so, PyObject *args) -/*[clinic end generated code: output=28685b2fc63e41c4 input=e7abb43c9f2c5a73]*/ +/*[clinic end generated code: output=28685b2fc63e41c4 input=024e6baa6fbcbb3d]*/ { Py_ssize_t i; @@ -1777,14 +1762,13 @@ set_difference(PySetObject *so, PyObject *other) set.difference as set_difference_multi so: setobject *others as args: object - / Return a new set with elements in the set that are not in the others. 
[clinic start generated code]*/ static PyObject * set_difference_multi_impl(PySetObject *so, PyObject *args) -/*[clinic end generated code: output=3130c3bb3cac873d input=d8ae9bb6d518ab95]*/ +/*[clinic end generated code: output=3130c3bb3cac873d input=ba78ea5f099e58df]*/ { Py_ssize_t i; PyObject *result, *other; diff --git a/Objects/structseq.c b/Objects/structseq.c index d8289f2638d..ee3dbf9d4c0 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -41,12 +41,20 @@ get_type_attr_as_size(PyTypeObject *tp, PyObject *name) get_type_attr_as_size(tp, &_Py_ID(n_sequence_fields)) #define REAL_SIZE_TP(tp) \ get_type_attr_as_size(tp, &_Py_ID(n_fields)) -#define REAL_SIZE(op) REAL_SIZE_TP(Py_TYPE(op)) +#define REAL_SIZE(op) get_real_size((PyObject *)op) #define UNNAMED_FIELDS_TP(tp) \ get_type_attr_as_size(tp, &_Py_ID(n_unnamed_fields)) #define UNNAMED_FIELDS(op) UNNAMED_FIELDS_TP(Py_TYPE(op)) +static Py_ssize_t +get_real_size(PyObject *op) +{ + // Compute the real size from the visible size (i.e., Py_SIZE()) and the + // number of non-sequence fields accounted for in tp_basicsize. + Py_ssize_t hidden = Py_TYPE(op)->tp_basicsize - offsetof(PyStructSequence, ob_item); + return Py_SIZE(op) + hidden / sizeof(PyObject *); +} PyObject * PyStructSequence_New(PyTypeObject *type) @@ -120,6 +128,9 @@ structseq_dealloc(PyStructSequence *obj) PyObject_GC_UnTrack(obj); PyTypeObject *tp = Py_TYPE(obj); + // gh-122527: We can't use REAL_SIZE_TP() or any macros that access the + // type's dictionary here, because the dictionary may have already been + // cleared by the garbage collector. size = REAL_SIZE(obj); for (i = 0; i < size; ++i) { Py_XDECREF(obj->ob_item[i]); @@ -565,10 +576,14 @@ initialize_members(PyStructSequence_Desc *desc, static void initialize_static_fields(PyTypeObject *type, PyStructSequence_Desc *desc, - PyMemberDef *tp_members, unsigned long tp_flags) + PyMemberDef *tp_members, Py_ssize_t n_members, + unsigned long tp_flags) { type->tp_name = desc->name; - type->tp_basicsize = sizeof(PyStructSequence) - sizeof(PyObject *); + // Account for hidden members in tp_basicsize because they are not + // included in the variable size. 
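/*
 * Worked example for the struct-sequence size bookkeeping in this file,
 * assuming a typical 64-bit, non-debug layout (sizeof(PyObject *) == 8,
 * offsetof(PyStructSequence, ob_item) == 24, hence sizeof(PyStructSequence) == 32).
 * Take a hypothetical descriptor with 5 fields, 3 of them visible:
 *
 *   n_hidden     = 5 - 3              = 2
 *   tp_basicsize = 32 + (2 - 1) * 8   = 40
 *   Py_SIZE(op)  = 3                    (only visible items are variable-size)
 *   real size    = 3 + (40 - 24) / 8  = 5  == n_fields
 *
 * which is exactly what structseq_dealloc() needs, without consulting the
 * type's dictionary (which may already have been cleared by the GC).
 */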
+ Py_ssize_t n_hidden = n_members - desc->n_in_sequence; + type->tp_basicsize = sizeof(PyStructSequence) + (n_hidden - 1) * sizeof(PyObject *); type->tp_itemsize = sizeof(PyObject *); type->tp_dealloc = (destructor)structseq_dealloc; type->tp_repr = (reprfunc)structseq_repr; @@ -621,7 +636,7 @@ _PyStructSequence_InitBuiltinWithFlags(PyInterpreterState *interp, if (members == NULL) { goto error; } - initialize_static_fields(type, desc, members, tp_flags); + initialize_static_fields(type, desc, members, n_members, tp_flags); _Py_SetImmortal((PyObject *)type); } @@ -684,7 +699,7 @@ PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc) if (members == NULL) { return -1; } - initialize_static_fields(type, desc, members, 0); + initialize_static_fields(type, desc, members, n_members, 0); if (initialize_static_type(type, desc, n_members, n_unnamed_members) < 0) { PyMem_Free(members); return -1; @@ -710,7 +725,7 @@ _PyStructSequence_FiniBuiltin(PyInterpreterState *interp, PyTypeObject *type) assert(type->tp_name != NULL); assert(type->tp_base == &PyTuple_Type); assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); - assert(_Py_IsImmortal(type)); + assert(_Py_IsImmortalLoose(type)); // Cannot delete a type if it still has subclasses if (_PyType_HasSubclasses(type)) { @@ -760,7 +775,8 @@ _PyStructSequence_NewType(PyStructSequence_Desc *desc, unsigned long tp_flags) /* The name in this PyType_Spec is statically allocated so it is */ /* expected that it'll outlive the PyType_Spec */ spec.name = desc->name; - spec.basicsize = sizeof(PyStructSequence) - sizeof(PyObject *); + Py_ssize_t hidden = n_members - desc->n_in_sequence; + spec.basicsize = (int)(sizeof(PyStructSequence) + (hidden - 1) * sizeof(PyObject *)); spec.itemsize = sizeof(PyObject *); spec.flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | tp_flags; spec.slots = slots; diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 5ae1ee9a89a..371fb074387 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -1132,7 +1132,7 @@ maybe_freelist_pop(Py_ssize_t size) return NULL; } assert(size > 0); - if (size < PyTuple_MAXSAVESIZE) { + if (size <= PyTuple_MAXSAVESIZE) { Py_ssize_t index = size - 1; PyTupleObject *op = TUPLE_FREELIST.items[index]; if (op != NULL) { diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 63902ebc1f9..c911c302003 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -452,7 +452,7 @@ set_tp_bases(PyTypeObject *self, PyObject *bases, int initial) assert(PyTuple_GET_SIZE(bases) == 1); assert(PyTuple_GET_ITEM(bases, 0) == (PyObject *)self->tp_base); assert(self->tp_base->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); - assert(_Py_IsImmortal(self->tp_base)); + assert(_Py_IsImmortalLoose(self->tp_base)); } _Py_SetImmortal(bases); } @@ -469,7 +469,7 @@ clear_tp_bases(PyTypeObject *self, int final) Py_CLEAR(self->tp_bases); } else { - assert(_Py_IsImmortal(self->tp_bases)); + assert(_Py_IsImmortalLoose(self->tp_bases)); _Py_ClearImmortal(self->tp_bases); } } @@ -534,7 +534,7 @@ clear_tp_mro(PyTypeObject *self, int final) Py_CLEAR(self->tp_mro); } else { - assert(_Py_IsImmortal(self->tp_mro)); + assert(_Py_IsImmortalLoose(self->tp_mro)); _Py_ClearImmortal(self->tp_mro); } } @@ -999,6 +999,8 @@ type_modified_unlocked(PyTypeObject *type) if (type->tp_version_tag == 0) { return; } + // Cannot modify static builtin types. 
+ assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) == 0); PyObject *subclasses = lookup_tp_subclasses(type); if (subclasses != NULL) { @@ -1349,6 +1351,9 @@ type_set_module(PyTypeObject *type, PyObject *value, void *context) PyType_Modified(type); PyObject *dict = lookup_tp_dict(type); + if (PyDict_Pop(dict, &_Py_ID(__firstlineno__), NULL) < 0) { + return -1; + } return PyDict_SetItem(dict, &_Py_ID(__module__), value); } @@ -5069,15 +5074,10 @@ find_name_in_mro(PyTypeObject *type, PyObject *name, int *error) { ASSERT_TYPE_LOCK_HELD(); - Py_hash_t hash; - if (!PyUnicode_CheckExact(name) || - (hash = _PyASCIIObject_CAST(name)->hash) == -1) - { - hash = PyObject_Hash(name); - if (hash == -1) { - *error = -1; - return NULL; - } + Py_hash_t hash = _PyObject_HashFast(name); + if (hash == -1) { + *error = -1; + return NULL; } /* Look in tp_dict of types in MRO */ @@ -5210,7 +5210,7 @@ _PyType_LookupRef(PyTypeObject *type, PyObject *name) #ifdef Py_GIL_DISABLED // synchronize-with other writing threads by doing an acquire load on the sequence while (1) { - int sequence = _PySeqLock_BeginRead(&entry->sequence); + uint32_t sequence = _PySeqLock_BeginRead(&entry->sequence); uint32_t entry_version = _Py_atomic_load_uint32_relaxed(&entry->version); uint32_t type_version = _Py_atomic_load_uint32_acquire(&type->tp_version_tag); if (entry_version == type_version && @@ -5660,7 +5660,7 @@ fini_static_type(PyInterpreterState *interp, PyTypeObject *type, int isbuiltin, int final) { assert(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); - assert(_Py_IsImmortal((PyObject *)type)); + assert(_Py_IsImmortalLoose((PyObject *)type)); type_dealloc_common(type); @@ -6374,28 +6374,11 @@ compatible_for_assignment(PyTypeObject* oldto, PyTypeObject* newto, const char* return 0; } -static int -object_set_class(PyObject *self, PyObject *value, void *closure) -{ - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "can't delete __class__ attribute"); - return -1; - } - if (!PyType_Check(value)) { - PyErr_Format(PyExc_TypeError, - "__class__ must be set to a class, not '%s' object", - Py_TYPE(value)->tp_name); - return -1; - } - PyTypeObject *newto = (PyTypeObject *)value; - - if (PySys_Audit("object.__setattr__", "OsO", - self, "__class__", value) < 0) { - return -1; - } +static int +object_set_class_world_stopped(PyObject *self, PyTypeObject *newto) +{ PyTypeObject *oldto = Py_TYPE(self); /* In versions of CPython prior to 3.5, the code in @@ -6461,39 +6444,66 @@ object_set_class(PyObject *self, PyObject *value, void *closure) /* Changing the class will change the implicit dict keys, * so we must materialize the dictionary first. */ if (oldto->tp_flags & Py_TPFLAGS_INLINE_VALUES) { - PyDictObject *dict = _PyObject_MaterializeManagedDict(self); + PyDictObject *dict = _PyObject_GetManagedDict(self); if (dict == NULL) { - return -1; + dict = _PyObject_MaterializeManagedDict_LockHeld(self); + if (dict == NULL) { + return -1; + } } - bool error = false; - - Py_BEGIN_CRITICAL_SECTION2(self, dict); - - // If we raced after materialization and replaced the dict - // then the materialized dict should no longer have the - // inline values in which case detach is a nop. 
- assert(_PyObject_GetManagedDict(self) == dict || - dict->ma_values != _PyObject_InlineValues(self)); + assert(_PyObject_GetManagedDict(self) == dict); if (_PyDict_DetachFromObject(dict, self) < 0) { - error = true; - } - - Py_END_CRITICAL_SECTION2(); - if (error) { return -1; } + } if (newto->tp_flags & Py_TPFLAGS_HEAPTYPE) { Py_INCREF(newto); } - Py_BEGIN_CRITICAL_SECTION(self); - // The real Py_TYPE(self) (`oldto`) may have changed from - // underneath us in another thread, so we re-fetch it here. - oldto = Py_TYPE(self); + Py_SET_TYPE(self, newto); - Py_END_CRITICAL_SECTION(); + + return 0; + } + else { + return -1; + } +} + +static int +object_set_class(PyObject *self, PyObject *value, void *closure) +{ + + if (value == NULL) { + PyErr_SetString(PyExc_TypeError, + "can't delete __class__ attribute"); + return -1; + } + if (!PyType_Check(value)) { + PyErr_Format(PyExc_TypeError, + "__class__ must be set to a class, not '%s' object", + Py_TYPE(value)->tp_name); + return -1; + } + PyTypeObject *newto = (PyTypeObject *)value; + + if (PySys_Audit("object.__setattr__", "OsO", + self, "__class__", value) < 0) { + return -1; + } + +#ifdef Py_GIL_DISABLED + PyInterpreterState *interp = _PyInterpreterState_GET(); + _PyEval_StopTheWorld(interp); +#endif + PyTypeObject *oldto = Py_TYPE(self); + int res = object_set_class_world_stopped(self, newto); +#ifdef Py_GIL_DISABLED + _PyEval_StartTheWorld(interp); +#endif + if (res == 0) { if (oldto->tp_flags & Py_TPFLAGS_HEAPTYPE) { Py_DECREF(oldto); } @@ -6501,9 +6511,7 @@ object_set_class(PyObject *self, PyObject *value, void *closure) RARE_EVENT_INC(set_class); return 0; } - else { - return -1; - } + return res; } static PyGetSetDef object_getsets[] = { @@ -10859,6 +10867,84 @@ recurse_down_subclasses(PyTypeObject *type, PyObject *attr_name, return 0; } +static int +expect_manually_inherited(PyTypeObject *type, void **slot) +{ + PyObject *typeobj = (PyObject *)type; + if (slot == (void *)&type->tp_init) { + /* This is a best-effort list of builtin exception types + that have their own tp_init function. */ + if (typeobj != PyExc_BaseException + && typeobj != PyExc_BaseExceptionGroup + && typeobj != PyExc_ImportError + && typeobj != PyExc_NameError + && typeobj != PyExc_OSError + && typeobj != PyExc_StopIteration + && typeobj != PyExc_SyntaxError + && typeobj != PyExc_UnicodeDecodeError + && typeobj != PyExc_UnicodeEncodeError + + && type != &PyBool_Type + && type != &PyBytes_Type + && type != &PyMemoryView_Type + && type != &PyComplex_Type + && type != &PyEnum_Type + && type != &PyFilter_Type + && type != &PyFloat_Type + && type != &PyFrozenSet_Type + && type != &PyLong_Type + && type != &PyMap_Type + && type != &PyRange_Type + && type != &PyReversed_Type + && type != &PySlice_Type + && type != &PyTuple_Type + && type != &PyUnicode_Type + && type != &PyZip_Type) + + { + return 1; + } + } + else if (slot == (void *)&type->tp_str) { + /* This is a best-effort list of builtin exception types + that have their own tp_str function. */ + if (typeobj == PyExc_AttributeError || typeobj == PyExc_NameError) { + return 1; + } + } + else if (slot == (void *)&type->tp_getattr + || slot == (void *)&type->tp_getattro) + { + /* This is a best-effort list of builtin types + that have their own tp_getattr function. 
*/ + if (typeobj == PyExc_BaseException + || type == &PyByteArray_Type + || type == &PyBytes_Type + || type == &PyComplex_Type + || type == &PyDict_Type + || type == &PyEnum_Type + || type == &PyFilter_Type + || type == &PyLong_Type + || type == &PyList_Type + || type == &PyMap_Type + || type == &PyMemoryView_Type + || type == &PyProperty_Type + || type == &PyRange_Type + || type == &PyReversed_Type + || type == &PySet_Type + || type == &PySlice_Type + || type == &PySuper_Type + || type == &PyTuple_Type + || type == &PyZip_Type) + { + return 1; + } + } + + /* It must be inherited (see type_ready_inherit()).. */ + return 0; +} + /* This function is called by PyType_Ready() to populate the type's dictionary with method descriptors for function slots. For each function slot (like tp_repr) that's defined in the type, one or more @@ -10903,6 +10989,26 @@ add_operators(PyTypeObject *type) ptr = slotptr(type, p->offset); if (!ptr || !*ptr) continue; + if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN + && type->tp_base != NULL) + { + /* Also ignore when the type slot has been inherited. */ + void **ptr_base = slotptr(type->tp_base, p->offset); + if (ptr_base && *ptr == *ptr_base) { + /* Ideally we would always ignore any manually inherited + slots, Which would mean inheriting the slot wrapper + using normal attribute lookup rather than keeping + a distinct copy. However, that would introduce + a slight change in behavior that could break + existing code. + + In the meantime, look the other way when the definition + explicitly inherits the slot. */ + if (!expect_manually_inherited(type, ptr)) { + continue; + } + } + } int r = PyDict_Contains(dict, p->name_strobj); if (r > 0) continue; diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c index c8ab14053de..f3e3ed0c9af 100644 --- a/Objects/typevarobject.c +++ b/Objects/typevarobject.c @@ -1640,7 +1640,16 @@ typealias_alloc(PyObject *name, PyObject *type_params, PyObject *compute_value, return NULL; } ta->name = Py_NewRef(name); - ta->type_params = Py_IsNone(type_params) ? NULL : Py_XNewRef(type_params); + if ( + type_params == NULL + || Py_IsNone(type_params) + || (PyTuple_Check(type_params) && PyTuple_GET_SIZE(type_params) == 0) + ) { + ta->type_params = NULL; + } + else { + ta->type_params = Py_NewRef(type_params); + } ta->compute_value = Py_XNewRef(compute_value); ta->value = Py_XNewRef(value); ta->module = Py_XNewRef(module); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index b21886e22c1..5a6ae78fe23 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -247,7 +247,8 @@ _PyUnicode_InternedSize_Immortal(void) // value, to help detect bugs in optimizations. while (PyDict_Next(dict, &pos, &key, &value)) { - if (_Py_IsImmortal(key)) { + assert(PyUnicode_CHECK_INTERNED(key) != SSTATE_INTERNED_IMMORTAL_STATIC); + if (PyUnicode_CHECK_INTERNED(key) == SSTATE_INTERNED_IMMORTAL) { count++; } } @@ -319,7 +320,8 @@ init_global_interned_strings(PyInterpreterState *interp) return _PyStatus_ERR("failed to create global interned dict"); } - /* Intern statically allocated string identifiers and deepfreeze strings. + /* Intern statically allocated string identifiers, deepfreeze strings, + * and one-byte latin-1 strings. * This must be done before any module initialization so that statically * allocated string identifiers are used instead of heap allocated strings. 
* Deepfreeze uses the interned identifiers if present to save space @@ -327,14 +329,11 @@ init_global_interned_strings(PyInterpreterState *interp) */ _PyUnicode_InitStaticStrings(interp); -#ifdef Py_GIL_DISABLED -// In the free-threaded build, intern the 1-byte strings as well for (int i = 0; i < 256; i++) { PyObject *s = LATIN1(i); _PyUnicode_InternStatic(interp, &s); assert(s == LATIN1(i)); } -#endif #ifdef Py_DEBUG assert(_PyUnicode_CheckConsistency(&_Py_STR(empty), 1)); @@ -683,10 +682,14 @@ _PyUnicode_CheckConsistency(PyObject *op, int check_content) /* Check interning state */ #ifdef Py_DEBUG + // Note that we do not check `_Py_IsImmortal(op)`, since stable ABI + // extensions can make immortal strings mortal (but with a high enough + // refcount). + // The other way is extremely unlikely (worth a potential failed assertion + // in a debug build), so we do check `!_Py_IsImmortal(op)`. switch (PyUnicode_CHECK_INTERNED(op)) { case SSTATE_NOT_INTERNED: if (ascii->state.statically_allocated) { - CHECK(_Py_IsImmortal(op)); // This state is for two exceptions: // - strings are currently checked before they're interned // - the 256 one-latin1-character strings @@ -702,11 +705,9 @@ _PyUnicode_CheckConsistency(PyObject *op, int check_content) break; case SSTATE_INTERNED_IMMORTAL: CHECK(!ascii->state.statically_allocated); - CHECK(_Py_IsImmortal(op)); break; case SSTATE_INTERNED_IMMORTAL_STATIC: CHECK(ascii->state.statically_allocated); - CHECK(_Py_IsImmortal(op)); break; default: Py_UNREACHABLE(); @@ -1899,7 +1900,6 @@ static PyObject* get_latin1_char(Py_UCS1 ch) { PyObject *o = LATIN1(ch); - assert(_Py_IsImmortal(o)); return o; } @@ -14817,7 +14817,16 @@ unicode_vectorcall(PyObject *type, PyObject *const *args, return PyObject_Str(object); } const char *encoding = arg_as_utf8(args[1], "encoding"); - const char *errors = (nargs == 3) ? arg_as_utf8(args[2], "errors") : NULL; + if (encoding == NULL) { + return NULL; + } + const char *errors = NULL; + if (nargs == 3) { + errors = arg_as_utf8(args[2], "errors"); + if (errors == NULL) { + return NULL; + } + } return PyUnicode_FromEncodedObject(object, encoding, errors); } @@ -15049,27 +15058,14 @@ intern_static(PyInterpreterState *interp, PyObject *s /* stolen */) assert(s != NULL); assert(_PyUnicode_CHECK(s)); assert(_PyUnicode_STATE(s).statically_allocated); - assert(_Py_IsImmortal(s)); - - switch (PyUnicode_CHECK_INTERNED(s)) { - case SSTATE_NOT_INTERNED: - break; - case SSTATE_INTERNED_IMMORTAL_STATIC: - return s; - default: - Py_FatalError("_PyUnicode_InternStatic called on wrong string"); - } + assert(!PyUnicode_CHECK_INTERNED(s)); #ifdef Py_DEBUG /* We must not add process-global interned string if there's already a * per-interpreter interned_dict, which might contain duplicates. - * Except "short string" singletons: those are special-cased. */ + */ PyObject *interned = get_interned_dict(interp); - assert(interned == NULL || unicode_is_singleton(s)); -#ifdef Py_GIL_DISABLED - // In the free-threaded build, don't allow even the short strings. assert(interned == NULL); -#endif #endif /* Look in the global cache first. */ @@ -15141,11 +15137,6 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */, return s; } - /* Handle statically allocated strings. */ - if (_PyUnicode_STATE(s).statically_allocated) { - return intern_static(interp, s); - } - /* Is it already interned? 
*/ switch (PyUnicode_CHECK_INTERNED(s)) { case SSTATE_NOT_INTERNED: @@ -15162,6 +15153,9 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */, return s; } + /* Statically allocated strings must be already interned. */ + assert(!_PyUnicode_STATE(s).statically_allocated); + #if Py_GIL_DISABLED /* In the free-threaded build, all interned strings are immortal */ immortalize = 1; @@ -15172,13 +15166,11 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */, immortalize = 1; } - /* if it's a short string, get the singleton -- and intern it */ + /* if it's a short string, get the singleton */ if (PyUnicode_GET_LENGTH(s) == 1 && PyUnicode_KIND(s) == PyUnicode_1BYTE_KIND) { PyObject *r = LATIN1(*(unsigned char*)PyUnicode_DATA(s)); - if (!PyUnicode_CHECK_INTERNED(r)) { - r = intern_static(interp, r); - } + assert(PyUnicode_CHECK_INTERNED(r)); Py_DECREF(s); return r; } @@ -15190,7 +15182,7 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */, { PyObject *r = (PyObject *)_Py_hashtable_get(INTERNED_STRINGS, s); if (r != NULL) { - assert(_Py_IsImmortal(r)); + assert(_PyUnicode_STATE(r).statically_allocated); assert(r != s); // r must be statically_allocated; s is not Py_DECREF(s); return Py_NewRef(r); @@ -15280,7 +15272,7 @@ void PyUnicode_InternInPlace(PyObject **p) { PyInterpreterState *interp = _PyInterpreterState_GET(); - _PyUnicode_InternImmortal(interp, p); + _PyUnicode_InternMortal(interp, p); } // Public-looking name kept for the stable ABI; user should not call this: diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 0fcd37d949b..61f05514a48 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -426,6 +426,10 @@ get_or_create_weakref(PyTypeObject *type, PyObject *obj, PyObject *callback) return basic_ref; } PyWeakReference *newref = allocate_weakref(type, obj, callback); + if (newref == NULL) { + UNLOCK_WEAKREFS(obj); + return NULL; + } insert_weakref(newref, list); UNLOCK_WEAKREFS(obj); return newref; @@ -433,6 +437,9 @@ get_or_create_weakref(PyTypeObject *type, PyObject *obj, PyObject *callback) else { // We may not be able to safely allocate inside the lock PyWeakReference *newref = allocate_weakref(type, obj, callback); + if (newref == NULL) { + return NULL; + } LOCK_WEAKREFS(obj); insert_weakref(newref, list); UNLOCK_WEAKREFS(obj); diff --git a/PC/pyconfig.h.in b/PC/pyconfig.h.in index d72d6282c28..424421f6ff1 100644 --- a/PC/pyconfig.h.in +++ b/PC/pyconfig.h.in @@ -169,9 +169,9 @@ WIN32 is still required for the locale module. 
#endif /* MS_WIN64 */ /* set the version macros for the windows headers */ -/* Python 3.9+ requires Windows 8 or greater */ -#define Py_WINVER 0x0602 /* _WIN32_WINNT_WIN8 */ -#define Py_NTDDI NTDDI_WIN8 +/* Python 3.12+ requires Windows 8.1 or greater */ +#define Py_WINVER 0x0603 /* _WIN32_WINNT_WINBLUE (8.1) */ +#define Py_NTDDI NTDDI_WINBLUE /* We only set these values when building Python - we don't want to force these values on extensions, as that will affect the prototypes and diff --git a/PCbuild/_testlimitedcapi.vcxproj b/PCbuild/_testlimitedcapi.vcxproj index 252039d9310..7e5809fec31 100644 --- a/PCbuild/_testlimitedcapi.vcxproj +++ b/PCbuild/_testlimitedcapi.vcxproj @@ -107,6 +107,7 @@ + diff --git a/PCbuild/_testlimitedcapi.vcxproj.filters b/PCbuild/_testlimitedcapi.vcxproj.filters index 7efbb0acf8f..47f059040be 100644 --- a/PCbuild/_testlimitedcapi.vcxproj.filters +++ b/PCbuild/_testlimitedcapi.vcxproj.filters @@ -23,6 +23,7 @@ + diff --git a/PCbuild/find_python.bat b/PCbuild/find_python.bat index af85f6d3624..6db579fa8de 100644 --- a/PCbuild/find_python.bat +++ b/PCbuild/find_python.bat @@ -39,15 +39,15 @@ @if "%_Py_EXTERNALS_DIR%"=="" (set _Py_EXTERNALS_DIR=%_Py_D%\..\externals) @rem If we have Python in externals, use that one -@if exist "%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" ("%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" -Ec "import sys; assert sys.version_info[:2] >= (3, 8)" >nul 2>nul) && (set PYTHON="%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe") && (set _Py_Python_Source=found in externals directory) && goto :found || rmdir /Q /S "%_Py_EXTERNALS_DIR%\pythonx86" +@if exist "%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" ("%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" -Ec "import sys; assert sys.version_info[:2] >= (3, 10)" >nul 2>nul) && (set PYTHON="%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe") && (set _Py_Python_Source=found in externals directory) && goto :found || rmdir /Q /S "%_Py_EXTERNALS_DIR%\pythonx86" @rem If HOST_PYTHON is recent enough, use that -@if NOT "%HOST_PYTHON%"=="" @%HOST_PYTHON% -Ec "import sys; assert sys.version_info[:2] >= (3, 9)" >nul 2>nul && (set PYTHON="%HOST_PYTHON%") && (set _Py_Python_Source=found as HOST_PYTHON) && goto :found +@if NOT "%HOST_PYTHON%"=="" @%HOST_PYTHON% -Ec "import sys; assert sys.version_info[:2] >= (3, 10)" >nul 2>nul && (set PYTHON="%HOST_PYTHON%") && (set _Py_Python_Source=found as HOST_PYTHON) && goto :found @rem If py.exe finds a recent enough version, use that one @rem It is fine to add new versions to this list when they have released, @rem but we do not use prerelease builds here. -@for %%p in (3.12 3.11 3.10 3.9) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found +@for %%p in (3.12 3.11 3.10) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found @if NOT exist "%_Py_EXTERNALS_DIR%" mkdir "%_Py_EXTERNALS_DIR%" @set _Py_NUGET=%NUGET% diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index 1927938ef08..a1a67966182 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -53,7 +53,7 @@ echo.Fetching external libraries... 
set libraries= set libraries=%libraries% bzip2-1.0.8 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.4 -if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.13 +if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.15 set libraries=%libraries% mpdecimal-4.0.0 set libraries=%libraries% sqlite-3.45.3.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.14.0 @@ -77,7 +77,7 @@ echo.Fetching external binaries... set binaries= if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.4.4 -if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.13 +if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.15 if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.14.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 diff --git a/PCbuild/python.props b/PCbuild/python.props index 86fe8531d7d..c8ecdb4515a 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -75,8 +75,8 @@ $(libffiDir)$(ArchName)\ $(libffiOutDir)include $(ExternalsDir)\mpdecimal-4.0.0\ - $(ExternalsDir)openssl-3.0.13\ - $(ExternalsDir)openssl-bin-3.0.13\$(ArchName)\ + $(ExternalsDir)openssl-3.0.15\ + $(ExternalsDir)openssl-bin-3.0.15\$(ArchName)\ $(opensslOutDir)include $(ExternalsDir)\nasm-2.11.06\ $(ExternalsDir)\zlib-1.3.1\ diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 33ab50fb8c4..70212903c83 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -163,6 +163,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 828f14db34e..3eed5a9465b 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -429,6 +429,9 @@ Include\cpython + + Include\cpython + Include\cpython diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets index 4aa14ed1fad..416241d9d0d 100644 --- a/PCbuild/regen.targets +++ b/PCbuild/regen.targets @@ -90,23 +90,23 @@ Inputs="@(_CasesSources)" Outputs="@(_CasesOutputs)" DependsOnTargets="FindPythonForBuild"> - - - - - - - - - diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index 6353bacb5c6..34f744516c1 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -961,6 +961,8 @@ _PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv) return result_token_with_metadata(p, conv, conv_token->metadata); } +static asdl_expr_seq * +unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions); ResultTokenWithMetadata * _PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset, int end_lineno, int end_col_offset, PyArena *arena) @@ -999,8 +1001,16 @@ _PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, in assert(j == non_empty_count); spec = resized_spec; } - expr_ty res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno, - end_col_offset, p->arena); + expr_ty res; + Py_ssize_t n = asdl_seq_LEN(spec); + if (n == 0 || (n == 1 && asdl_seq_GET(spec, 0)->kind == Constant_kind)) { + res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno, + end_col_offset, p->arena); + } else { + res = _PyPegen_concatenate_strings(p, spec, + lineno, col_offset, end_lineno, + end_col_offset, arena); + } if (!res) { return NULL; } @@ -1300,6 +1310,7 @@ unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) expr_ty _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) { + 
asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions); Py_ssize_t n_items = asdl_seq_LEN(expr); @@ -1464,7 +1475,6 @@ expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, Re debug_end_offset = end_col_offset; debug_metadata = closing_brace->metadata; } - expr_ty debug_text = _PyAST_Constant(debug_metadata, NULL, lineno, col_offset + 1, debug_end_line, debug_end_offset - 1, p->arena); if (!debug_text) { @@ -1497,16 +1507,23 @@ _PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings, Py_ssize_t n_flattened_elements = 0; for (i = 0; i < len; i++) { expr_ty elem = asdl_seq_GET(strings, i); - if (elem->kind == Constant_kind) { - if (PyBytes_CheckExact(elem->v.Constant.value)) { - bytes_found = 1; - } else { - unicode_string_found = 1; - } - n_flattened_elements++; - } else { - n_flattened_elements += asdl_seq_LEN(elem->v.JoinedStr.values); - f_string_found = 1; + switch(elem->kind) { + case Constant_kind: + if (PyBytes_CheckExact(elem->v.Constant.value)) { + bytes_found = 1; + } else { + unicode_string_found = 1; + } + n_flattened_elements++; + break; + case JoinedStr_kind: + n_flattened_elements += asdl_seq_LEN(elem->v.JoinedStr.values); + f_string_found = 1; + break; + default: + n_flattened_elements++; + f_string_found = 1; + break; } } @@ -1548,16 +1565,19 @@ _PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings, Py_ssize_t j = 0; for (i = 0; i < len; i++) { expr_ty elem = asdl_seq_GET(strings, i); - if (elem->kind == Constant_kind) { - asdl_seq_SET(flattened, current_pos++, elem); - } else { - for (j = 0; j < asdl_seq_LEN(elem->v.JoinedStr.values); j++) { - expr_ty subvalue = asdl_seq_GET(elem->v.JoinedStr.values, j); - if (subvalue == NULL) { - return NULL; + switch(elem->kind) { + case JoinedStr_kind: + for (j = 0; j < asdl_seq_LEN(elem->v.JoinedStr.values); j++) { + expr_ty subvalue = asdl_seq_GET(elem->v.JoinedStr.values, j); + if (subvalue == NULL) { + return NULL; + } + asdl_seq_SET(flattened, current_pos++, subvalue); } - asdl_seq_SET(flattened, current_pos++, subvalue); - } + break; + default: + asdl_seq_SET(flattened, current_pos++, elem); + break; } } diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index e338656a5b1..3711cf1280f 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -880,7 +880,7 @@ def visitModule(self, mod): Py_ssize_t i, numfields = 0; int res = -1; - PyObject *key, *value, *fields, *remaining_fields = NULL; + PyObject *key, *value, *fields, *attributes = NULL, *remaining_fields = NULL; if (PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), state->_fields, &fields) < 0) { goto cleanup; } @@ -947,22 +947,32 @@ def visitModule(self, mod): goto cleanup; } } - else if ( - PyUnicode_CompareWithASCIIString(key, "lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "col_offset") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_col_offset") != 0 - ) { - if (PyErr_WarnFormat( - PyExc_DeprecationWarning, 1, - "%.400s.__init__ got an unexpected keyword argument '%U'. 
" - "Support for arbitrary keyword arguments is deprecated " - "and will be removed in Python 3.15.", - Py_TYPE(self)->tp_name, key - ) < 0) { + else { + // Lazily initialize "attributes" + if (attributes == NULL) { + attributes = PyObject_GetAttr((PyObject*)Py_TYPE(self), state->_attributes); + if (attributes == NULL) { + res = -1; + goto cleanup; + } + } + int contains = PySequence_Contains(attributes, key); + if (contains == -1) { res = -1; goto cleanup; } + else if (contains == 0) { + if (PyErr_WarnFormat( + PyExc_DeprecationWarning, 1, + "%.400s.__init__ got an unexpected keyword argument '%U'. " + "Support for arbitrary keyword arguments is deprecated " + "and will be removed in Python 3.15.", + Py_TYPE(self)->tp_name, key + ) < 0) { + res = -1; + goto cleanup; + } + } } res = PyObject_SetAttr(self, key, value); if (res < 0) { @@ -1045,6 +1055,7 @@ def visitModule(self, mod): Py_DECREF(field_types); } cleanup: + Py_XDECREF(attributes); Py_XDECREF(fields); Py_XDECREF(remaining_fields); return res; diff --git a/Parser/lexer/lexer.c b/Parser/lexer/lexer.c index 82b0e4ee352..8c868593f94 100644 --- a/Parser/lexer/lexer.c +++ b/Parser/lexer/lexer.c @@ -884,7 +884,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t return MAKE_TOKEN(ERRORTOKEN); } { - /* Accept floating point numbers. */ + /* Accept floating-point numbers. */ if (c == '.') { c = tok_nextc(tok); fraction: @@ -989,6 +989,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t the_current_tok->last_expr_buffer = NULL; the_current_tok->last_expr_size = 0; the_current_tok->last_expr_end = -1; + the_current_tok->in_format_spec = 0; the_current_tok->f_string_debug = 0; switch (*tok->start) { @@ -1137,15 +1138,20 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t * by the `{` case, so for ensuring that we are on the 0th level, we need * to adjust it manually */ int cursor = current_tok->curly_bracket_depth - (c != '{'); - if (cursor == 0 && !_PyLexer_update_fstring_expr(tok, c)) { + int in_format_spec = current_tok->in_format_spec; + int cursor_in_format_with_debug = + cursor == 1 && (current_tok->f_string_debug || in_format_spec); + int cursor_valid = cursor == 0 || cursor_in_format_with_debug; + if ((cursor_valid) && !_PyLexer_update_fstring_expr(tok, c)) { return MAKE_TOKEN(ENDMARKER); } - if (cursor == 0 && c != '{' && set_fstring_expr(tok, token, c)) { + if ((cursor_valid) && c != '{' && set_fstring_expr(tok, token, c)) { return MAKE_TOKEN(ERRORTOKEN); } if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) { current_tok->kind = TOK_FSTRING_MODE; + current_tok->in_format_spec = 1; p_start = tok->start; p_end = tok->cur; return MAKE_TOKEN(_PyToken_OneChar(c)); @@ -1232,9 +1238,13 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t if (INSIDE_FSTRING(tok)) { current_tok->curly_bracket_depth--; + if (current_tok->curly_bracket_depth < 0) { + return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, "f-string: unmatched '%c'", c)); + } if (c == '}' && current_tok->curly_bracket_depth == current_tok->curly_bracket_expr_start_depth) { current_tok->curly_bracket_expr_start_depth--; current_tok->kind = TOK_FSTRING_MODE; + current_tok->in_format_spec = 0; current_tok->f_string_debug = 0; } } @@ -1317,11 +1327,11 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct tok->multi_line_start = tok->line_start; while (end_quote_size != current_tok->f_string_quote_size) { int c = 
tok_nextc(tok); - if (tok->done == E_ERROR) { + if (tok->done == E_ERROR || tok->done == E_DECODE) { return MAKE_TOKEN(ERRORTOKEN); } int in_format_spec = ( - current_tok->last_expr_end != -1 + current_tok->in_format_spec && INSIDE_FSTRING_EXPR(current_tok) ); @@ -1337,6 +1347,7 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct if (in_format_spec && c == '\n') { tok_backup(tok, c); TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; + current_tok->in_format_spec = 0; p_start = tok->start; p_end = tok->cur; return MAKE_TOKEN(FSTRING_MIDDLE); @@ -1378,6 +1389,9 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct } if (c == '{') { + if (!_PyLexer_update_fstring_expr(tok, c)) { + return MAKE_TOKEN(ENDMARKER); + } int peek = tok_nextc(tok); if (peek != '{' || in_format_spec) { tok_backup(tok, peek); @@ -1387,6 +1401,7 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, "f-string: expressions nested too deeply")); } TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; + current_tok->in_format_spec = 0; p_start = tok->start; p_end = tok->cur; } else { @@ -1406,13 +1421,15 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct // scanning (indicated by the end of the expression being set) and we are not at the top level // of the bracket stack (-1 is the top level). Since format specifiers can't legally use double // brackets, we can bypass it here. - if (peek == '}' && !in_format_spec) { + int cursor = current_tok->curly_bracket_depth; + if (peek == '}' && !in_format_spec && cursor == 0) { p_start = tok->start; p_end = tok->cur - 1; } else { tok_backup(tok, peek); tok_backup(tok, c); TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; + current_tok->in_format_spec = 0; p_start = tok->start; p_end = tok->cur; } diff --git a/Parser/lexer/state.c b/Parser/lexer/state.c index 653ddafd411..1665debea30 100644 --- a/Parser/lexer/state.c +++ b/Parser/lexer/state.c @@ -12,7 +12,8 @@ struct tok_state * _PyTokenizer_tok_new(void) { - struct tok_state *tok = (struct tok_state *)PyMem_Malloc( + struct tok_state *tok = (struct tok_state *)PyMem_Calloc( + 1, sizeof(struct tok_state)); if (tok == NULL) return NULL; @@ -74,6 +75,7 @@ free_fstring_expressions(struct tok_state *tok) mode->last_expr_buffer = NULL; mode->last_expr_size = 0; mode->last_expr_end = -1; + mode->in_format_spec = 0; } } } diff --git a/Parser/lexer/state.h b/Parser/lexer/state.h index 61d090d6d2f..9ed3babfdbf 100644 --- a/Parser/lexer/state.h +++ b/Parser/lexer/state.h @@ -58,6 +58,7 @@ typedef struct _tokenizer_mode { Py_ssize_t last_expr_end; char* last_expr_buffer; int f_string_debug; + int in_format_spec; } tokenizer_mode; /* Tokenizer state */ diff --git a/Parser/myreadline.c b/Parser/myreadline.c index 18256653548..74c44ff7771 100644 --- a/Parser/myreadline.c +++ b/Parser/myreadline.c @@ -28,7 +28,7 @@ PyAPI_DATA(PyThreadState*) _PyOS_ReadlineTState; PyThreadState *_PyOS_ReadlineTState = NULL; -static PyThread_type_lock _PyOS_ReadlineLock = NULL; +static PyMutex _PyOS_ReadlineLock; int (*PyOS_InputHook)(void) = NULL; @@ -373,29 +373,22 @@ PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) size_t len; PyThreadState *tstate = _PyThreadState_GET(); - if (_PyOS_ReadlineTState == tstate) { + if (_Py_atomic_load_ptr_relaxed(&_PyOS_ReadlineTState) == tstate) { PyErr_SetString(PyExc_RuntimeError, "can't re-enter readline"); return NULL; } - + // GH-123321: We need to acquire the 
lock before setting + // _PyOS_ReadlineTState, otherwise the variable may be nullified by a + // different thread. + Py_BEGIN_ALLOW_THREADS + PyMutex_Lock(&_PyOS_ReadlineLock); + _Py_atomic_store_ptr_relaxed(&_PyOS_ReadlineTState, tstate); if (PyOS_ReadlineFunctionPointer == NULL) { PyOS_ReadlineFunctionPointer = PyOS_StdioReadline; } - if (_PyOS_ReadlineLock == NULL) { - _PyOS_ReadlineLock = PyThread_allocate_lock(); - if (_PyOS_ReadlineLock == NULL) { - PyErr_SetString(PyExc_MemoryError, "can't allocate lock"); - return NULL; - } - } - - _PyOS_ReadlineTState = tstate; - Py_BEGIN_ALLOW_THREADS - PyThread_acquire_lock(_PyOS_ReadlineLock, 1); - /* This is needed to handle the unlikely case that the * interpreter is in interactive mode *and* stdin/out are not * a tty. This can happen, for example if python is run like @@ -418,11 +411,12 @@ PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) else { rv = (*PyOS_ReadlineFunctionPointer)(sys_stdin, sys_stdout, prompt); } - Py_END_ALLOW_THREADS - PyThread_release_lock(_PyOS_ReadlineLock); - - _PyOS_ReadlineTState = NULL; + // gh-123321: Must set the variable and then release the lock before + // taking the GIL. Otherwise a deadlock or segfault may occur. + _Py_atomic_store_ptr_relaxed(&_PyOS_ReadlineTState, NULL); + PyMutex_Unlock(&_PyOS_ReadlineLock); + Py_END_ALLOW_THREADS if (rv == NULL) return NULL; diff --git a/Parser/pegen.c b/Parser/pegen.c index 6efb5477c7b..0c3c4689dd7 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -296,12 +296,22 @@ _PyPegen_fill_token(Parser *p) #define NSTATISTICS _PYPEGEN_NSTATISTICS #define memo_statistics _PyRuntime.parser.memo_statistics +#ifdef Py_GIL_DISABLED +#define MUTEX_LOCK() PyMutex_Lock(&_PyRuntime.parser.mutex) +#define MUTEX_UNLOCK() PyMutex_Unlock(&_PyRuntime.parser.mutex) +#else +#define MUTEX_LOCK() +#define MUTEX_UNLOCK() +#endif + void _PyPegen_clear_memo_statistics(void) { + MUTEX_LOCK(); for (int i = 0; i < NSTATISTICS; i++) { memo_statistics[i] = 0; } + MUTEX_UNLOCK(); } PyObject * @@ -311,18 +321,23 @@ _PyPegen_get_memo_statistics(void) if (ret == NULL) { return NULL; } + + MUTEX_LOCK(); for (int i = 0; i < NSTATISTICS; i++) { PyObject *value = PyLong_FromLong(memo_statistics[i]); if (value == NULL) { + MUTEX_UNLOCK(); Py_DECREF(ret); return NULL; } // PyList_SetItem borrows a reference to value. if (PyList_SetItem(ret, i, value) < 0) { + MUTEX_UNLOCK(); Py_DECREF(ret); return NULL; } } + MUTEX_UNLOCK(); return ret; } #endif @@ -341,14 +356,16 @@ _PyPegen_is_memoized(Parser *p, int type, void *pres) for (Memo *m = t->memo; m != NULL; m = m->next) { if (m->type == type) { -#if defined(PY_DEBUG) +#if defined(Py_DEBUG) if (0 <= type && type < NSTATISTICS) { long count = m->mark - p->mark; // A memoized negative result counts for one. if (count <= 0) { count = 1; } + MUTEX_LOCK(); memo_statistics[type] += count; + MUTEX_UNLOCK(); } #endif p->mark = m->mark; diff --git a/Parser/string_parser.c b/Parser/string_parser.c index 93ad92b8235..9537c543b0e 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -18,7 +18,7 @@ warn_invalid_escape_sequence(Parser *p, const char *first_invalid_escape, Token // to avoid showing the warning twice. 
return 0; } - unsigned char c = *first_invalid_escape; + unsigned char c = (unsigned char)*first_invalid_escape; if ((t->type == FSTRING_MIDDLE || t->type == FSTRING_END) && (c == '{' || c == '}')) { // in this case the tokenizer has already emitted a warning, // see Parser/tokenizer/helpers.c:warn_invalid_escape_sequence @@ -90,12 +90,12 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) const char *end; /* check for integer overflow */ - if (len > SIZE_MAX / 6) { + if (len > (size_t)PY_SSIZE_T_MAX / 6) { return NULL; } /* "ä" (2 bytes) may become "\U000000E4" (10 bytes), or 1:5 "\ä" (3 bytes) may become "\u005c\U000000E4" (16 bytes), or ~1:6 */ - u = PyBytes_FromStringAndSize((char *)NULL, len * 6); + u = PyBytes_FromStringAndSize((char *)NULL, (Py_ssize_t)len * 6); if (u == NULL) { return NULL; } @@ -142,11 +142,11 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) *p++ = *s++; } } - len = p - buf; + len = (size_t)(p - buf); s = buf; const char *first_invalid_escape; - v = _PyUnicode_DecodeUnicodeEscapeInternal(s, len, NULL, NULL, &first_invalid_escape); + v = _PyUnicode_DecodeUnicodeEscapeInternal(s, (Py_ssize_t)len, NULL, NULL, &first_invalid_escape); // HACK: later we can simply pass the line no, since we don't preserve the tokens // when we are decoding the string but we preserve the line numbers. @@ -185,7 +185,7 @@ PyObject * _PyPegen_decode_string(Parser *p, int raw, const char *s, size_t len, Token *t) { if (raw) { - return PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL); + return PyUnicode_DecodeUTF8Stateful(s, (Py_ssize_t)len, NULL, NULL); } return decode_unicode_with_escapes(p, s, len, t); } @@ -274,9 +274,9 @@ _PyPegen_parse_string(Parser *p, Token *t) } } if (rawmode) { - return PyBytes_FromStringAndSize(s, len); + return PyBytes_FromStringAndSize(s, (Py_ssize_t)len); } - return decode_bytes_with_escapes(p, s, len, t); + return decode_bytes_with_escapes(p, s, (Py_ssize_t)len, t); } return _PyPegen_decode_string(p, rawmode, s, len, t); } diff --git a/Programs/_freeze_module.c b/Programs/_freeze_module.c index 2a462a42cda..891e4256e89 100644 --- a/Programs/_freeze_module.c +++ b/Programs/_freeze_module.c @@ -110,6 +110,9 @@ static PyObject * compile_and_marshal(const char *name, const char *text) { char *filename = (char *) malloc(strlen(name) + 10); + if (filename == NULL) { + return PyErr_NoMemory(); + } sprintf(filename, "", name); PyObject *code = Py_CompileStringExFlags(text, filename, Py_file_input, NULL, 0); @@ -133,6 +136,9 @@ get_varname(const char *name, const char *prefix) { size_t n = strlen(prefix); char *varname = (char *) malloc(strlen(name) + n + 1); + if (varname == NULL) { + return NULL; + } (void)strcpy(varname, prefix); for (size_t i = 0; name[i] != '\0'; i++) { if (name[i] == '.') { @@ -178,6 +184,11 @@ write_frozen(const char *outpath, const char *inpath, const char *name, fprintf(outfile, "%s\n", header); char *arrayname = get_varname(name, "_Py_M__"); + if (arrayname == NULL) { + fprintf(stderr, "memory error: could not allocate varname\n"); + fclose(outfile); + return -1; + } write_code(outfile, marshalled, arrayname); free(arrayname); diff --git a/Programs/_testembed.c b/Programs/_testembed.c index d149b6a0c5c..96dbfabd7ed 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -8,6 +8,7 @@ #include #include "pycore_initconfig.h" // _PyConfig_InitCompatConfig() #include "pycore_runtime.h" // _PyRuntime +#include "pycore_pythread.h" // PyThread_start_joinable_thread() 
#include "pycore_import.h" // _PyImport_FrozenBootstrap #include #include @@ -93,6 +94,14 @@ static void _testembed_Py_Initialize(void) } +static int test_import_in_subinterpreters(void) +{ + _testembed_Py_InitializeFromConfig(); + PyThreadState_Swap(Py_NewInterpreter()); + return PyRun_SimpleString("import readline"); // gh-124160 +} + + /***************************************************** * Test repeated initialisation and subinterpreters *****************************************************/ @@ -170,15 +179,23 @@ PyInit_embedded_ext(void) static int test_repeated_init_exec(void) { if (main_argc < 3) { - fprintf(stderr, "usage: %s test_repeated_init_exec CODE\n", PROGRAM); + fprintf(stderr, + "usage: %s test_repeated_init_exec CODE ...\n", PROGRAM); exit(1); } const char *code = main_argv[2]; + int loops = main_argc > 3 + ? main_argc - 2 + : INIT_LOOPS; - for (int i=1; i <= INIT_LOOPS; i++) { - fprintf(stderr, "--- Loop #%d ---\n", i); + for (int i=0; i < loops; i++) { + fprintf(stderr, "--- Loop #%d ---\n", i+1); fflush(stderr); + if (main_argc > 3) { + code = main_argv[i+2]; + } + _testembed_Py_InitializeFromConfig(); int err = PyRun_SimpleString(code); Py_Finalize(); @@ -2014,6 +2031,22 @@ static int test_init_main_interpreter_settings(void) return 0; } +static void do_init(void *unused) +{ + _testembed_Py_Initialize(); + Py_Finalize(); +} + +static int test_init_in_background_thread(void) +{ + PyThread_handle_t handle; + PyThread_ident_t ident; + if (PyThread_start_joinable_thread(&do_init, NULL, &ident, &handle) < 0) { + return -1; + } + return PyThread_join_thread(handle); +} + #ifndef MS_WINDOWS #include "test_frozenmain.h" // M_test_frozenmain @@ -2159,6 +2192,7 @@ static struct TestCase TestCases[] = { {"test_repeated_init_exec", test_repeated_init_exec}, {"test_repeated_simple_init", test_repeated_simple_init}, {"test_forced_io_encoding", test_forced_io_encoding}, + {"test_import_in_subinterpreters", test_import_in_subinterpreters}, {"test_repeated_init_and_subinterpreters", test_repeated_init_and_subinterpreters}, {"test_repeated_init_and_inittab", test_repeated_init_and_inittab}, {"test_pre_initialization_api", test_pre_initialization_api}, @@ -2203,6 +2237,7 @@ static struct TestCase TestCases[] = { {"test_get_argc_argv", test_get_argc_argv}, {"test_init_use_frozen_modules", test_init_use_frozen_modules}, {"test_init_main_interpreter_settings", test_init_main_interpreter_settings}, + {"test_init_in_background_thread", test_init_in_background_thread}, // Audit {"test_open_code_hook", test_open_code_hook}, diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 01ffea18693..e38a1452715 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -5079,7 +5079,7 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) Py_ssize_t i, numfields = 0; int res = -1; - PyObject *key, *value, *fields, *remaining_fields = NULL; + PyObject *key, *value, *fields, *attributes = NULL, *remaining_fields = NULL; if (PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), state->_fields, &fields) < 0) { goto cleanup; } @@ -5146,22 +5146,32 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) goto cleanup; } } - else if ( - PyUnicode_CompareWithASCIIString(key, "lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "col_offset") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_col_offset") != 0 - ) { - if (PyErr_WarnFormat( - PyExc_DeprecationWarning, 1, - "%.400s.__init__ got an unexpected keyword argument 
'%U'. " - "Support for arbitrary keyword arguments is deprecated " - "and will be removed in Python 3.15.", - Py_TYPE(self)->tp_name, key - ) < 0) { + else { + // Lazily initialize "attributes" + if (attributes == NULL) { + attributes = PyObject_GetAttr((PyObject*)Py_TYPE(self), state->_attributes); + if (attributes == NULL) { + res = -1; + goto cleanup; + } + } + int contains = PySequence_Contains(attributes, key); + if (contains == -1) { res = -1; goto cleanup; } + else if (contains == 0) { + if (PyErr_WarnFormat( + PyExc_DeprecationWarning, 1, + "%.400s.__init__ got an unexpected keyword argument '%U'. " + "Support for arbitrary keyword arguments is deprecated " + "and will be removed in Python 3.15.", + Py_TYPE(self)->tp_name, key + ) < 0) { + res = -1; + goto cleanup; + } + } } res = PyObject_SetAttr(self, key, value); if (res < 0) { @@ -5244,6 +5254,7 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) Py_DECREF(field_types); } cleanup: + Py_XDECREF(attributes); Py_XDECREF(fields); Py_XDECREF(remaining_fields); return res; diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c index 55c821754c2..34b4445be27 100644 --- a/Python/Python-tokenize.c +++ b/Python/Python-tokenize.c @@ -1,9 +1,10 @@ #include "Python.h" #include "errcode.h" +#include "internal/pycore_critical_section.h" // Py_BEGIN_CRITICAL_SECTION #include "../Parser/lexer/state.h" #include "../Parser/lexer/lexer.h" #include "../Parser/tokenizer/tokenizer.h" -#include "../Parser/pegen.h" // _PyPegen_byte_offset_to_character_offset() +#include "../Parser/pegen.h" // _PyPegen_byte_offset_to_character_offset() static struct PyModuleDef _tokenizemodule; @@ -84,14 +85,16 @@ tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline, } static int -_tokenizer_error(struct tok_state *tok) +_tokenizer_error(tokenizeriterobject *it) { + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it); if (PyErr_Occurred()) { return -1; } const char *msg = NULL; PyObject* errtype = PyExc_SyntaxError; + struct tok_state *tok = it->tok; switch (tok->done) { case E_TOKEN: msg = "invalid token"; @@ -177,17 +180,78 @@ _tokenizer_error(struct tok_state *tok) return result; } +static PyObject * +_get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t size, + int *line_changed) +{ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it); + PyObject *line; + if (it->tok->lineno != it->last_lineno) { + // Line has changed since last token, so we fetch the new line and cache it + // in the iter object. 
+ Py_XDECREF(it->last_line); + line = PyUnicode_DecodeUTF8(line_start, size, "replace"); + it->last_line = line; + it->byte_col_offset_diff = 0; + } + else { + line = it->last_line; + *line_changed = 0; + } + return line; +} + +static void +_get_col_offsets(tokenizeriterobject *it, struct token token, const char *line_start, + PyObject *line, int line_changed, Py_ssize_t lineno, Py_ssize_t end_lineno, + Py_ssize_t *col_offset, Py_ssize_t *end_col_offset) +{ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it); + Py_ssize_t byte_offset = -1; + if (token.start != NULL && token.start >= line_start) { + byte_offset = token.start - line_start; + if (line_changed) { + *col_offset = _PyPegen_byte_offset_to_character_offset_line(line, 0, byte_offset); + it->byte_col_offset_diff = byte_offset - *col_offset; + } + else { + *col_offset = byte_offset - it->byte_col_offset_diff; + } + } + + if (token.end != NULL && token.end >= it->tok->line_start) { + Py_ssize_t end_byte_offset = token.end - it->tok->line_start; + if (lineno == end_lineno) { + // If the whole token is at the same line, we can just use the token.start + // buffer for figuring out the new column offset, since using line is not + // performant for very long lines. + Py_ssize_t token_col_offset = _PyPegen_byte_offset_to_character_offset_line(line, byte_offset, end_byte_offset); + *end_col_offset = *col_offset + token_col_offset; + it->byte_col_offset_diff += token.end - token.start - token_col_offset; + } + else { + *end_col_offset = _PyPegen_byte_offset_to_character_offset_raw(it->tok->line_start, end_byte_offset); + it->byte_col_offset_diff += end_byte_offset - *end_col_offset; + } + } + it->last_lineno = lineno; + it->last_end_lineno = end_lineno; +} + static PyObject * tokenizeriter_next(tokenizeriterobject *it) { PyObject* result = NULL; + + Py_BEGIN_CRITICAL_SECTION(it); + struct token token; _PyToken_Init(&token); int type = _PyTokenizer_Get(it->tok, &token); if (type == ERRORTOKEN) { if(!PyErr_Occurred()) { - _tokenizer_error(it->tok); + _tokenizer_error(it); assert(PyErr_Occurred()); } goto exit; @@ -224,18 +288,7 @@ tokenizeriter_next(tokenizeriterobject *it) size -= 1; } - if (it->tok->lineno != it->last_lineno) { - // Line has changed since last token, so we fetch the new line and cache it - // in the iter object. - Py_XDECREF(it->last_line); - line = PyUnicode_DecodeUTF8(line_start, size, "replace"); - it->last_line = line; - it->byte_col_offset_diff = 0; - } else { - // Line hasn't changed so we reuse the cached one. - line = it->last_line; - line_changed = 0; - } + line = _get_current_line(it, line_start, size, &line_changed); } if (line == NULL) { Py_DECREF(str); @@ -244,36 +297,10 @@ tokenizeriter_next(tokenizeriterobject *it) Py_ssize_t lineno = ISSTRINGLIT(type) ? 
it->tok->first_lineno : it->tok->lineno; Py_ssize_t end_lineno = it->tok->lineno; - it->last_lineno = lineno; - it->last_end_lineno = end_lineno; - Py_ssize_t col_offset = -1; Py_ssize_t end_col_offset = -1; - Py_ssize_t byte_offset = -1; - if (token.start != NULL && token.start >= line_start) { - byte_offset = token.start - line_start; - if (line_changed) { - col_offset = _PyPegen_byte_offset_to_character_offset_line(line, 0, byte_offset); - it->byte_col_offset_diff = byte_offset - col_offset; - } - else { - col_offset = byte_offset - it->byte_col_offset_diff; - } - } - if (token.end != NULL && token.end >= it->tok->line_start) { - Py_ssize_t end_byte_offset = token.end - it->tok->line_start; - if (lineno == end_lineno) { - // If the whole token is at the same line, we can just use the token.start - // buffer for figuring out the new column offset, since using line is not - // performant for very long lines. - Py_ssize_t token_col_offset = _PyPegen_byte_offset_to_character_offset_line(line, byte_offset, end_byte_offset); - end_col_offset = col_offset + token_col_offset; - it->byte_col_offset_diff += token.end - token.start - token_col_offset; - } else { - end_col_offset = _PyPegen_byte_offset_to_character_offset_raw(it->tok->line_start, end_byte_offset); - it->byte_col_offset_diff += end_byte_offset - end_col_offset; - } - } + _get_col_offsets(it, token, line_start, line, line_changed, + lineno, end_lineno, &col_offset, &end_col_offset); if (it->tok->tok_extra_tokens) { if (is_trailing_token) { @@ -315,6 +342,8 @@ tokenizeriter_next(tokenizeriterobject *it) if (type == ENDMARKER) { it->done = 1; } + + Py_END_CRITICAL_SECTION(); return result; } diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 6d1bfafef3c..fa6bc70d748 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -1086,10 +1086,13 @@ astfold_type_param(type_param_ty node_, PyArena *ctx_, _PyASTOptimizeState *stat switch (node_->kind) { case TypeVar_kind: CALL_OPT(astfold_expr, expr_ty, node_->v.TypeVar.bound); + CALL_OPT(astfold_expr, expr_ty, node_->v.TypeVar.default_value); break; case ParamSpec_kind: + CALL_OPT(astfold_expr, expr_ty, node_->v.ParamSpec.default_value); break; case TypeVarTuple_kind: + CALL_OPT(astfold_expr, expr_ty, node_->v.TypeVarTuple.default_value); break; } return 1; diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 8f368578767..5afa94cf221 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2601,8 +2601,8 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) b = PyLong_AsLongAndOverflow(item, &overflow); } if (overflow == 0 && - (i_result >= 0 ? (b <= LONG_MAX - i_result) - : (b >= LONG_MIN - i_result))) + (i_result >= 0 ? 
(b <= PY_SSIZE_T_MAX - i_result) + : (b >= PY_SSIZE_T_MIN - i_result))) { i_result += b; Py_DECREF(item); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 0dc60bbbb6a..1e6185d3c9e 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -362,7 +362,7 @@ dummy_func( EXIT_IF(!PyLong_CheckExact(value)); STAT_INC(TO_BOOL, hit); if (_PyLong_IsZero((PyLongObject *)value)) { - assert(_Py_IsImmortal(value)); + assert(_Py_IsImmortalLoose(value)); res = Py_False; } else { @@ -389,7 +389,7 @@ dummy_func( EXIT_IF(!PyUnicode_CheckExact(value)); STAT_INC(TO_BOOL, hit); if (value == &_Py_STR(empty)) { - assert(_Py_IsImmortal(value)); + assert(_Py_IsImmortalLoose(value)); res = Py_False; } else { @@ -1037,7 +1037,7 @@ dummy_func( if (retval == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration) ) { - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); } if (_PyGen_FetchStopIterationValue(&retval) == 0) { assert(retval != NULL); @@ -2170,14 +2170,15 @@ dummy_func( new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, value); ep->me_value = value; } - Py_DECREF(old_value); - STAT_INC(STORE_ATTR, hit); /* Ensure dict is GC tracked if it needs to be */ if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { _PyObject_GC_TRACK(dict); } - /* PEP 509 */ - dict->ma_version_tag = new_version; + dict->ma_version_tag = new_version; // PEP 509 + // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, + // when dict only holds the strong reference to value in ep->me_value. + Py_DECREF(old_value); + STAT_INC(STORE_ATTR, hit); Py_DECREF(owner); } @@ -2603,7 +2604,7 @@ dummy_func( if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { ERROR_NO_POP(); } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -2626,6 +2627,7 @@ dummy_func( if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { ERROR_NO_POP(); } + _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -2650,7 +2652,7 @@ dummy_func( if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { ERROR_NO_POP(); } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -2695,7 +2697,10 @@ dummy_func( assert(Py_TYPE(iter) == &PyListIter_Type); PyListObject *seq = it->it_seq; EXIT_IF(seq == NULL); - EXIT_IF((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)); + if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { + it->it_index = -1; + EXIT_IF(1); + } } op(_ITER_NEXT_LIST, (iter -- iter, next)) { @@ -3403,6 +3408,8 @@ dummy_func( PyFunctionObject *init = (PyFunctionObject *)cls->_spec_cache.init; PyCodeObject *code = (PyCodeObject *)init->func_code; DEOPT_IF(code->co_argcount != oparg+1); + DEOPT_IF((code->co_flags & (CO_VARKEYWORDS | CO_VARARGS | CO_OPTIMIZED)) != CO_OPTIMIZED); + DEOPT_IF(code->co_kwonlyargcount); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize + _Py_InitCleanup.co_framesize)); STAT_INC(CALL, hit); PyObject *self = _PyType_NewManagedObject(tp); @@ -4315,8 +4322,6 @@ dummy_func( if (optimized <= 0) { exit->temperature = restart_backoff_counter(exit->temperature); if (optimized < 0) { - Py_DECREF(current_executor); - tstate->previous_executor = Py_None; GOTO_UNWIND(); } GOTO_TIER_ONE(target); 
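The STORE_ATTR hunk above moves Py_DECREF(old_value) so it runs only after the dictionary has been updated, its GC-tracking state rechecked, and the version tag bumped. A minimal sketch of the general ordering idiom follows; it is not the interpreter code itself, and replace_slot is a hypothetical helper used only for illustration. Releasing the old reference last matters because Py_DECREF can run arbitrary destructor code, or free old_value outright when the container held the only reference to it.

static void
replace_slot(PyObject **slot, PyObject *value)
{
    PyObject *old_value = *slot;   /* may be the only remaining reference */
    *slot = Py_NewRef(value);      /* publish the new value first */
    /* ... finish any container bookkeeping (e.g. GC tracking) here ... */
    Py_XDECREF(old_value);         /* only now is it safe to trigger its destructor */
}

This is, as far as I can tell, the same store-then-release ordering that Py_XSETREF provides; the hunk applies it by hand because the GC-track check has to run between the store and the release.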
diff --git a/Python/ceval.c b/Python/ceval.c index 324d062fe9b..351ddd2666c 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -223,9 +223,6 @@ maybe_lltrace_resume_frame(_PyInterpreterFrame *frame, _PyInterpreterFrame *skip #endif -static void monitor_raise(PyThreadState *tstate, - _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr); static void monitor_reraise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); @@ -676,15 +673,6 @@ extern void _PyUOpPrint(const _PyUOpInstruction *uop); * so consume 3 units of C stack */ #define PY_EVAL_C_STACK_UNITS 2 -#if defined(_MSC_VER) && defined(_Py_USING_PGO) -/* gh-111786: _PyEval_EvalFrameDefault is too large to optimize for speed with - PGO on MSVC. Disable that optimization temporarily. If this is fixed - upstream, we should gate this on the version of MSVC. - */ -# pragma optimize("t", off) -/* This setting is reversed below following _PyEval_EvalFrameDefault */ -#endif - PyObject* _Py_HOT_FUNCTION _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) { @@ -882,7 +870,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int PyTraceBack_Here(f); } } - monitor_raise(tstate, frame, next_instr-1); + _PyEval_MonitorRaise(tstate, frame, next_instr-1); exception_unwind: { /* We can't use frame->instr_ptr here, as RERAISE may have set it */ @@ -1125,7 +1113,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int # pragma GCC diagnostic pop #elif defined(_MSC_VER) /* MS_WINDOWS */ # pragma warning(pop) -# pragma optimize("", on) #endif static void @@ -2194,8 +2181,8 @@ no_tools_for_local_event(PyThreadState *tstate, _PyInterpreterFrame *frame, int } } -static void -monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame, +void +_PyEval_MonitorRaise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr) { if (no_tools_for_global_event(tstate, PY_MONITORING_EVENT_RAISE)) { @@ -2476,6 +2463,7 @@ _PyEval_GetBuiltinId(_Py_Identifier *name) PyObject * PyEval_GetLocals(void) { + // We need to return a borrowed reference here, so some tricks are needed PyThreadState *tstate = _PyThreadState_GET(); _PyInterpreterFrame *current_frame = _PyThreadState_GetFrame(tstate); if (current_frame == NULL) { @@ -2483,7 +2471,37 @@ PyEval_GetLocals(void) return NULL; } - PyObject *locals = _PyEval_GetFrameLocals(); + // Be aware that this returns a new reference + PyObject *locals = _PyFrame_GetLocals(current_frame); + + if (locals == NULL) { + return NULL; + } + + if (PyFrameLocalsProxy_Check(locals)) { + PyFrameObject *f = _PyFrame_GetFrameObject(current_frame); + PyObject *ret = f->f_locals_cache; + if (ret == NULL) { + ret = PyDict_New(); + if (ret == NULL) { + Py_DECREF(locals); + return NULL; + } + f->f_locals_cache = ret; + } + if (PyDict_Update(ret, locals) < 0) { + // At this point, if the cache dict is broken, it will stay broken, as + // trying to clean it up or replace it will just cause other problems + ret = NULL; + } + Py_DECREF(locals); + return ret; + } + + assert(PyMapping_Check(locals)); + assert(Py_REFCNT(locals) > 1); + Py_DECREF(locals); + return locals; } diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 5617504a495..0b45caba0d4 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -901,6 +901,18 @@ unsignal_pending_calls(PyThreadState *tstate, PyInterpreterState *interp) #endif } +static void +clear_pending_handling_thread(struct _pending_calls *pending) +{ +#ifdef Py_GIL_DISABLED + 
PyMutex_Lock(&pending->mutex); + pending->handling_thread = NULL; + PyMutex_Unlock(&pending->mutex); +#else + pending->handling_thread = NULL; +#endif +} + static int make_pending_calls(PyThreadState *tstate) { @@ -933,7 +945,7 @@ make_pending_calls(PyThreadState *tstate) int32_t npending; if (_make_pending_calls(pending, &npending) != 0) { - pending->handling_thread = NULL; + clear_pending_handling_thread(pending); /* There might not be more calls to make, but we play it safe. */ signal_pending_calls(tstate, interp); return -1; @@ -945,7 +957,7 @@ make_pending_calls(PyThreadState *tstate) if (_Py_IsMainThread() && _Py_IsMainInterpreter(interp)) { if (_make_pending_calls(pending_main, &npending) != 0) { - pending->handling_thread = NULL; + clear_pending_handling_thread(pending); /* There might not be more calls to make, but we play it safe. */ signal_pending_calls(tstate, interp); return -1; @@ -956,7 +968,7 @@ make_pending_calls(PyThreadState *tstate) } } - pending->handling_thread = NULL; + clear_pending_handling_thread(pending); return 0; } @@ -991,12 +1003,34 @@ _Py_FinishPendingCalls(PyThreadState *tstate) assert(PyGILState_Check()); assert(_PyThreadState_CheckConsistency(tstate)); - if (make_pending_calls(tstate) < 0) { - PyObject *exc = _PyErr_GetRaisedException(tstate); - PyErr_BadInternalCall(); - _PyErr_ChainExceptions1(exc); - _PyErr_Print(tstate); - } + struct _pending_calls *pending = &tstate->interp->ceval.pending; + struct _pending_calls *pending_main = + _Py_IsMainThread() && _Py_IsMainInterpreter(tstate->interp) + ? &_PyRuntime.ceval.pending_mainthread + : NULL; + /* make_pending_calls() may return early without making all pending + calls, so we keep trying until we're actually done. */ + int32_t npending; +#ifndef NDEBUG + int32_t npending_prev = INT32_MAX; +#endif + do { + if (make_pending_calls(tstate) < 0) { + PyObject *exc = _PyErr_GetRaisedException(tstate); + PyErr_BadInternalCall(); + _PyErr_ChainExceptions1(exc); + _PyErr_Print(tstate); + } + + npending = _Py_atomic_load_int32_relaxed(&pending->npending); + if (pending_main != NULL) { + npending += _Py_atomic_load_int32_relaxed(&pending_main->npending); + } +#ifndef NDEBUG + assert(npending_prev > npending); + npending_prev = npending; +#endif + } while (npending > 0); } int diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 50941e4ec47..1ab83856fd9 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -314,17 +314,18 @@ GETITEM(PyObject *v, Py_ssize_t i) { /* gh-115999 tracks progress on addressing this. 
*/ \ static_assert(0, "The specializing interpreter is not yet thread-safe"); \ } while (0); +#define PAUSE_ADAPTIVE_COUNTER(COUNTER) ((void)COUNTER) #else #define ADVANCE_ADAPTIVE_COUNTER(COUNTER) \ do { \ (COUNTER) = advance_backoff_counter((COUNTER)); \ } while (0); -#endif #define PAUSE_ADAPTIVE_COUNTER(COUNTER) \ do { \ (COUNTER) = pause_backoff_counter((COUNTER)); \ } while (0); +#endif #define UNBOUNDLOCAL_ERROR_MSG \ "cannot access local variable '%s' where it is not associated with a value" diff --git a/Python/compile.c b/Python/compile.c index 98daa724e5a..7d93f2a05f6 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -119,6 +119,7 @@ enum fblocktype { WHILE_LOOP, FOR_LOOP, TRY_EXCEPT, FINALLY_TRY, FINALLY_END, struct fblockinfo { enum fblocktype fb_type; jump_target_label fb_block; + location fb_loc; /* (optional) type-specific exit or cleanup block */ jump_target_label fb_exit; /* (optional) additional information required for unwinding */ @@ -589,9 +590,17 @@ compiler_unit_free(struct compiler_unit *u) PyMem_Free(u); } -static struct compiler_unit * -get_class_compiler_unit(struct compiler *c) +static int +compiler_maybe_add_static_attribute_to_class(struct compiler *c, expr_ty e) { + assert(e->kind == Attribute_kind); + expr_ty attr_value = e->v.Attribute.value; + if (attr_value->kind != Name_kind || + e->v.Attribute.ctx != Store || + !_PyUnicode_EqualToASCIIString(attr_value->v.Name.id, "self")) + { + return SUCCESS; + } Py_ssize_t stack_size = PyList_GET_SIZE(c->c_stack); for (Py_ssize_t i = stack_size - 1; i >= 0; i--) { PyObject *capsule = PyList_GET_ITEM(c->c_stack, i); @@ -599,10 +608,12 @@ get_class_compiler_unit(struct compiler *c) capsule, CAPSULE_NAME); assert(u); if (u->u_scope_type == COMPILER_SCOPE_CLASS) { - return u; + assert(u->u_static_attributes); + RETURN_IF_ERROR(PySet_Add(u->u_static_attributes, e->v.Attribute.attr)); + break; } } - return NULL; + return SUCCESS; } static int @@ -1423,6 +1434,7 @@ compiler_push_fblock(struct compiler *c, location loc, f = &c->u->u_fblock[c->u->u_nfblocks++]; f->fb_type = t; f->fb_block = block_label; + f->fb_loc = loc; f->fb_exit = exit; f->fb_datum = datum; return SUCCESS; @@ -1550,7 +1562,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc, case WITH: case ASYNC_WITH: - *ploc = LOC((stmt_ty)info->fb_datum); + *ploc = info->fb_loc; ADDOP(c, *ploc, POP_BLOCK); if (preserve_tos) { ADDOP_I(c, *ploc, SWAP, 2); @@ -2532,7 +2544,18 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) return ERROR; } assert(c->u->u_static_attributes); - PyObject *static_attributes = PySequence_Tuple(c->u->u_static_attributes); + PyObject *static_attributes_unsorted = PySequence_List(c->u->u_static_attributes); + if (static_attributes_unsorted == NULL) { + compiler_exit_scope(c); + return ERROR; + } + if (PyList_Sort(static_attributes_unsorted) != 0) { + compiler_exit_scope(c); + Py_DECREF(static_attributes_unsorted); + return ERROR; + } + PyObject *static_attributes = PySequence_Tuple(static_attributes_unsorted); + Py_DECREF(static_attributes_unsorted); if (static_attributes == NULL) { compiler_exit_scope(c); return ERROR; @@ -3150,7 +3173,7 @@ compiler_async_for(struct compiler *c, stmt_ty s) NEW_JUMP_TARGET_LABEL(c, end); VISIT(c, expr, s->v.AsyncFor.iter); - ADDOP(c, loc, GET_AITER); + ADDOP(c, LOC(s->v.AsyncFor.iter), GET_AITER); USE_LABEL(c, start); RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)); @@ -5374,14 +5397,15 @@ compiler_sync_comprehension_generator(struct compiler *c, 
location loc, } if (IS_LABEL(start)) { VISIT(c, expr, gen->iter); - ADDOP(c, loc, GET_ITER); + ADDOP(c, LOC(gen->iter), GET_ITER); } } } + if (IS_LABEL(start)) { depth++; USE_LABEL(c, start); - ADDOP_JUMP(c, loc, FOR_ITER, anchor); + ADDOP_JUMP(c, LOC(gen->iter), FOR_ITER, anchor); } VISIT(c, expr, gen->target); @@ -5473,7 +5497,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, else { /* Sub-iter - calculate on the fly */ VISIT(c, expr, gen->iter); - ADDOP(c, loc, GET_AITER); + ADDOP(c, LOC(gen->iter), GET_AITER); } } @@ -5763,15 +5787,14 @@ pop_inlined_comprehension_state(struct compiler *c, location loc, } static inline int -compiler_comprehension_iter(struct compiler *c, location loc, - comprehension_ty comp) +compiler_comprehension_iter(struct compiler *c, comprehension_ty comp) { VISIT(c, expr, comp->iter); if (comp->is_async) { - ADDOP(c, loc, GET_AITER); + ADDOP(c, LOC(comp->iter), GET_AITER); } else { - ADDOP(c, loc, GET_ITER); + ADDOP(c, LOC(comp->iter), GET_ITER); } return SUCCESS; } @@ -5797,7 +5820,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, outermost = (comprehension_ty) asdl_seq_GET(generators, 0); if (is_inlined) { - if (compiler_comprehension_iter(c, loc, outermost)) { + if (compiler_comprehension_iter(c, outermost)) { goto error; } if (push_inlined_comprehension_state(c, loc, entry, &inline_state)) { @@ -5883,7 +5906,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, } Py_CLEAR(co); - if (compiler_comprehension_iter(c, loc, outermost)) { + if (compiler_comprehension_iter(c, outermost)) { goto error; } @@ -6026,7 +6049,7 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) /* Evaluate EXPR */ VISIT(c, expr, item->context_expr); - + loc = LOC(item->context_expr); ADDOP(c, loc, BEFORE_ASYNC_WITH); ADDOP_I(c, loc, GET_AWAITABLE, 1); ADDOP_LOAD_CONST(c, loc, Py_None); @@ -6124,7 +6147,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) /* Evaluate EXPR */ VISIT(c, expr, item->context_expr); /* Will push bound __exit__ */ - location loc = LOC(s); + location loc = LOC(item->context_expr); ADDOP(c, loc, BEFORE_WITH); ADDOP_JUMP(c, loc, SETUP_WITH, final); @@ -6157,7 +6180,6 @@ compiler_with(struct compiler *c, stmt_ty s, int pos) /* For successful outcome: * call __exit__(None, None, None) */ - loc = LOC(s); RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc)); ADDOP(c, loc, POP_TOP); ADDOP_JUMP(c, loc, JUMP, exit); @@ -6234,7 +6256,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) break; case YieldFrom_kind: if (!_PyST_IsFunctionLike(c->u->u_ste)) { - return compiler_error(c, loc, "'yield' outside function"); + return compiler_error(c, loc, "'yield from' outside function"); } if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) { return compiler_error(c, loc, "'yield from' inside async function"); @@ -6283,17 +6305,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) ADDOP(c, loc, NOP); return SUCCESS; } - if (e->v.Attribute.value->kind == Name_kind && - _PyUnicode_EqualToASCIIString(e->v.Attribute.value->v.Name.id, "self")) - { - struct compiler_unit *class_u = get_class_compiler_unit(c); - if (class_u != NULL) { - assert(class_u->u_scope_type == COMPILER_SCOPE_CLASS); - assert(class_u->u_static_attributes); - RETURN_IF_ERROR( - PySet_Add(class_u->u_static_attributes, e->v.Attribute.attr)); - } - } + RETURN_IF_ERROR(compiler_maybe_add_static_attribute_to_class(c, e)); VISIT(c, expr, e->v.Attribute.value); loc = LOC(e); loc = update_start_location_to_match_attr(c, loc, e); @@ 
-7478,7 +7490,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc) ADDOP(c, LOC(m->pattern), POP_TOP); } VISIT_SEQ(c, stmt, m->body); - ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP, end); // If the pattern fails to match, we want the line number of the // cleanup to be associated with the failed pattern, not the last line // of the body diff --git a/Python/context.c b/Python/context.c index 63318d1e597..c5052935b34 100644 --- a/Python/context.c +++ b/Python/context.c @@ -203,6 +203,7 @@ PyContextVar_Get(PyObject *ovar, PyObject *def, PyObject **val) goto not_found; } +#ifndef Py_GIL_DISABLED if (var->var_cached != NULL && var->var_cached_tsid == ts->id && var->var_cached_tsver == ts->context_ver) @@ -210,6 +211,7 @@ PyContextVar_Get(PyObject *ovar, PyObject *def, PyObject **val) *val = var->var_cached; goto found; } +#endif assert(PyContext_CheckExact(ts->context)); PyHamtObject *vars = ((PyContext *)ts->context)->ctx_vars; @@ -221,9 +223,11 @@ PyContextVar_Get(PyObject *ovar, PyObject *def, PyObject **val) } if (res == 1) { assert(found != NULL); +#ifndef Py_GIL_DISABLED var->var_cached = found; /* borrow */ var->var_cached_tsid = ts->id; var->var_cached_tsver = ts->context_ver; +#endif *val = found; goto found; @@ -723,8 +727,10 @@ PyTypeObject PyContext_Type = { static int contextvar_set(PyContextVar *var, PyObject *val) { +#ifndef Py_GIL_DISABLED var->var_cached = NULL; PyThreadState *ts = _PyThreadState_GET(); +#endif PyContext *ctx = context_get(); if (ctx == NULL) { @@ -739,16 +745,20 @@ contextvar_set(PyContextVar *var, PyObject *val) Py_SETREF(ctx->ctx_vars, new_vars); +#ifndef Py_GIL_DISABLED var->var_cached = val; /* borrow */ var->var_cached_tsid = ts->id; var->var_cached_tsver = ts->context_ver; +#endif return 0; } static int contextvar_del(PyContextVar *var) { +#ifndef Py_GIL_DISABLED var->var_cached = NULL; +#endif PyContext *ctx = context_get(); if (ctx == NULL) { @@ -823,9 +833,11 @@ contextvar_new(PyObject *name, PyObject *def) var->var_default = Py_XNewRef(def); +#ifndef Py_GIL_DISABLED var->var_cached = NULL; var->var_cached_tsid = 0; var->var_cached_tsver = 0; +#endif if (_PyObject_GC_MAY_BE_TRACKED(name) || (def != NULL && _PyObject_GC_MAY_BE_TRACKED(def))) @@ -863,9 +875,11 @@ contextvar_tp_clear(PyContextVar *self) { Py_CLEAR(self->var_name); Py_CLEAR(self->var_default); +#ifndef Py_GIL_DISABLED self->var_cached = NULL; self->var_cached_tsid = 0; self->var_cached_tsver = 0; +#endif return 0; } diff --git a/Python/crossinterp.c b/Python/crossinterp.c index a03456a8bbf..2af0dd6191a 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -699,7 +699,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc) Py_DECREF(tbexc); if (info->errdisplay == NULL) { #ifdef Py_DEBUG - PyErr_FormatUnraisable("Exception ignored while formating TracebackException"); + PyErr_FormatUnraisable("Exception ignored while formatting TracebackException"); #endif PyErr_Clear(); } diff --git a/Python/emscripten_trampoline.c b/Python/emscripten_trampoline.c index 2a80ec4f18d..960c6b4a2ef 100644 --- a/Python/emscripten_trampoline.c +++ b/Python/emscripten_trampoline.c @@ -10,7 +10,17 @@ * https://github.com/GoogleChromeLabs/wasm-feature-detect/blob/main/src/detectors/type-reflection/index.js */ EM_JS(int, _PyEM_detect_type_reflection, (), { - return "Function" in WebAssembly; + if (!("Function" in WebAssembly)) { + return false; + } + if (WebAssembly.Function.type) { + // Node v20 + Module.PyEM_CountArgs = 
(func) => WebAssembly.Function.type(wasmTable.get(func)).parameters.length; + } else { + // Node >= 22, v8-based browsers + Module.PyEM_CountArgs = (func) => wasmTable.get(func).type().parameters.length; + } + return true; }); void @@ -43,7 +53,7 @@ EM_JS(int, _PyEM_CountFuncParams, (PyCFunctionWithKeywords func), if (n !== undefined) { return n; } - n = WebAssembly.Function.type(wasmTable.get(func)).parameters.length; + n = Module.PyEM_CountArgs(func); _PyEM_CountFuncParams.cache.set(func, n); return n; } diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 4e0f73f7e23..df7c19a80e4 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -347,7 +347,7 @@ } STAT_INC(TO_BOOL, hit); if (_PyLong_IsZero((PyLongObject *)value)) { - assert(_Py_IsImmortal(value)); + assert(_Py_IsImmortalLoose(value)); res = Py_False; } else { @@ -398,7 +398,7 @@ } STAT_INC(TO_BOOL, hit); if (value == &_Py_STR(empty)) { - assert(_Py_IsImmortal(value)); + assert(_Py_IsImmortalLoose(value)); res = Py_False; } else { @@ -2635,6 +2635,7 @@ if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { JUMP_TO_ERROR(); } + _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -2675,8 +2676,11 @@ JUMP_TO_JUMP_TARGET(); } if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + it->it_index = -1; + if (1) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } break; } @@ -4386,8 +4390,6 @@ if (optimized <= 0) { exit->temperature = restart_backoff_counter(exit->temperature); if (optimized < 0) { - Py_DECREF(current_executor); - tstate->previous_executor = Py_None; GOTO_UNWIND(); } GOTO_TIER_ONE(target); diff --git a/Python/flowgraph.c b/Python/flowgraph.c index b8d3f066d18..ff70e473702 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -518,21 +518,6 @@ no_redundant_jumps(cfg_builder *g) { return true; } -static bool -all_exits_have_lineno(basicblock *entryblock) { - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - cfg_instr *instr = &b->b_instr[i]; - if (instr->i_opcode == RETURN_VALUE) { - if (instr->i_loc.lineno < 0) { - assert(0); - return false; - } - } - } - } - return true; -} #endif /***** CFG preprocessing (jump targets and exceptions) *****/ @@ -975,13 +960,14 @@ remove_unreachable(basicblock *entryblock) { basicblock **sp = stack; entryblock->b_predecessors = 1; *sp++ = entryblock; + entryblock->b_visited = 1; while (sp > stack) { basicblock *b = *(--sp); - b->b_visited = 1; if (b->b_next && BB_HAS_FALLTHROUGH(b)) { if (!b->b_next->b_visited) { assert(b->b_next->b_predecessors == 0); *sp++ = b->b_next; + b->b_next->b_visited = 1; } b->b_next->b_predecessors++; } @@ -991,8 +977,8 @@ remove_unreachable(basicblock *entryblock) { if (is_jump(instr) || is_block_push(instr)) { target = instr->i_target; if (!target->b_visited) { - assert(target->b_predecessors == 0 || target == b->b_next); *sp++ = target; + target->b_visited = 1; } target->b_predecessors++; } @@ -2500,7 +2486,6 @@ _PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache, RETURN_IF_ERROR(insert_superinstructions(g)); RETURN_IF_ERROR(push_cold_blocks_to_end(g)); - assert(all_exits_have_lineno(g->g_entryblock)); RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno)); return SUCCESS; } diff --git a/Python/gc.c b/Python/gc.c index aa8b216124c..8dbcb340d40 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -45,7 +45,7 
@@ typedef struct _gc_runtime_state GCState; // move_legacy_finalizers() removes this flag instead. // Between them, unreachable list is not normal list and we can not use // most gc_list_* functions for it. -#define NEXT_MASK_UNREACHABLE 2 +#define NEXT_MASK_UNREACHABLE (1) #define AS_GC(op) _Py_AS_GC(op) #define FROM_GC(gc) _Py_FROM_GC(gc) @@ -95,48 +95,9 @@ gc_decref(PyGC_Head *g) g->_gc_prev -= 1 << _PyGC_PREV_SHIFT; } -static inline int -gc_old_space(PyGC_Head *g) -{ - return g->_gc_next & _PyGC_NEXT_MASK_OLD_SPACE_1; -} - -static inline int -flip_old_space(int space) -{ - assert(space == 0 || space == 1); - return space ^ _PyGC_NEXT_MASK_OLD_SPACE_1; -} - -static inline void -gc_flip_old_space(PyGC_Head *g) -{ - g->_gc_next ^= _PyGC_NEXT_MASK_OLD_SPACE_1; -} -static inline void -gc_set_old_space(PyGC_Head *g, int space) -{ - assert(space == 0 || space == _PyGC_NEXT_MASK_OLD_SPACE_1); - g->_gc_next &= ~_PyGC_NEXT_MASK_OLD_SPACE_1; - g->_gc_next |= space; -} +#define GEN_HEAD(gcstate, n) (&(gcstate)->generations[n].head) -static PyGC_Head * -GEN_HEAD(GCState *gcstate, int n) -{ - assert((gcstate->visited_space & (~1)) == 0); - switch(n) { - case 0: - return &gcstate->young.head; - case 1: - return &gcstate->old[gcstate->visited_space].head; - case 2: - return &gcstate->old[gcstate->visited_space^1].head; - default: - Py_UNREACHABLE(); - } -} static GCState * get_gc_state(void) @@ -155,12 +116,11 @@ _PyGC_InitState(GCState *gcstate) GEN.head._gc_prev = (uintptr_t)&GEN.head; \ } while (0) - assert(gcstate->young.count == 0); - assert(gcstate->old[0].count == 0); - assert(gcstate->old[1].count == 0); - INIT_HEAD(gcstate->young); - INIT_HEAD(gcstate->old[0]); - INIT_HEAD(gcstate->old[1]); + for (int i = 0; i < NUM_GENERATIONS; i++) { + assert(gcstate->generations[i].count == 0); + INIT_HEAD(gcstate->generations[i]); + }; + gcstate->generation0 = GEN_HEAD(gcstate, 0); INIT_HEAD(gcstate->permanent_generation); #undef INIT_HEAD @@ -181,7 +141,6 @@ _PyGC_Init(PyInterpreterState *interp) if (gcstate->callbacks == NULL) { return _PyStatus_NO_MEMORY(); } - gcstate->heap_size = 0; return _PyStatus_OK(); } @@ -259,7 +218,6 @@ gc_list_is_empty(PyGC_Head *list) static inline void gc_list_append(PyGC_Head *node, PyGC_Head *list) { - assert((list->_gc_prev & ~_PyGC_PREV_MASK) == 0); PyGC_Head *last = (PyGC_Head *)list->_gc_prev; // last <-> node @@ -317,8 +275,6 @@ gc_list_merge(PyGC_Head *from, PyGC_Head *to) PyGC_Head *from_tail = GC_PREV(from); assert(from_head != from); assert(from_tail != from); - assert(gc_list_is_empty(to) || - gc_old_space(to_tail) == gc_old_space(from_tail)); _PyGCHead_SET_NEXT(to_tail, from_head); _PyGCHead_SET_PREV(from_head, to_tail); @@ -387,8 +343,8 @@ enum flagstates {collecting_clear_unreachable_clear, static void validate_list(PyGC_Head *head, enum flagstates flags) { - assert((head->_gc_prev & ~_PyGC_PREV_MASK) == 0); - assert((head->_gc_next & ~_PyGC_PREV_MASK) == 0); + assert((head->_gc_prev & PREV_MASK_COLLECTING) == 0); + assert((head->_gc_next & NEXT_MASK_UNREACHABLE) == 0); uintptr_t prev_value = 0, next_value = 0; switch (flags) { case collecting_clear_unreachable_clear: @@ -410,7 +366,7 @@ validate_list(PyGC_Head *head, enum flagstates flags) PyGC_Head *gc = GC_NEXT(head); while (gc != head) { PyGC_Head *trueprev = GC_PREV(gc); - PyGC_Head *truenext = GC_NEXT(gc); + PyGC_Head *truenext = (PyGC_Head *)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); assert(truenext != NULL); assert(trueprev == prev); assert((gc->_gc_prev & PREV_MASK_COLLECTING) == prev_value); @@ 
-420,54 +376,8 @@ validate_list(PyGC_Head *head, enum flagstates flags) } assert(prev == GC_PREV(head)); } - -static void -validate_old(GCState *gcstate) -{ - for (int space = 0; space < 2; space++) { - PyGC_Head *head = &gcstate->old[space].head; - PyGC_Head *gc = GC_NEXT(head); - while (gc != head) { - PyGC_Head *next = GC_NEXT(gc); - assert(gc_old_space(gc) == space); - gc = next; - } - } -} - -static void -validate_consistent_old_space(PyGC_Head *head) -{ - PyGC_Head *prev = head; - PyGC_Head *gc = GC_NEXT(head); - if (gc == head) { - return; - } - int old_space = gc_old_space(gc); - while (gc != head) { - PyGC_Head *truenext = GC_NEXT(gc); - assert(truenext != NULL); - assert(gc_old_space(gc) == old_space); - prev = gc; - gc = truenext; - } - assert(prev == GC_PREV(head)); -} - -static void -gc_list_validate_space(PyGC_Head *head, int space) { - PyGC_Head *gc = GC_NEXT(head); - while (gc != head) { - assert(gc_old_space(gc) == space); - gc = GC_NEXT(gc); - } -} - #else #define validate_list(x, y) do{}while(0) -#define validate_old(g) do{}while(0) -#define validate_consistent_old_space(l) do{}while(0) -#define gc_list_validate_space(l, s) do{}while(0) #endif /*** end of list stuff ***/ @@ -485,6 +395,10 @@ update_refs(PyGC_Head *containers) while (gc != containers) { next = GC_NEXT(gc); PyObject *op = FROM_GC(gc); + /* Move any object that might have become immortal to the + * permanent generation as the reference count is not accurately + * reflecting the actual number of live references to this object + */ if (_Py_IsImmortal(op)) { gc_list_move(gc, &get_gc_state()->permanent_generation.head); gc = next; @@ -587,13 +501,12 @@ visit_reachable(PyObject *op, void *arg) // Manually unlink gc from unreachable list because the list functions // don't work right in the presence of NEXT_MASK_UNREACHABLE flags. PyGC_Head *prev = GC_PREV(gc); - PyGC_Head *next = GC_NEXT(gc); + PyGC_Head *next = (PyGC_Head*)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); _PyObject_ASSERT(FROM_GC(prev), prev->_gc_next & NEXT_MASK_UNREACHABLE); _PyObject_ASSERT(FROM_GC(next), next->_gc_next & NEXT_MASK_UNREACHABLE); - prev->_gc_next = gc->_gc_next; // copy flag bits - gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; + prev->_gc_next = gc->_gc_next; // copy NEXT_MASK_UNREACHABLE _PyGCHead_SET_PREV(next, prev); gc_list_append(gc, reachable); @@ -645,9 +558,6 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) * or to the right have been scanned yet. */ - validate_consistent_old_space(young); - /* Record which old space we are in, and set NEXT_MASK_UNREACHABLE bit for convenience */ - uintptr_t flags = NEXT_MASK_UNREACHABLE | (gc->_gc_next & _PyGC_NEXT_MASK_OLD_SPACE_1); while (gc != young) { if (gc_get_refs(gc)) { /* gc is definitely reachable from outside the @@ -693,18 +603,17 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) // But this may pollute the unreachable list head's 'next' pointer // too. That's semantically senseless but expedient here - the // damage is repaired when this function ends. - last->_gc_next = flags | (uintptr_t)gc; + last->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)gc); _PyGCHead_SET_PREV(gc, last); - gc->_gc_next = flags | (uintptr_t)unreachable; + gc->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)unreachable); unreachable->_gc_prev = (uintptr_t)gc; } - gc = _PyGCHead_NEXT(prev); + gc = (PyGC_Head*)prev->_gc_next; } // young->_gc_prev must be last element remained in the list. 
young->_gc_prev = (uintptr_t)prev; - young->_gc_next &= _PyGC_PREV_MASK; // don't let the pollution of the list head's next pointer leak - unreachable->_gc_next &= _PyGC_PREV_MASK; + unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE; } static void @@ -763,8 +672,8 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers) PyObject *op = FROM_GC(gc); _PyObject_ASSERT(op, gc->_gc_next & NEXT_MASK_UNREACHABLE); - next = GC_NEXT(gc); gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; + next = (PyGC_Head*)gc->_gc_next; if (has_legacy_finalizer(op)) { gc_clear_collecting(gc); @@ -787,8 +696,8 @@ clear_unreachable_mask(PyGC_Head *unreachable) PyGC_Head *gc, *next; for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { _PyObject_ASSERT((PyObject*)FROM_GC(gc), gc->_gc_next & NEXT_MASK_UNREACHABLE); - next = GC_NEXT(gc); gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; + next = (PyGC_Head*)gc->_gc_next; } validate_list(unreachable, collecting_set_unreachable_clear); } @@ -958,7 +867,6 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) /* Invoke the callbacks we decided to honor. It's safe to invoke them * because they can't reference unreachable objects. */ - int visited_space = get_gc_state()->visited_space; while (! gc_list_is_empty(&wrcb_to_call)) { PyObject *temp; PyObject *callback; @@ -993,7 +901,6 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) Py_DECREF(op); if (wrcb_to_call._gc_next == (uintptr_t)gc) { /* object is still alive -- move it */ - gc_set_old_space(gc, visited_space); gc_list_move(gc, old); } else { @@ -1122,6 +1029,25 @@ delete_garbage(PyThreadState *tstate, GCState *gcstate, } +// Show stats for objects in each generations +static void +show_stats_each_generations(GCState *gcstate) +{ + char buf[100]; + size_t pos = 0; + + for (int i = 0; i < NUM_GENERATIONS && pos < sizeof(buf); i++) { + pos += PyOS_snprintf(buf+pos, sizeof(buf)-pos, + " %zd", + gc_list_size(GEN_HEAD(gcstate, i))); + } + + PySys_FormatStderr( + "gc: objects in each generation:%s\n" + "gc: objects in permanent generation: %zd\n", + buf, gc_list_size(&gcstate->permanent_generation.head)); +} + /* Deduce which objects among "base" are unreachable from outside the list and move them to 'unreachable'. The process consist in the following steps: @@ -1195,6 +1121,7 @@ deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) { * the reachable objects instead. But this is a one-time cost, probably not * worth complicating the code to speed just a little. */ + gc_list_init(unreachable); move_unreachable(base, unreachable); // gc_prev is pointer again validate_list(base, collecting_clear_unreachable_clear); validate_list(unreachable, collecting_set_unreachable_set); @@ -1233,292 +1160,220 @@ handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable, } -#define UNTRACK_TUPLES 1 -#define UNTRACK_DICTS 2 - -static void -gc_collect_region(PyThreadState *tstate, - PyGC_Head *from, - PyGC_Head *to, - int untrack, - struct gc_collection_stats *stats); - -static inline Py_ssize_t -gc_list_set_space(PyGC_Head *list, int space) -{ - Py_ssize_t size = 0; - PyGC_Head *gc; - for (gc = GC_NEXT(list); gc != list; gc = GC_NEXT(gc)) { - gc_set_old_space(gc, space); - size++; - } - return size; -} - -/* Making progress in the incremental collector - * In order to eventually collect all cycles - * the incremental collector must progress through the old - * space faster than objects are added to the old space. 
- * - * Each young or incremental collection adds a numebr of - * objects, S (for survivors) to the old space, and - * incremental collectors scan I objects from the old space. - * I > S must be true. We also want I > S * N to be where - * N > 1. Higher values of N mean that the old space is - * scanned more rapidly. - * The default incremental threshold of 10 translates to - * N == 1.4 (1 + 4/threshold) +/* Invoke progress callbacks to notify clients that garbage collection + * is starting or stopping */ - -/* Divide by 10, so that the default incremental threshold of 10 - * scans objects at 1% of the heap size */ -#define SCAN_RATE_DIVISOR 10 - static void -add_stats(GCState *gcstate, int gen, struct gc_collection_stats *stats) +invoke_gc_callback(PyThreadState *tstate, const char *phase, + int generation, Py_ssize_t collected, + Py_ssize_t uncollectable) { - gcstate->generation_stats[gen].collected += stats->collected; - gcstate->generation_stats[gen].uncollectable += stats->uncollectable; - gcstate->generation_stats[gen].collections += 1; -} + assert(!_PyErr_Occurred(tstate)); -static void -gc_collect_young(PyThreadState *tstate, - struct gc_collection_stats *stats) -{ + /* we may get called very early */ GCState *gcstate = &tstate->interp->gc; - PyGC_Head *young = &gcstate->young.head; - PyGC_Head *visited = &gcstate->old[gcstate->visited_space].head; -#ifdef Py_STATS - { - Py_ssize_t count = 0; - PyGC_Head *gc; - for (gc = GC_NEXT(young); gc != young; gc = GC_NEXT(gc)) { - count++; - } + if (gcstate->callbacks == NULL) { + return; } -#endif - PyGC_Head survivors; - gc_list_init(&survivors); - gc_collect_region(tstate, young, &survivors, UNTRACK_TUPLES, stats); - Py_ssize_t survivor_count = 0; - if (gcstate->visited_space) { - /* objects in visited space have bit set, so we set it here */ - survivor_count = gc_list_set_space(&survivors, 1); - } - else { - PyGC_Head *gc; - for (gc = GC_NEXT(&survivors); gc != &survivors; gc = GC_NEXT(gc)) { -#ifdef GC_DEBUG - assert(gc_old_space(gc) == 0); -#endif - survivor_count++; + /* The local variable cannot be rebound, check it for sanity */ + assert(PyList_CheckExact(gcstate->callbacks)); + PyObject *info = NULL; + if (PyList_GET_SIZE(gcstate->callbacks) != 0) { + info = Py_BuildValue("{sisnsn}", + "generation", generation, + "collected", collected, + "uncollectable", uncollectable); + if (info == NULL) { + PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + return; } } - (void)survivor_count; // Silence compiler warning - gc_list_merge(&survivors, visited); - validate_old(gcstate); - gcstate->young.count = 0; - gcstate->old[gcstate->visited_space].count++; - Py_ssize_t scale_factor = gcstate->old[0].threshold; - if (scale_factor < 1) { - scale_factor = 1; - } - gcstate->work_to_do += gcstate->heap_size / SCAN_RATE_DIVISOR / scale_factor; - add_stats(gcstate, 0, stats); -} - -#ifndef NDEBUG -static inline int -IS_IN_VISITED(PyGC_Head *gc, int visited_space) -{ - assert(visited_space == 0 || flip_old_space(visited_space) == 0); - return gc_old_space(gc) == visited_space; -} -#endif - -struct container_and_flag { - PyGC_Head *container; - int visited_space; - uintptr_t size; -}; -/* A traversal callback for adding to container) */ -static int -visit_add_to_container(PyObject *op, void *arg) -{ - OBJECT_STAT_INC(object_visits); - struct container_and_flag *cf = (struct container_and_flag *)arg; - int visited = cf->visited_space; - assert(visited == get_gc_state()->visited_space); - if (!_Py_IsImmortal(op) && 
_PyObject_IS_GC(op)) { - PyGC_Head *gc = AS_GC(op); - if (_PyObject_GC_IS_TRACKED(op) && - gc_old_space(gc) != visited) { - gc_flip_old_space(gc); - gc_list_move(gc, cf->container); - cf->size++; - } + PyObject *phase_obj = PyUnicode_FromString(phase); + if (phase_obj == NULL) { + Py_XDECREF(info); + PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + return; } - return 0; -} -static uintptr_t -expand_region_transitively_reachable(PyGC_Head *container, PyGC_Head *gc, GCState *gcstate) -{ - validate_list(container, collecting_clear_unreachable_clear); - struct container_and_flag arg = { - .container = container, - .visited_space = gcstate->visited_space, - .size = 0 - }; - assert(GC_NEXT(gc) == container); - while (gc != container) { - /* Survivors will be moved to visited space, so they should - * have been marked as visited */ - assert(IS_IN_VISITED(gc, gcstate->visited_space)); - PyObject *op = FROM_GC(gc); - if (_Py_IsImmortal(op)) { - PyGC_Head *next = GC_NEXT(gc); - gc_list_move(gc, &get_gc_state()->permanent_generation.head); - gc = next; - continue; + PyObject *stack[] = {phase_obj, info}; + for (Py_ssize_t i=0; i<PyList_GET_SIZE(gcstate->callbacks); i++) { + PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i); + Py_INCREF(cb); /* make sure cb doesn't go away */ + r = PyObject_Vectorcall(cb, stack, 2, NULL); + if (r == NULL) { + PyErr_WriteUnraisable(cb); } - traverseproc traverse = Py_TYPE(op)->tp_traverse; - (void) traverse(op, - visit_add_to_container, - &arg); - gc = GC_NEXT(gc); + else { + Py_DECREF(r); + } + Py_DECREF(cb); } - return arg.size; + Py_DECREF(phase_obj); + Py_XDECREF(info); + assert(!_PyErr_Occurred(tstate)); } -/* Do bookkeeping for a completed GC cycle */ -static void -completed_cycle(GCState *gcstate) -{ -#ifdef Py_DEBUG - PyGC_Head *not_visited = &gcstate->old[gcstate->visited_space^1].head; - assert(gc_list_is_empty(not_visited)); -#endif - gcstate->visited_space = flip_old_space(gcstate->visited_space); - /* Make sure all young objects have old space bit set correctly */ - PyGC_Head *young = &gcstate->young.head; - PyGC_Head *gc = GC_NEXT(young); - while (gc != young) { - PyGC_Head *next = GC_NEXT(gc); - gc_set_old_space(gc, gcstate->visited_space); - gc = next; - } - gcstate->work_to_do = 0; -} -static void -gc_collect_increment(PyThreadState *tstate, struct gc_collection_stats *stats) -{ - GCState *gcstate = &tstate->interp->gc; - PyGC_Head *not_visited = &gcstate->old[gcstate->visited_space^1].head; - PyGC_Head *visited = &gcstate->old[gcstate->visited_space].head; - PyGC_Head increment; - gc_list_init(&increment); - Py_ssize_t scale_factor = gcstate->old[0].threshold; - if (scale_factor < 1) { - scale_factor = 1; - } - gc_list_merge(&gcstate->young.head, &increment); - gcstate->young.count = 0; - gc_list_validate_space(&increment, gcstate->visited_space); - Py_ssize_t increment_size = 0; - while (increment_size < gcstate->work_to_do) { - if (gc_list_is_empty(not_visited)) { - break; +/* Find the oldest generation (highest numbered) where the count + * exceeds the threshold. Objects in the that generation and + * generations younger than it will be collected. */ +static int +gc_select_generation(GCState *gcstate) +{ + for (int i = NUM_GENERATIONS-1; i >= 0; i--) { + if (gcstate->generations[i].count > gcstate->generations[i].threshold) { + /* Avoid quadratic performance degradation in number + of tracked objects (see also issue #4074): + + To limit the cost of garbage collection, there are two strategies; + - make each collection faster, e.g. 
by scanning fewer objects + - do less collections + This heuristic is about the latter strategy. + + In addition to the various configurable thresholds, we only trigger a + full collection if the ratio + + long_lived_pending / long_lived_total + + is above a given value (hardwired to 25%). + + The reason is that, while "non-full" collections (i.e., collections of + the young and middle generations) will always examine roughly the same + number of objects -- determined by the aforementioned thresholds --, + the cost of a full collection is proportional to the total number of + long-lived objects, which is virtually unbounded. + + Indeed, it has been remarked that doing a full collection every + of object creations entails a dramatic performance + degradation in workloads which consist in creating and storing lots of + long-lived objects (e.g. building a large list of GC-tracked objects would + show quadratic performance, instead of linear as expected: see issue #4074). + + Using the above ratio, instead, yields amortized linear performance in + the total number of objects (the effect of which can be summarized + thusly: "each full garbage collection is more and more costly as the + number of objects grows, but we do fewer and fewer of them"). + + This heuristic was suggested by Martin von Löwis on python-dev in + June 2008. His original analysis and proposal can be found at: + http://mail.python.org/pipermail/python-dev/2008-June/080579.html + */ + if (i == NUM_GENERATIONS - 1 + && gcstate->long_lived_pending < gcstate->long_lived_total / 4) + { + continue; + } + return i; } - PyGC_Head *gc = _PyGCHead_NEXT(not_visited); - gc_list_move(gc, &increment); - increment_size++; - gc_set_old_space(gc, gcstate->visited_space); - increment_size += expand_region_transitively_reachable(&increment, gc, gcstate); - } - gc_list_validate_space(&increment, gcstate->visited_space); - PyGC_Head survivors; - gc_list_init(&survivors); - gc_collect_region(tstate, &increment, &survivors, UNTRACK_TUPLES, stats); - gc_list_validate_space(&survivors, gcstate->visited_space); - gc_list_merge(&survivors, visited); - assert(gc_list_is_empty(&increment)); - gcstate->work_to_do += gcstate->heap_size / SCAN_RATE_DIVISOR / scale_factor; - gcstate->work_to_do -= increment_size; - - validate_old(gcstate); - add_stats(gcstate, 1, stats); - if (gc_list_is_empty(not_visited)) { - completed_cycle(gcstate); } + return -1; } -static void -gc_collect_full(PyThreadState *tstate, - struct gc_collection_stats *stats) -{ - GCState *gcstate = &tstate->interp->gc; - validate_old(gcstate); - PyGC_Head *young = &gcstate->young.head; - PyGC_Head *pending = &gcstate->old[gcstate->visited_space^1].head; - PyGC_Head *visited = &gcstate->old[gcstate->visited_space].head; - /* merge all generations into visited */ - gc_list_validate_space(young, gcstate->visited_space); - gc_list_set_space(pending, gcstate->visited_space); - gc_list_merge(young, pending); - gcstate->young.count = 0; - gc_list_merge(pending, visited); - - gc_collect_region(tstate, visited, visited, - UNTRACK_TUPLES | UNTRACK_DICTS, - stats); - gcstate->young.count = 0; - gcstate->old[0].count = 0; - gcstate->old[1].count = 0; - - gcstate->work_to_do = - gcstate->young.threshold * 2; - _PyGC_ClearAllFreeLists(tstate->interp); - validate_old(gcstate); - add_stats(gcstate, 2, stats); -} - -/* This is the main function. Read this to understand how the +/* This is the main function. Read this to understand how the * collection process works. 
*/ -static void -gc_collect_region(PyThreadState *tstate, - PyGC_Head *from, - PyGC_Head *to, - int untrack, - struct gc_collection_stats *stats) +static Py_ssize_t +gc_collect_main(PyThreadState *tstate, int generation, _PyGC_Reason reason) { + int i; + Py_ssize_t m = 0; /* # objects collected */ + Py_ssize_t n = 0; /* # unreachable objects that couldn't be collected */ + PyGC_Head *young; /* the generation we are examining */ + PyGC_Head *old; /* next older generation */ PyGC_Head unreachable; /* non-problematic unreachable trash */ PyGC_Head finalizers; /* objects with, & reachable from, __del__ */ - PyGC_Head *gc; /* initialize to prevent a compiler warning */ + PyGC_Head *gc; + PyTime_t t1 = 0; /* initialize to prevent a compiler warning */ GCState *gcstate = &tstate->interp->gc; + // gc_collect_main() must not be called before _PyGC_Init + // or after _PyGC_Fini() assert(gcstate->garbage != NULL); assert(!_PyErr_Occurred(tstate)); - gc_list_init(&unreachable); - deduce_unreachable(from, &unreachable); - validate_consistent_old_space(from); - if (untrack & UNTRACK_TUPLES) { - untrack_tuples(from); + int expected = 0; + if (!_Py_atomic_compare_exchange_int(&gcstate->collecting, &expected, 1)) { + // Don't start a garbage collection if one is already in progress. + return 0; + } + + if (generation == GENERATION_AUTO) { + // Select the oldest generation that needs collecting. We will collect + // objects from that generation and all generations younger than it. + generation = gc_select_generation(gcstate); + if (generation < 0) { + // No generation needs to be collected. + _Py_atomic_store_int(&gcstate->collecting, 0); + return 0; + } + } + + assert(generation >= 0 && generation < NUM_GENERATIONS); + +#ifdef Py_STATS + if (_Py_stats) { + _Py_stats->object_stats.object_visits = 0; + } +#endif + GC_STAT_ADD(generation, collections, 1); + + if (reason != _Py_GC_REASON_SHUTDOWN) { + invoke_gc_callback(tstate, "start", generation, 0, 0); + } + + if (gcstate->debug & _PyGC_DEBUG_STATS) { + PySys_WriteStderr("gc: collecting generation %d...\n", generation); + show_stats_each_generations(gcstate); + // ignore error: don't interrupt the GC if reading the clock fails + (void)PyTime_PerfCounterRaw(&t1); + } + + if (PyDTrace_GC_START_ENABLED()) { + PyDTrace_GC_START(generation); + } + + /* update collection and allocation counters */ + if (generation+1 < NUM_GENERATIONS) { + gcstate->generations[generation+1].count += 1; } - if (untrack & UNTRACK_DICTS) { - untrack_dicts(from); + for (i = 0; i <= generation; i++) { + gcstate->generations[i].count = 0; } - validate_consistent_old_space(to); - if (from != to) { - gc_list_merge(from, to); + + /* merge younger generations with one we are currently collecting */ + for (i = 0; i < generation; i++) { + gc_list_merge(GEN_HEAD(gcstate, i), GEN_HEAD(gcstate, generation)); + } + + /* handy references */ + young = GEN_HEAD(gcstate, generation); + if (generation < NUM_GENERATIONS-1) { + old = GEN_HEAD(gcstate, generation+1); + } + else { + old = young; } - validate_consistent_old_space(to); + validate_list(old, collecting_clear_unreachable_clear); + + deduce_unreachable(young, &unreachable); + + untrack_tuples(young); /* Move reachable objects to next generation. */ + if (young != old) { + if (generation == NUM_GENERATIONS - 2) { + gcstate->long_lived_pending += gc_list_size(young); + } + gc_list_merge(young, old); + } + else { + /* We only un-track dicts in full collections, to avoid quadratic + dict build-up. See issue #14775. 
*/ + untrack_dicts(young); + gcstate->long_lived_pending = 0; + gcstate->long_lived_total = gc_list_size(young); + } /* All objects in unreachable are trash, but objects reachable from * legacy finalizers (e.g. tp_del) can't safely be deleted. @@ -1532,8 +1387,10 @@ gc_collect_region(PyThreadState *tstate, * and we move those into the finalizers list too. */ move_legacy_finalizer_reachable(&finalizers); + validate_list(&finalizers, collecting_clear_unreachable_clear); validate_list(&unreachable, collecting_set_unreachable_clear); + /* Print debugging information. */ if (gcstate->debug & _PyGC_DEBUG_COLLECTABLE) { for (gc = GC_NEXT(&unreachable); gc != &unreachable; gc = GC_NEXT(gc)) { @@ -1542,101 +1399,91 @@ gc_collect_region(PyThreadState *tstate, } /* Clear weakrefs and invoke callbacks as necessary. */ - stats->collected += handle_weakrefs(&unreachable, to); - gc_list_validate_space(to, gcstate->visited_space); - validate_list(to, collecting_clear_unreachable_clear); + m += handle_weakrefs(&unreachable, old); + + validate_list(old, collecting_clear_unreachable_clear); validate_list(&unreachable, collecting_set_unreachable_clear); /* Call tp_finalize on objects which have one. */ finalize_garbage(tstate, &unreachable); + /* Handle any objects that may have resurrected after the call * to 'finalize_garbage' and continue the collection with the * objects that are still unreachable */ PyGC_Head final_unreachable; - gc_list_init(&final_unreachable); - handle_resurrected_objects(&unreachable, &final_unreachable, to); + handle_resurrected_objects(&unreachable, &final_unreachable, old); /* Call tp_clear on objects in the final_unreachable set. This will cause * the reference cycles to be broken. It may also cause some objects * in finalizers to be freed. */ - stats->collected += gc_list_size(&final_unreachable); - delete_garbage(tstate, gcstate, &final_unreachable, to); + m += gc_list_size(&final_unreachable); + delete_garbage(tstate, gcstate, &final_unreachable, old); /* Collect statistics on uncollectable objects found and print * debugging information. */ - Py_ssize_t n = 0; for (gc = GC_NEXT(&finalizers); gc != &finalizers; gc = GC_NEXT(gc)) { n++; - if (gcstate->debug & _PyGC_DEBUG_COLLECTABLE) + if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) debug_cycle("uncollectable", FROM_GC(gc)); } - stats->uncollectable = n; + if (gcstate->debug & _PyGC_DEBUG_STATS) { + PyTime_t t2; + (void)PyTime_PerfCounterRaw(&t2); + double d = PyTime_AsSecondsDouble(t2 - t1); + PySys_WriteStderr( + "gc: done, %zd unreachable, %zd uncollectable, %.4fs elapsed\n", + n+m, n, d); + } + /* Append instances in the uncollectable set to a Python * reachable list of garbage. The programmer has to deal with * this if they insist on creating this type of structure. 
*/ - handle_legacy_finalizers(tstate, gcstate, &finalizers, to); - gc_list_validate_space(to, gcstate->visited_space); - validate_list(to, collecting_clear_unreachable_clear); -} + handle_legacy_finalizers(tstate, gcstate, &finalizers, old); + validate_list(old, collecting_clear_unreachable_clear); -/* Invoke progress callbacks to notify clients that garbage collection - * is starting or stopping - */ -static void -do_gc_callback(GCState *gcstate, const char *phase, - int generation, struct gc_collection_stats *stats) -{ - assert(!PyErr_Occurred()); + /* Clear free list only during the collection of the highest + * generation */ + if (generation == NUM_GENERATIONS-1) { + _PyGC_ClearAllFreeLists(tstate->interp); + } - /* The local variable cannot be rebound, check it for sanity */ - assert(PyList_CheckExact(gcstate->callbacks)); - PyObject *info = NULL; - if (PyList_GET_SIZE(gcstate->callbacks) != 0) { - info = Py_BuildValue("{sisnsn}", - "generation", generation, - "collected", stats->collected, - "uncollectable", stats->uncollectable); - if (info == NULL) { - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); - return; + if (_PyErr_Occurred(tstate)) { + if (reason == _Py_GC_REASON_SHUTDOWN) { + _PyErr_Clear(tstate); + } + else { + PyErr_FormatUnraisable("Exception ignored in garbage collection"); } } - PyObject *phase_obj = PyUnicode_FromString(phase); - if (phase_obj == NULL) { - Py_XDECREF(info); - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); - return; + /* Update stats */ + struct gc_generation_stats *stats = &gcstate->generation_stats[generation]; + stats->collections++; + stats->collected += m; + stats->uncollectable += n; + + GC_STAT_ADD(generation, objects_collected, m); +#ifdef Py_STATS + if (_Py_stats) { + GC_STAT_ADD(generation, object_visits, + _Py_stats->object_stats.object_visits); + _Py_stats->object_stats.object_visits = 0; } +#endif - PyObject *stack[] = {phase_obj, info}; - for (Py_ssize_t i=0; i<PyList_GET_SIZE(gcstate->callbacks); i++) { - PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i); - Py_INCREF(cb); /* make sure cb doesn't go away */ - r = PyObject_Vectorcall(cb, stack, 2, NULL); - if (r == NULL) { - PyErr_WriteUnraisable(cb); - } - else { - Py_DECREF(r); - } - Py_DECREF(cb); + if (PyDTrace_GC_DONE_ENABLED()) { + PyDTrace_GC_DONE(n + m); } - Py_DECREF(phase_obj); - Py_XDECREF(info); - assert(!PyErr_Occurred()); -} -static void -invoke_gc_callback(GCState *gcstate, const char *phase, - int generation, struct gc_collection_stats *stats) -{ - if (gcstate->callbacks == NULL) { - return; + if (reason != _Py_GC_REASON_SHUTDOWN) { + invoke_gc_callback(tstate, "stop", generation, m, n); } - do_gc_callback(gcstate, phase, generation, stats); + + assert(!_PyErr_Occurred(tstate)); + _Py_atomic_store_int(&gcstate->collecting, 0); + return n + m; } static int @@ -1726,20 +1573,10 @@ void _PyGC_Freeze(PyInterpreterState *interp) { GCState *gcstate = &interp->gc; - /* The permanent_generation has its old space bit set to zero */ - if (gcstate->visited_space) { - gc_list_set_space(&gcstate->young.head, 0); - } - gc_list_merge(&gcstate->young.head, &gcstate->permanent_generation.head); - gcstate->young.count = 0; - PyGC_Head*old0 = &gcstate->old[0].head; - PyGC_Head*old1 = &gcstate->old[1].head; - gc_list_merge(old0, &gcstate->permanent_generation.head); - gcstate->old[0].count = 0; - gc_list_set_space(old1, 0); - gc_list_merge(old1, &gcstate->permanent_generation.head); - gcstate->old[1].count = 0; - validate_old(gcstate); + for (int i = 0; i < 
NUM_GENERATIONS; ++i) { + gc_list_merge(GEN_HEAD(gcstate, i), &gcstate->permanent_generation.head); + gcstate->generations[i].count = 0; + } } void @@ -1747,8 +1584,7 @@ _PyGC_Unfreeze(PyInterpreterState *interp) { GCState *gcstate = &interp->gc; gc_list_merge(&gcstate->permanent_generation.head, - &gcstate->old[0].head); - validate_old(gcstate); + GEN_HEAD(gcstate, NUM_GENERATIONS-1)); } Py_ssize_t @@ -1784,63 +1620,29 @@ PyGC_IsEnabled(void) return gcstate->enabled; } +/* Public API to invoke gc.collect() from C */ Py_ssize_t -_PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason) +PyGC_Collect(void) { + PyThreadState *tstate = _PyThreadState_GET(); GCState *gcstate = &tstate->interp->gc; - int expected = 0; - if (!_Py_atomic_compare_exchange_int(&gcstate->collecting, &expected, 1)) { - // Don't start a garbage collection if one is already in progress. + if (!gcstate->enabled) { return 0; } - struct gc_collection_stats stats = { 0 }; - if (reason != _Py_GC_REASON_SHUTDOWN) { - invoke_gc_callback(gcstate, "start", generation, &stats); - } - if (PyDTrace_GC_START_ENABLED()) { - PyDTrace_GC_START(generation); - } + Py_ssize_t n; PyObject *exc = _PyErr_GetRaisedException(tstate); - switch(generation) { - case 0: - gc_collect_young(tstate, &stats); - break; - case 1: - gc_collect_increment(tstate, &stats); - break; - case 2: - gc_collect_full(tstate, &stats); - break; - default: - Py_UNREACHABLE(); - } - if (PyDTrace_GC_DONE_ENABLED()) { - PyDTrace_GC_DONE(stats.uncollectable + stats.collected); - } - if (reason != _Py_GC_REASON_SHUTDOWN) { - invoke_gc_callback(gcstate, "stop", generation, &stats); - } + n = gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_MANUAL); _PyErr_SetRaisedException(tstate, exc); - GC_STAT_ADD(generation, objects_collected, stats.collected); -#ifdef Py_STATS - if (_Py_stats) { - GC_STAT_ADD(generation, object_visits, - _Py_stats->object_stats.object_visits); - _Py_stats->object_stats.object_visits = 0; - } -#endif - validate_old(gcstate); - _Py_atomic_store_int(&gcstate->collecting, 0); - return stats.uncollectable + stats.collected; + + return n; } -/* Public API to invoke gc.collect() from C */ Py_ssize_t -PyGC_Collect(void) +_PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason) { - return _PyGC_Collect(_PyThreadState_GET(), 2, _Py_GC_REASON_MANUAL); + return gc_collect_main(tstate, generation, reason); } void @@ -1852,7 +1654,7 @@ _PyGC_CollectNoFail(PyThreadState *tstate) during interpreter shutdown (and then never finish it). See http://bugs.python.org/issue8713#msg195178 for an example. */ - _PyGC_Collect(_PyThreadState_GET(), 2, _Py_GC_REASON_SHUTDOWN); + gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_SHUTDOWN); } void @@ -1896,6 +1698,13 @@ _PyGC_DumpShutdownStats(PyInterpreterState *interp) } } +static void +finalize_unlink_gc_head(PyGC_Head *gc) { + PyGC_Head *prev = GC_PREV(gc); + PyGC_Head *next = GC_NEXT(gc); + _PyGCHead_SET_NEXT(prev, next); + _PyGCHead_SET_PREV(next, prev); +} void _PyGC_Fini(PyInterpreterState *interp) @@ -1904,9 +1713,25 @@ _PyGC_Fini(PyInterpreterState *interp) Py_CLEAR(gcstate->garbage); Py_CLEAR(gcstate->callbacks); - /* We expect that none of this interpreters objects are shared - with other interpreters. - See https://github.com/python/cpython/issues/90228. */ + /* Prevent a subtle bug that affects sub-interpreters that use basic + * single-phase init extensions (m_size == -1). 
Those extensions cause objects + * to be shared between interpreters, via the PyDict_Update(mdict, m_copy) call + * in import_find_extension(). + * + * If they are GC objects, their GC head next or prev links could refer to + * the interpreter _gc_runtime_state PyGC_Head nodes. Those nodes go away + * when the interpreter structure is freed and so pointers to them become + * invalid. If those objects are still used by another interpreter and + * UNTRACK is called on them, a crash will happen. We untrack the nodes + * here to avoid that. + * + * This bug was originally fixed when reported as gh-90228. The bug was + * re-introduced in gh-94673. + */ + for (int i = 0; i < NUM_GENERATIONS; i++) { + finalize_unlink_gc_head(&gcstate->generations[i].head); + } + finalize_unlink_gc_head(&gcstate->permanent_generation.head); } /* for debugging */ @@ -1991,11 +1816,10 @@ _PyObject_GC_Link(PyObject *op) GCState *gcstate = &tstate->interp->gc; gc->_gc_next = 0; gc->_gc_prev = 0; - gcstate->young.count++; /* number of allocated GC objects */ - gcstate->heap_size++; - if (gcstate->young.count > gcstate->young.threshold && + gcstate->generations[0].count++; /* number of allocated GC objects */ + if (gcstate->generations[0].count > gcstate->generations[0].threshold && gcstate->enabled && - gcstate->young.threshold && + gcstate->generations[0].threshold && !_Py_atomic_load_int_relaxed(&gcstate->collecting) && !_PyErr_Occurred(tstate)) { @@ -2006,9 +1830,11 @@ _PyObject_GC_Link(PyObject *op) void _Py_RunGC(PyThreadState *tstate) { - if (tstate->interp->gc.enabled) { - _PyGC_Collect(tstate, 1, _Py_GC_REASON_HEAP); + GCState *gcstate = get_gc_state(); + if (!gcstate->enabled) { + return; } + gc_collect_main(tstate, GENERATION_AUTO, _Py_GC_REASON_HEAP); } static PyObject * @@ -2116,10 +1942,9 @@ PyObject_GC_Del(void *op) #endif } GCState *gcstate = get_gc_state(); - if (gcstate->young.count > 0) { - gcstate->young.count--; + if (gcstate->generations[0].count > 0) { + gcstate->generations[0].count--; } - gcstate->heap_size--; PyObject_Free(((char *)op)-presize); } @@ -2141,36 +1966,26 @@ PyObject_GC_IsFinalized(PyObject *obj) return 0; } -static int -visit_generation(gcvisitobjects_t callback, void *arg, struct gc_generation *gen) -{ - PyGC_Head *gc_list, *gc; - gc_list = &gen->head; - for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { - PyObject *op = FROM_GC(gc); - Py_INCREF(op); - int res = callback(op, arg); - Py_DECREF(op); - if (!res) { - return -1; - } - } - return 0; -} - void PyUnstable_GC_VisitObjects(gcvisitobjects_t callback, void *arg) { + size_t i; GCState *gcstate = get_gc_state(); int origenstate = gcstate->enabled; gcstate->enabled = 0; - if (visit_generation(callback, arg, &gcstate->young)) { - goto done; - } - if (visit_generation(callback, arg, &gcstate->old[0])) { - goto done; + for (i = 0; i < NUM_GENERATIONS; i++) { + PyGC_Head *gc_list, *gc; + gc_list = GEN_HEAD(gcstate, i); + for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { + PyObject *op = FROM_GC(gc); + Py_INCREF(op); + int res = callback(op, arg); + Py_DECREF(op); + if (!res) { + goto done; + } + } } - visit_generation(callback, arg, &gcstate->old[1]); done: gcstate->enabled = origenstate; } diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index f19362c9573..140f1eaa88b 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -455,6 +455,30 @@ mark_reachable(PyObject *op) } #ifdef GC_DEBUG +static bool +validate_refcounts(const mi_heap_t *heap, const mi_heap_area_t 
*area, + void *block, size_t block_size, void *args) +{ + PyObject *op = op_from_block(block, args, false); + if (op == NULL) { + return true; + } + + _PyObject_ASSERT_WITH_MSG(op, !gc_is_unreachable(op), + "object should not be marked as unreachable yet"); + + if (_Py_REF_IS_MERGED(op->ob_ref_shared)) { + _PyObject_ASSERT_WITH_MSG(op, op->ob_tid == 0, + "merged objects should have ob_tid == 0"); + } + else if (!_Py_IsImmortal(op)) { + _PyObject_ASSERT_WITH_MSG(op, op->ob_tid != 0, + "unmerged objects should have ob_tid != 0"); + } + + return true; +} + static bool validate_gc_objects(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *args) @@ -498,6 +522,19 @@ mark_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } +static bool +restore_refs(const mi_heap_t *heap, const mi_heap_area_t *area, + void *block, size_t block_size, void *args) +{ + PyObject *op = op_from_block(block, args, false); + if (op == NULL) { + return true; + } + gc_restore_tid(op); + gc_clear_unreachable(op); + return true; +} + /* Return true if object has a pre-PEP 442 finalization method. */ static int has_legacy_finalizer(PyObject *op) @@ -549,6 +586,13 @@ static int deduce_unreachable_heap(PyInterpreterState *interp, struct collection_state *state) { + +#ifdef GC_DEBUG + // Check that all objects are marked as unreachable and that the computed + // reference count difference (stored in `ob_tid`) is non-negative. + gc_visit_heaps(interp, &validate_refcounts, &state->base); +#endif + // Identify objects that are directly reachable from outside the GC heap // by computing the difference between the refcount and the number of // incoming references. @@ -563,6 +607,8 @@ deduce_unreachable_heap(PyInterpreterState *interp, // Transitively mark reachable objects by clearing the // _PyGC_BITS_UNREACHABLE flag. if (gc_visit_heaps(interp, &mark_heap_visitor, &state->base) < 0) { + // On out-of-memory, restore the refcounts and bail out. + gc_visit_heaps(interp, &restore_refs, &state->base); return -1; } @@ -698,7 +744,7 @@ void _PyGC_InitState(GCState *gcstate) { // TODO: move to pycore_runtime_init.h once the incremental GC lands. - gcstate->young.threshold = 2000; + gcstate->generations[0].threshold = 2000; } @@ -996,8 +1042,8 @@ cleanup_worklist(struct worklist *worklist) static bool gc_should_collect(GCState *gcstate) { - int count = _Py_atomic_load_int_relaxed(&gcstate->young.count); - int threshold = gcstate->young.threshold; + int count = _Py_atomic_load_int_relaxed(&gcstate->generations[0].count); + int threshold = gcstate->generations[0].threshold; if (count <= threshold || threshold == 0 || !gcstate->enabled) { return false; } @@ -1005,7 +1051,7 @@ gc_should_collect(GCState *gcstate) // objects. A few tests rely on immediate scheduling of the GC so we ignore // the scaled threshold if generations[1].threshold is set to zero. return (count > gcstate->long_lived_total / 4 || - gcstate->old[0].threshold == 0); + gcstate->generations[1].threshold == 0); } static void @@ -1019,7 +1065,7 @@ record_allocation(PyThreadState *tstate) if (gc->alloc_count >= LOCAL_ALLOC_COUNT_THRESHOLD) { // TODO: Use Py_ssize_t for the generation count. 
GCState *gcstate = &tstate->interp->gc; - _Py_atomic_add_int(&gcstate->young.count, (int)gc->alloc_count); + _Py_atomic_add_int(&gcstate->generations[0].count, (int)gc->alloc_count); gc->alloc_count = 0; if (gc_should_collect(gcstate) && @@ -1038,7 +1084,7 @@ record_deallocation(PyThreadState *tstate) gc->alloc_count--; if (gc->alloc_count <= -LOCAL_ALLOC_COUNT_THRESHOLD) { GCState *gcstate = &tstate->interp->gc; - _Py_atomic_add_int(&gcstate->young.count, (int)gc->alloc_count); + _Py_atomic_add_int(&gcstate->generations[0].count, (int)gc->alloc_count); gc->alloc_count = 0; } } @@ -1050,12 +1096,10 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, // update collection and allocation counters if (generation+1 < NUM_GENERATIONS) { - state->gcstate->old[generation].count += 1; + state->gcstate->generations[generation+1].count += 1; } - - state->gcstate->young.count = 0; - for (int i = 1; i <= generation; ++i) { - state->gcstate->old[i-1].count = 0; + for (int i = 0; i <= generation; i++) { + state->gcstate->generations[i].count = 0; } // merge refcounts for all queued objects @@ -1066,7 +1110,8 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, int err = deduce_unreachable_heap(interp, state); if (err < 0) { _PyEval_StartTheWorld(interp); - goto error; + PyErr_NoMemory(); + return; } // Print debugging information. @@ -1100,7 +1145,12 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, _PyEval_StartTheWorld(interp); if (err < 0) { - goto error; + cleanup_worklist(&state->unreachable); + cleanup_worklist(&state->legacy_finalizers); + cleanup_worklist(&state->wrcb_to_call); + cleanup_worklist(&state->objs_to_decref); + PyErr_NoMemory(); + return; } // Call tp_clear on objects in the unreachable set. This will cause @@ -1110,15 +1160,6 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, // Append objects with legacy finalizers to the "gc.garbage" list. handle_legacy_finalizers(state); - return; - -error: - cleanup_worklist(&state->unreachable); - cleanup_worklist(&state->legacy_finalizers); - cleanup_worklist(&state->wrcb_to_call); - cleanup_worklist(&state->objs_to_decref); - PyErr_NoMemory(); - PyErr_FormatUnraisable("Out of memory during garbage collection"); } /* This is the main function. 
Read this to understand how the diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index ace91a3ca9b..1488e4215cf 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -875,6 +875,8 @@ PyFunctionObject *init = (PyFunctionObject *)cls->_spec_cache.init; PyCodeObject *code = (PyCodeObject *)init->func_code; DEOPT_IF(code->co_argcount != oparg+1, CALL); + DEOPT_IF((code->co_flags & (CO_VARKEYWORDS | CO_VARARGS | CO_OPTIMIZED)) != CO_OPTIMIZED, CALL); + DEOPT_IF(code->co_kwonlyargcount, CALL); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize + _Py_InitCleanup.co_framesize), CALL); STAT_INC(CALL, hit); PyObject *self = _PyType_NewManagedObject(tp); @@ -2787,7 +2789,7 @@ if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { goto error; } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -3307,7 +3309,7 @@ if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { goto error; } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -5317,7 +5319,7 @@ if (retval == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration) ) { - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); } if (_PyGen_FetchStopIterationValue(&retval) == 0) { assert(retval != NULL); @@ -5606,14 +5608,15 @@ new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, value); ep->me_value = value; } - Py_DECREF(old_value); - STAT_INC(STORE_ATTR, hit); /* Ensure dict is GC tracked if it needs to be */ if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) { _PyObject_GC_TRACK(dict); } - /* PEP 509 */ - dict->ma_version_tag = new_version; + dict->ma_version_tag = new_version; // PEP 509 + // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, + // when dict only holds the strong reference to value in ep->me_value. + Py_DECREF(old_value); + STAT_INC(STORE_ATTR, hit); Py_DECREF(owner); stack_pointer += -2; DISPATCH(); @@ -5936,7 +5939,7 @@ DEOPT_IF(!PyLong_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); if (_PyLong_IsZero((PyLongObject *)value)) { - assert(_Py_IsImmortal(value)); + assert(_Py_IsImmortalLoose(value)); res = Py_False; } else { @@ -5996,7 +5999,7 @@ DEOPT_IF(!PyUnicode_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); if (value == &_Py_STR(empty)) { - assert(_Py_IsImmortal(value)); + assert(_Py_IsImmortalLoose(value)); res = Py_False; } else { diff --git a/Python/getargs.c b/Python/getargs.c index 0b272374e08..e95ccec851f 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -2004,6 +2004,19 @@ parser_clear(struct _PyArg_Parser *parser) if (parser->is_kwtuple_owned) { Py_CLEAR(parser->kwtuple); } + + if (parser->format) { + parser->fname = NULL; + } + else { + assert(parser->fname != NULL); + } + parser->custom_msg = NULL; + parser->pos = 0; + parser->min = 0; + parser->max = 0; + parser->is_kwtuple_owned = 0; + parser->once.v = 0; } static PyObject* @@ -2561,7 +2574,7 @@ _PyArg_UnpackKeywordsWithVararg(PyObject *const *args, Py_ssize_t nargs, * * Otherwise, we leave a place at `buf[vararg]` for vararg tuple * so the index is `i + 1`. 
*/ - if (nargs < vararg && i != vararg) { + if (i < vararg) { buf[i] = current_arg; } else { diff --git a/Python/import.c b/Python/import.c index 98ecaed36f0..2ec596828e3 100644 --- a/Python/import.c +++ b/Python/import.c @@ -814,6 +814,8 @@ static int clear_singlephase_extension(PyInterpreterState *interp, // Currently, this is only used for testing. // (See _testinternalcapi.clear_extension().) +// If adding another use, be careful about modules that import themselves +// recursively (see gh-123880). int _PyImport_ClearExtension(PyObject *name, PyObject *filename) { @@ -1048,7 +1050,7 @@ del_cached_def(struct extensions_cache_value *value) However, this decref would be problematic if the module def were dynamically allocated, it were the last ref, and this function were called with an interpreter other than the def's owner. */ - assert(value->def == NULL || _Py_IsImmortal(value->def)); + assert(value->def == NULL || _Py_IsImmortalLoose(value->def)); Py_XDECREF(value->def->m_base.m_copy); value->def->m_base.m_copy = NULL; @@ -1321,12 +1323,16 @@ _extensions_cache_set(PyObject *path, PyObject *name, value = entry == NULL ? NULL : (struct extensions_cache_value *)entry->value; - /* We should never be updating an existing cache value. */ - assert(value == NULL); if (value != NULL) { - PyErr_Format(PyExc_SystemError, - "extension module %R is already cached", name); - goto finally; + /* gh-123880: If there's an existing cache value, it means a module is + * being imported recursively from its PyInit_* or Py_mod_* function. + * (That function presumably handles returning a partially + * constructed module in such a case.) + * We can reuse the existing cache value; it is owned by the cache. + * (Entries get removed from it in exceptional circumstances, + * after interpreter shutdown, and in runtime shutdown.) + */ + goto finally_oldvalue; } newvalue = alloc_extensions_cache_value(); if (newvalue == NULL) { @@ -1391,6 +1397,7 @@ _extensions_cache_set(PyObject *path, PyObject *name, cleanup_old_cached_def(&olddefbase); } +finally_oldvalue: extensions_lock_release(); if (key != NULL) { hashtable_destroy_str(key); @@ -1532,6 +1539,35 @@ switch_to_main_interpreter(PyThreadState *tstate) return main_tstate; } +static void +switch_back_from_main_interpreter(PyThreadState *tstate, + PyThreadState *main_tstate, + PyObject *tempobj) +{ + assert(main_tstate == PyThreadState_GET()); + assert(_Py_IsMainInterpreter(main_tstate->interp)); + assert(tstate->interp != main_tstate->interp); + + /* Handle any exceptions, which we cannot propagate directly + * to the subinterpreter. */ + if (PyErr_Occurred()) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + /* We trust it will be caught again soon. */ + PyErr_Clear(); + } + else { + /* Printing the exception should be sufficient. */ + PyErr_PrintEx(0); + } + } + + Py_XDECREF(tempobj); + + PyThreadState_Clear(main_tstate); + (void)PyThreadState_Swap(tstate); + PyThreadState_Delete(main_tstate); +} + static PyObject * get_core_module_dict(PyInterpreterState *interp, PyObject *name, PyObject *path) @@ -2015,7 +2051,7 @@ import_run_extension(PyThreadState *tstate, PyModInitFunction p0, singlephase.m_init = p0; } cached = update_global_state_for_extension( - tstate, info->path, info->name, def, &singlephase); + main_tstate, info->path, info->name, def, &singlephase); if (cached == NULL) { assert(PyErr_Occurred()); goto main_finally; @@ -2027,27 +2063,10 @@ import_run_extension(PyThreadState *tstate, PyModInitFunction p0, /* Switch back to the subinterpreter. 
*/ if (switched) { assert(main_tstate != tstate); - - /* Handle any exceptions, which we cannot propagate directly - * to the subinterpreter. */ - if (PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_MemoryError)) { - /* We trust it will be caught again soon. */ - PyErr_Clear(); - } - else { - /* Printing the exception should be sufficient. */ - PyErr_PrintEx(0); - } - } - + switch_back_from_main_interpreter(tstate, main_tstate, mod); /* Any module we got from the init function will have to be * reloaded in the subinterpreter. */ - Py_CLEAR(mod); - - PyThreadState_Clear(main_tstate); - (void)PyThreadState_Swap(tstate); - PyThreadState_Delete(main_tstate); + mod = NULL; } /*****************************************************************/ @@ -2115,6 +2134,7 @@ import_run_extension(PyThreadState *tstate, PyModInitFunction p0, } +// Used in _PyImport_ClearExtension; see notes there. static int clear_singlephase_extension(PyInterpreterState *interp, PyObject *name, PyObject *path) @@ -2141,9 +2161,21 @@ clear_singlephase_extension(PyInterpreterState *interp, } } + /* We must use the main interpreter to clean up the cache. + * See the note in import_run_extension(). */ + PyThreadState *tstate = PyThreadState_GET(); + PyThreadState *main_tstate = switch_to_main_interpreter(tstate); + if (main_tstate == NULL) { + return -1; + } + /* Clear the cached module def. */ _extensions_cache_delete(path, name); + if (main_tstate != tstate) { + switch_back_from_main_interpreter(tstate, main_tstate, NULL); + } + return 0; } diff --git a/Python/initconfig.c b/Python/initconfig.c index a28c08c5318..84717b4e3c9 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -1542,20 +1542,24 @@ config_wstr_to_int(const wchar_t *wstr, int *result) static PyStatus config_read_gil(PyConfig *config, size_t len, wchar_t first_char) { -#ifdef Py_GIL_DISABLED if (len == 1 && first_char == L'0') { +#ifdef Py_GIL_DISABLED config->enable_gil = _PyConfig_GIL_DISABLE; +#else + return _PyStatus_ERR("Disabling the GIL is not supported by this build"); +#endif } else if (len == 1 && first_char == L'1') { +#ifdef Py_GIL_DISABLED config->enable_gil = _PyConfig_GIL_ENABLE; +#else + return _PyStatus_OK(); +#endif } else { return _PyStatus_ERR("PYTHON_GIL / -X gil must be \"0\" or \"1\""); } return _PyStatus_OK(); -#else - return _PyStatus_ERR("PYTHON_GIL / -X gil are not supported by this build"); -#endif } static PyStatus diff --git a/Python/instrumentation.c b/Python/instrumentation.c index ae790a1441b..3481b5df142 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -1344,7 +1344,6 @@ int _Py_call_instrumentation_instruction(PyThreadState *tstate, _PyInterpreterFrame* frame, _Py_CODEUNIT *instr) { PyCodeObject *code = _PyFrame_GetCode(frame); - assert(debug_check_sanity(tstate->interp, code)); int offset = (int)(instr - _PyCode_CODE(code)); _PyCoMonitoringData *instrumentation_data = code->_co_monitoring; assert(instrumentation_data->per_instruction_opcodes); @@ -1352,6 +1351,7 @@ _Py_call_instrumentation_instruction(PyThreadState *tstate, _PyInterpreterFrame* if (tstate->tracing) { return next_opcode; } + assert(debug_check_sanity(tstate->interp, code)); PyInterpreterState *interp = tstate->interp; uint8_t tools = instrumentation_data->per_instruction_tools != NULL ? 
instrumentation_data->per_instruction_tools[offset] : diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c index 74118030925..9cc3af1f5e1 100644 --- a/Python/legacy_tracing.c +++ b/Python/legacy_tracing.c @@ -121,6 +121,19 @@ sys_profile_call_or_return( Py_DECREF(meth); return res; } + else if (Py_TYPE(callable) == &PyMethod_Type) { + // CALL instruction will grab the function from the method, + // so if the function is a C function, the return event will + // be emitted. However, CALL event happens before CALL + // instruction, so we need to handle this case here. + PyObject* func = PyMethod_GET_FUNCTION(callable); + if (func == NULL) { + return NULL; + } + if (PyCFunction_Check(func)) { + return call_profile_func(self, func); + } + } Py_RETURN_NONE; } @@ -605,7 +618,7 @@ _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) (1 << PY_MONITORING_EVENT_STOP_ITERATION); PyFrameObject* frame = PyEval_GetFrame(); - if (frame->f_trace_opcodes) { + if (frame && frame->f_trace_opcodes) { int ret = _PyEval_SetOpcodeTrace(frame, true); if (ret != 0) { return ret; diff --git a/Python/lock.c b/Python/lock.c index 7c6a5175e88..57675fe1873 100644 --- a/Python/lock.c +++ b/Python/lock.c @@ -514,6 +514,7 @@ void _PySeqLock_LockWrite(_PySeqLock *seqlock) } else if (_Py_atomic_compare_exchange_uint32(&seqlock->sequence, &prev, prev + 1)) { // We've locked the cache + _Py_atomic_fence_release(); break; } else { @@ -547,28 +548,31 @@ uint32_t _PySeqLock_BeginRead(_PySeqLock *seqlock) return sequence; } -uint32_t _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous) +int _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous) { - // Synchronize again and validate that the entry hasn't been updated - // while we were readying the values. - if (_Py_atomic_load_uint32_acquire(&seqlock->sequence) == previous) { + // gh-121368: We need an explicit acquire fence here to ensure that + // this load of the sequence number is not reordered before any loads + // within the read lock. + _Py_atomic_fence_acquire(); + + if (_Py_atomic_load_uint32_relaxed(&seqlock->sequence) == previous) { return 1; - } + } - _Py_yield(); - return 0; + _Py_yield(); + return 0; } -uint32_t _PySeqLock_AfterFork(_PySeqLock *seqlock) +int _PySeqLock_AfterFork(_PySeqLock *seqlock) { // Synchronize again and validate that the entry hasn't been updated // while we were readying the values. - if (SEQLOCK_IS_UPDATING(seqlock->sequence)) { + if (SEQLOCK_IS_UPDATING(seqlock->sequence)) { seqlock->sequence = 0; return 1; - } + } - return 0; + return 0; } #undef PyMutex_Lock diff --git a/Python/marshal.c b/Python/marshal.c index a46fc0ce881..76fa701b541 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -1927,7 +1927,7 @@ machine architecture issues.\n\ Not all Python object types are supported; in general, only objects\n\ whose value is independent from a particular invocation of Python can be\n\ written and read by this module. The following types are supported:\n\ -None, integers, floating point numbers, strings, bytes, bytearrays,\n\ +None, integers, floating-point numbers, strings, bytes, bytearrays,\n\ tuples, lists, sets, dictionaries, and code objects, where it\n\ should be understood that tuples, lists and dictionaries are only\n\ supported as long as the values contained therein are themselves\n\ @@ -1938,7 +1938,7 @@ Variables:\n\ \n\ version -- indicates the format that the module uses. 
Version 0 is the\n\ historical format, version 1 shares interned strings and version 2\n\ - uses a binary format for floating point numbers.\n\ + uses a binary format for floating-point numbers.\n\ Version 3 shares common object references (New in version 3.4).\n\ \n\ Functions:\n\ diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 13bd62da4d0..1701a1cd217 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -103,7 +103,7 @@ _PyRuntimeState _PyRuntime #if defined(__linux__) && (defined(__GNUC__) || defined(__clang__)) __attribute__ ((section (".PyRuntime"))) #endif -= _PyRuntimeState_INIT(_PyRuntime); += _PyRuntimeState_INIT(_PyRuntime, _Py_Debug_Cookie); _Py_COMP_DIAG_POP static int runtime_initialized = 0; diff --git a/Python/pystate.c b/Python/pystate.c index 602b13e18c7..66fd392c3e3 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -389,7 +389,7 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS Note that we initialize "initial" relative to _PyRuntime, to ensure pre-initialized pointers point to the active runtime state (and not "initial"). */ -static const _PyRuntimeState initial = _PyRuntimeState_INIT(_PyRuntime); +static const _PyRuntimeState initial = _PyRuntimeState_INIT(_PyRuntime, ""); _Py_COMP_DIAG_POP #define LOCKS_INIT(runtime) \ @@ -454,6 +454,8 @@ _PyRuntimeState_Init(_PyRuntimeState *runtime) // Py_Initialize() must be running again. // Reset to _PyRuntimeState_INIT. memcpy(runtime, &initial, sizeof(*runtime)); + // Preserve the cookie from the original runtime. + memcpy(runtime->debug_offsets.cookie, _Py_Debug_Cookie, 8); assert(!runtime->_initialized); } @@ -1499,6 +1501,8 @@ init_threadstate(_PyThreadStateImpl *_tstate, tstate->previous_executor = NULL; tstate->dict_global_version = 0; + _tstate->asyncio_running_loop = NULL; + tstate->delete_later = NULL; llist_init(&_tstate->mem_free_queue); @@ -1700,6 +1704,11 @@ PyThreadState_Clear(PyThreadState *tstate) /* Don't clear tstate->pyframe: it is a borrowed reference */ + Py_CLEAR(tstate->threading_local_key); + Py_CLEAR(tstate->threading_local_sentinel); + + Py_CLEAR(((_PyThreadStateImpl *)tstate)->asyncio_running_loop); + Py_CLEAR(tstate->dict); Py_CLEAR(tstate->async_exc); diff --git a/Python/pythonrun.c b/Python/pythonrun.c index ce7f194e929..b67597113ea 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -280,11 +280,42 @@ PyRun_InteractiveOneObjectEx(FILE *fp, PyObject *filename, PyObject *main_dict = PyModule_GetDict(main_module); // borrowed ref PyObject *res = run_mod(mod, filename, main_dict, main_dict, flags, arena, interactive_src, 1); + Py_INCREF(interactive_src); _PyArena_Free(arena); Py_DECREF(main_module); if (res == NULL) { + PyThreadState *tstate = _PyThreadState_GET(); + PyObject *exc = _PyErr_GetRaisedException(tstate); + if (PyType_IsSubtype(Py_TYPE(exc), + (PyTypeObject *) PyExc_SyntaxError)) + { + /* fix "text" attribute */ + assert(interactive_src != NULL); + PyObject *xs = PyUnicode_Splitlines(interactive_src, 1); + if (xs == NULL) { + goto error; + } + PyObject *exc_lineno = PyObject_GetAttr(exc, &_Py_ID(lineno)); + if (exc_lineno == NULL) { + Py_DECREF(xs); + goto error; + } + int n = PyLong_AsInt(exc_lineno); + Py_DECREF(exc_lineno); + if (n <= 0 || n > PyList_GET_SIZE(xs)) { + Py_DECREF(xs); + goto error; + } + PyObject *line = PyList_GET_ITEM(xs, n - 1); + PyObject_SetAttr(exc, &_Py_ID(text), line); + Py_DECREF(xs); + } +error: + Py_DECREF(interactive_src); + _PyErr_SetRaisedException(tstate, exc); return -1; } + Py_DECREF(interactive_src); Py_DECREF(res); flush_io(); 
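The Python/lock.c hunk above tightens the sequence-lock protocol: _PySeqLock_LockWrite() gains a release fence, and _PySeqLock_EndRead() now issues an explicit acquire fence before re-checking the sequence number with a relaxed load. As a rough illustration (not part of the patch) of the optimistic-read pattern this internal API supports, a reader copies the protected data speculatively and retries whenever the sequence number changed underneath it. The cache_entry type below is invented for the example, and a real free-threaded caller would also load the protected fields with relaxed atomics.

#include <stdint.h>
#include "pycore_lock.h"          // internal API: _PySeqLock, _PySeqLock_BeginRead(), _PySeqLock_EndRead()

typedef struct {
    _PySeqLock seq;               // guards `value` (hypothetical layout for this sketch)
    void *value;
} cache_entry;

static void *
read_cached_value(cache_entry *entry)
{
    void *result;
    do {
        // Wait for any in-progress writer, then note the current sequence number.
        uint32_t sequence = _PySeqLock_BeginRead(&entry->seq);
        result = entry->value;    // speculative read of the protected data
        // The acquire fence added to _PySeqLock_EndRead() keeps the read above
        // from being reordered past this re-check; a zero return means a writer
        // interfered and the copy must be retried.
    } while (!_PySeqLock_EndRead(&entry->seq, sequence));
    return result;
}

The release fence on the write side and the acquire fence on the read side are the two halves of the same ordering contract: once EndRead succeeds, the speculative copy is a consistent snapshot.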
diff --git a/Python/specialize.c b/Python/specialize.c index 973baf2fbdc..1a2043d0e8d 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -29,6 +29,10 @@ GCStats _py_gc_stats[NUM_GENERATIONS] = { 0 }; static PyStats _Py_stats_struct = { .gc_stats = _py_gc_stats }; PyStats *_Py_stats = NULL; +#if PYSTATS_MAX_UOP_ID < MAX_UOP_ID +#error "Not enough space allocated for pystats. Increase PYSTATS_MAX_UOP_ID to at least MAX_UOP_ID" +#endif + #define ADD_STAT_TO_DICT(res, field) \ do { \ PyObject *val = PyLong_FromUnsignedLongLong(stats->field); \ diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index 9686d10563a..faeed0b7125 100644 --- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -97,6 +97,7 @@ static const char* _Py_stdlib_module_names[] = { "_weakref", "_weakrefset", "_winapi", +"_wmi", "_zoneinfo", "abc", "antigravity", diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c index fee7dd0e56d..e58b60ddd5e 100644 --- a/Python/tracemalloc.c +++ b/Python/tracemalloc.c @@ -838,7 +838,7 @@ _PyTraceMalloc_Init(void) tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback, hashtable_compare_traceback, - NULL, raw_free); + raw_free, NULL); tracemalloc_traces = tracemalloc_create_traces_table(); tracemalloc_domains = tracemalloc_create_domains_table(); diff --git a/README.rst b/README.rst index 9f6a9a6ae09..7c1e463273b 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -This is Python version 3.13.0 beta 3 -==================================== +This is Python version 3.13.0 +============================= .. image:: https://github.com/python/cpython/workflows/Tests/badge.svg :alt: CPython build status on GitHub Actions diff --git a/Tools/build/check_extension_modules.py b/Tools/build/check_extension_modules.py index a9fee4981ea..7de35b499da 100644 --- a/Tools/build/check_extension_modules.py +++ b/Tools/build/check_extension_modules.py @@ -53,6 +53,7 @@ "_overlapped", "_testconsole", "_winapi", + "_wmi", "msvcrt", "nt", "winreg", diff --git a/Tools/build/generate_re_casefix.py b/Tools/build/generate_re_casefix.py index b57ac07426c..6cebfbd025c 100755 --- a/Tools/build/generate_re_casefix.py +++ b/Tools/build/generate_re_casefix.py @@ -23,9 +23,9 @@ def update_file(file, content): # Maps the code of lowercased character to codes of different lowercased # characters which have the same uppercase. -_EXTRA_CASES = { +_EXTRA_CASES = {{ %s -} +}} """ def uname(i): diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index c08568f2e00..9cc89b8caee 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -96,6 +96,19 @@ def error_if(value: bool, error_message: str) -> None: sys.exit(1) +def is_root_directory_git_index() -> bool: + """Checks if the root directory is a git index""" + try: + subprocess.check_call( + ["git", "-C", str(CPYTHON_ROOT_DIR), "rev-parse"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + except subprocess.CalledProcessError: + return False + return True + + def filter_gitignored_paths(paths: list[str]) -> list[str]: """ Filter out paths excluded by the gitignore file. @@ -108,6 +121,10 @@ def filter_gitignored_paths(paths: list[str]) -> list[str]: '.gitignore:9:*.a Tools/lib.a' """ + # No paths means no filtering to be done. + if not paths: + return [] + # Filter out files in gitignore. 
# Non-matching files show up as '::' git_check_ignore_proc = subprocess.run( @@ -337,6 +354,11 @@ def create_externals_sbom() -> None: def main() -> None: + # Don't regenerate the SBOM if we're not a git repository. + if not is_root_directory_git_index(): + print("Skipping SBOM generation due to not being a git repository") + return + create_source_sbom() create_externals_sbom() diff --git a/Tools/build/stable_abi.py b/Tools/build/stable_abi.py index 8b01c91e0d6..f7fccb63685 100644 --- a/Tools/build/stable_abi.py +++ b/Tools/build/stable_abi.py @@ -225,9 +225,9 @@ def sort_key(item): key=sort_key): write(f'EXPORT_DATA({item.name})') -REST_ROLES = { - 'function': 'function', - 'data': 'var', +ITEM_KIND_TO_DOC_ROLE = { + 'function': 'func', + 'data': 'data', 'struct': 'type', 'macro': 'macro', # 'const': 'const', # all undocumented @@ -236,22 +236,28 @@ def sort_key(item): @generator("doc_list", 'Doc/data/stable_abi.dat') def gen_doc_annotations(manifest, args, outfile): - """Generate/check the stable ABI list for documentation annotations""" + """Generate/check the stable ABI list for documentation annotations + + See ``StableABIEntry`` in ``Doc/tools/extensions/c_annotations.py`` + for a description of each field. + """ writer = csv.DictWriter( outfile, ['role', 'name', 'added', 'ifdef_note', 'struct_abi_kind'], lineterminator='\n') writer.writeheader() - for item in manifest.select(REST_ROLES.keys(), include_abi_only=False): + kinds = set(ITEM_KIND_TO_DOC_ROLE) + for item in manifest.select(kinds, include_abi_only=False): if item.ifdef: ifdef_note = manifest.contents[item.ifdef].doc else: ifdef_note = None row = { - 'role': REST_ROLES[item.kind], + 'role': ITEM_KIND_TO_DOC_ROLE[item.kind], 'name': item.name, 'added': item.added, - 'ifdef_note': ifdef_note} + 'ifdef_note': ifdef_note, + } rows = [row] if item.kind == 'struct': row['struct_abi_kind'] = item.struct_abi_kind @@ -259,7 +265,8 @@ def gen_doc_annotations(manifest, args, outfile): rows.append({ 'role': 'member', 'name': f'{item.name}.{member_name}', - 'added': item.added}) + 'added': item.added, + }) writer.writerows(rows) @generator("ctypes_test", 'Lib/test/test_stable_abi_ctypes.py') diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index adb7f8e5d64..260eaa4f14f 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -595,6 +595,7 @@ Modules/_testmultiphase.c - slots_nonmodule_with_exec_slots - Modules/_testmultiphase.c - testexport_methods - Modules/_testmultiphase.c - uninitialized_def - Modules/_testsinglephase.c - global_state - +Modules/_testsinglephase.c - static_module_circular - Modules/_xxtestfuzz/_xxtestfuzz.c - _fuzzmodule - Modules/_xxtestfuzz/_xxtestfuzz.c - module_methods - Modules/_xxtestfuzz/fuzzer.c - RE_FLAG_DEBUG - diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index fdb635486b9..77e5e932feb 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -300,9 +300,13 @@ def analyze_stack(op: parser.InstDef, replace_op_arg_1: str | None = None) -> St convert_stack_item(i, replace_op_arg_1) for i in op.inputs if isinstance(i, parser.StackEffect) ] outputs: list[StackItem] = [convert_stack_item(i, replace_op_arg_1) for i in op.outputs] + # Mark variables with matching names at the base of the stack as "peek" + modified = False for input, output in zip(inputs, outputs): - if input.name == output.name: + if input.name == output.name and not modified: input.peek = 
output.peek = True + else: + modified = True return StackEffect(inputs, outputs) diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index cc9eb8a0e90..acb5ac3a50a 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -84,7 +84,7 @@ def replace_error( next(tkn_iter) # RPAREN next(tkn_iter) # Semi colon out.emit(") ") - c_offset = stack.peek_offset.to_c() + c_offset = stack.peek_offset() try: offset = -int(c_offset) close = ";\n" diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index 5aecac39aef..e0038631c1b 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -47,6 +47,9 @@ class StackOffset: def empty() -> "StackOffset": return StackOffset([], []) + def copy(self) -> "StackOffset": + return StackOffset(self.popped[:], self.pushed[:]) + def pop(self, item: StackItem) -> None: self.popped.append(var_size(item)) @@ -120,14 +123,11 @@ class Stack: def __init__(self) -> None: self.top_offset = StackOffset.empty() self.base_offset = StackOffset.empty() - self.peek_offset = StackOffset.empty() self.variables: list[StackItem] = [] self.defined: set[str] = set() def pop(self, var: StackItem) -> str: self.top_offset.pop(var) - if not var.peek: - self.peek_offset.pop(var) indirect = "&" if var.is_array() else "" if self.variables: popped = self.variables.pop() @@ -201,9 +201,16 @@ def flush(self, out: CWriter, cast_type: str = "PyObject *") -> None: self.variables = [] self.base_offset.clear() self.top_offset.clear() - self.peek_offset.clear() out.start_line() + def peek_offset(self) -> str: + peek = self.base_offset.copy() + for var in self.variables: + if not var.peek: + break + peek.push(var) + return peek.to_c() + def as_comment(self) -> str: return f"/* Variables: {[v.name for v in self.variables]}. Base offset: {self.base_offset.to_c()}. Top offset: {self.top_offset.to_c()} */" diff --git a/Tools/clinic/libclinic/dsl_parser.py b/Tools/clinic/libclinic/dsl_parser.py index cb18374cf07..27c042155eb 100644 --- a/Tools/clinic/libclinic/dsl_parser.py +++ b/Tools/clinic/libclinic/dsl_parser.py @@ -915,8 +915,8 @@ def parse_parameter(self, line: str) -> None: f"invalid parameter declaration (**kwargs?): {line!r}") if function_args.vararg: - if any(p.is_vararg() for p in self.function.parameters.values()): - fail("Too many var args") + self.check_previous_star() + self.check_remaining_star() is_vararg = True parameter = function_args.vararg else: @@ -1124,6 +1124,9 @@ def bad_node(self, node: ast.AST) -> None: key = f"{parameter_name}_as_{c_name}" if c_name else parameter_name self.function.parameters[key] = p + if is_vararg: + self.keyword_only = True + @staticmethod def parse_converter( annotation: ast.expr | None @@ -1165,8 +1168,6 @@ def parse_star(self, function: Function, version: VersionTuple | None) -> None: the marker will take effect (None means it is already in effect). 
""" if version is None: - if self.keyword_only: - fail(f"Function {function.name!r} uses '*' more than once.") self.check_previous_star() self.check_remaining_star() self.keyword_only = True @@ -1456,6 +1457,7 @@ def add_parameter(text: str) -> None: if p.is_vararg(): p_lines.append("*") + added_star = True name = p.converter.signature_name or p.name p_lines.append(name) @@ -1565,7 +1567,8 @@ def check_remaining_star(self, lineno: int | None = None) -> None: for p in reversed(self.function.parameters.values()): if self.keyword_only: - if p.kind == inspect.Parameter.KEYWORD_ONLY: + if (p.kind == inspect.Parameter.KEYWORD_ONLY or + p.kind == inspect.Parameter.VAR_POSITIONAL): return elif self.deprecated_positional: if p.deprecated_positional == self.deprecated_positional: @@ -1575,12 +1578,11 @@ def check_remaining_star(self, lineno: int | None = None) -> None: fail(f"Function {self.function.name!r} specifies {symbol!r} " f"without following parameters.", line_number=lineno) - def check_previous_star(self, lineno: int | None = None) -> None: + def check_previous_star(self) -> None: assert isinstance(self.function, Function) - for p in self.function.parameters.values(): - if p.kind == inspect.Parameter.VAR_POSITIONAL: - fail(f"Function {self.function.name!r} uses '*' more than once.") + if self.keyword_only: + fail(f"Function {self.function.name!r} uses '*' more than once.") def do_post_block_processing_cleanup(self, lineno: int) -> None: diff --git a/Tools/clinic/libclinic/parse_args.py b/Tools/clinic/libclinic/parse_args.py index 0f67901dd86..96c9b919bff 100644 --- a/Tools/clinic/libclinic/parse_args.py +++ b/Tools/clinic/libclinic/parse_args.py @@ -262,7 +262,7 @@ def __init__(self, func: Function, codegen: CodeGen) -> None: if p.is_keyword_only(): assert not p.is_positional_only() if not p.is_optional(): - self.min_kw_only = i - self.max_pos + self.min_kw_only = i - self.max_pos - int(self.vararg != NO_VARARG) elif p.is_vararg(): self.pseudo_args += 1 self.vararg = i - 1 diff --git a/Tools/jit/README.md b/Tools/jit/README.md index 73d2deebbbc..bc6f793b296 100644 --- a/Tools/jit/README.md +++ b/Tools/jit/README.md @@ -1,7 +1,7 @@ The JIT Compiler ================ -This version of CPython can be built with an experimental just-in-time compiler. While most everything you already know about building and using CPython is unchanged, you will probably need to install a compatible version of LLVM first. +This version of CPython can be built with an experimental just-in-time compiler[^pep-744]. While most everything you already know about building and using CPython is unchanged, you will probably need to install a compatible version of LLVM first. ## Installing LLVM @@ -57,6 +57,10 @@ For `PCbuild`-based builds, pass the new `--experimental-jit` option to `build.b For all other builds, pass the new `--enable-experimental-jit` option to `configure`. -Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform. +Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform. + +The JIT can also be enabled or disabled using the `PYTHON_JIT` environment variable, even on builds where it is enabled or disabled by default. More details about configuring CPython with the JIT and optional values for `--enable-experimental-jit` can be found [here](https://docs.python.org/dev/whatsnew/3.13.html#experimental-jit-compiler). 
+ +[^pep-744]: [PEP 744](https://peps.python.org/pep-0744/) [^why-llvm]: Clang is specifically needed because it's the only C compiler with support for guaranteed tail calls (`musttail`), which are required by CPython's continuation-passing-style approach to JIT compilation. Since LLVM also includes other functionalities we need (namely, object file parsing and disassembly), it's convenient to only support one toolchain at this time. diff --git a/Tools/jit/_targets.py b/Tools/jit/_targets.py index 5604c429bcf..73d10a12875 100644 --- a/Tools/jit/_targets.py +++ b/Tools/jit/_targets.py @@ -221,7 +221,12 @@ def build( file.write("\n") for line in _writer.dump(stencil_groups): file.write(f"{line}\n") - jit_stencils_new.replace(jit_stencils) + try: + jit_stencils_new.replace(jit_stencils) + except FileNotFoundError: + # another process probably already moved the file + if not jit_stencils.is_file(): + raise finally: jit_stencils_new.unlink(missing_ok=True) diff --git a/Tools/jit/ignore-tests-emulated-linux.txt b/Tools/jit/ignore-tests-emulated-linux.txt index 9e0f13f4050..dbb364673b5 100644 --- a/Tools/jit/ignore-tests-emulated-linux.txt +++ b/Tools/jit/ignore-tests-emulated-linux.txt @@ -14,6 +14,7 @@ test.test_init.ProcessPoolForkFailingInitializerTest.test_initializer test.test_logging.ConfigDictTest.test_111615 test.test_logging.ConfigDictTest.test_config_queue_handler test.test_logging.ConfigDictTest.test_multiprocessing_queues +test.test_logging.ConfigDictTest.test_config_queue_handler_multiprocessing_context test.test_os.ForkTests.test_fork_warns_when_non_python_thread_exists test.test_os.TimerfdTests.test_timerfd_initval test.test_os.TimerfdTests.test_timerfd_interval diff --git a/Tools/msi/buildrelease.bat b/Tools/msi/buildrelease.bat index 839f6204d9e..77fb4779208 100644 --- a/Tools/msi/buildrelease.bat +++ b/Tools/msi/buildrelease.bat @@ -127,7 +127,7 @@ if "%1" EQU "x86" ( set OUTDIR_PLAT=amd64 set OBJDIR_PLAT=x64 ) else if "%1" EQU "ARM64" ( - set BUILD=%Py_OutDir%amd64\ + set BUILD=%Py_OutDir%arm64\ set PGO=%~2 set BUILD_PLAT=ARM64 set OUTDIR_PLAT=arm64 diff --git a/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp b/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp index 7cddda9b065..094ddba4f1a 100644 --- a/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp +++ b/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp @@ -213,6 +213,7 @@ static struct { LPCWSTR regName; LPCWSTR variableName; } OPTIONAL_FEATURES[] = { { L"Shortcuts", L"Shortcuts" }, // Include_launcher and AssociateFiles are handled separately and so do // not need to be included in this list. 
+ { L"freethreaded", L"Include_freethreaded" }, { nullptr, nullptr } }; diff --git a/Tools/peg_generator/peg_extension/peg_extension.c b/Tools/peg_generator/peg_extension/peg_extension.c index b081240ffff..1587d53d594 100644 --- a/Tools/peg_generator/peg_extension/peg_extension.c +++ b/Tools/peg_generator/peg_extension/peg_extension.c @@ -108,7 +108,7 @@ parse_string(PyObject *self, PyObject *args, PyObject *kwds) static PyObject * clear_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { -#if defined(PY_DEBUG) +#if defined(Py_DEBUG) _PyPegen_clear_memo_statistics(); #endif Py_RETURN_NONE; @@ -117,7 +117,7 @@ clear_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) static PyObject * get_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { -#if defined(PY_DEBUG) +#if defined(Py_DEBUG) return _PyPegen_get_memo_statistics(); #else Py_RETURN_NONE; @@ -128,7 +128,7 @@ get_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) static PyObject * dump_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { -#if defined(PY_DEBUG) +#if defined(Py_DEBUG) PyObject *list = _PyPegen_get_memo_statistics(); if (list == NULL) { return NULL; diff --git a/Tools/requirements-hypothesis.txt b/Tools/requirements-hypothesis.txt index 9d5a18c881b..66898885c0a 100644 --- a/Tools/requirements-hypothesis.txt +++ b/Tools/requirements-hypothesis.txt @@ -1,4 +1,4 @@ # Requirements file for hypothesis that # we use to run our property-based tests in CI. -hypothesis==6.100.2 +hypothesis==6.111.2 diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index baa16102068..eae0e0c5e87 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -43,13 +43,14 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ + "1.1.1w", ] OPENSSL_RECENT_VERSIONS = [ - "1.1.1w", - "3.0.13", - "3.1.5", - "3.2.1", + "3.0.15", + "3.1.7", + "3.2.3", + "3.3.2", ] LIBRESSL_OLD_VERSIONS = [ @@ -397,6 +398,7 @@ def run_python_tests(self, tests, network=True): class BuildOpenSSL(AbstractBuilder): library = "OpenSSL" url_templates = ( + "https://github.com/openssl/openssl/releases/download/openssl-{v}/openssl-{v}.tar.gz", "https://www.openssl.org/source/openssl-{v}.tar.gz", "https://www.openssl.org/source/old/{s}/openssl-{v}.tar.gz" ) @@ -439,6 +441,7 @@ def short_version(self): parsed = parsed[:2] return ".".join(str(i) for i in parsed) + class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" url_templates = ( diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt index 2986efe6774..6add088daef 100644 --- a/Tools/tsan/suppressions_free_threading.txt +++ b/Tools/tsan/suppressions_free_threading.txt @@ -23,26 +23,10 @@ race:free_threadstate # These warnings trigger directly in a CPython function. 
-race_top:_add_to_weak_set -race_top:_in_weak_set -race_top:_PyEval_EvalFrameDefault race_top:assign_version_tag -race_top:insertdict -race_top:lookup_tp_dict race_top:new_reference -race_top:set_contains_key -# https://gist.github.com/colesbury/d13d033f413b4ad07929d044bed86c35 -race_top:set_discard_entry -race_top:_PyDict_CheckConsistency -race_top:_Py_dict_lookup_threadsafe race_top:_multiprocessing_SemLock_acquire_impl -race_top:dictiter_new -race_top:dictresize -race_top:insert_to_emptydict -race_top:insertdict race_top:list_get_item_ref -race_top:make_pending_calls -race_top:set_add_entry race_top:_Py_slot_tp_getattr_hook race_top:add_threadstate race_top:dump_traceback diff --git a/Tools/wasm/config.site-wasm32-wasi b/Tools/wasm/config.site-wasm32-wasi index 4a1a466a4ab..c5d8b3e205d 100644 --- a/Tools/wasm/config.site-wasm32-wasi +++ b/Tools/wasm/config.site-wasm32-wasi @@ -49,3 +49,11 @@ ac_cv_func_preadv=no ac_cv_func_readv=no ac_cv_func_pwritev=no ac_cv_func_writev=no + +# WASI SDK 22 added multiple stubs which we don't implement. +# https://github.com/python/cpython/issues/120371 +ac_cv_func_chmod=no +ac_cv_func_fchmod=no +ac_cv_func_fchmodat=no +ac_cv_func_statvfs=no +ac_cv_func_fstatvfs=no diff --git a/Tools/wasm/wasi.py b/Tools/wasm/wasi.py index efb005e53ab..d4394d7dc1d 100644 --- a/Tools/wasm/wasi.py +++ b/Tools/wasm/wasi.py @@ -26,6 +26,9 @@ LOCAL_SETUP = CHECKOUT / "Modules" / "Setup.local" LOCAL_SETUP_MARKER = "# Generated by Tools/wasm/wasi.py\n".encode("utf-8") +WASMTIME_VAR_NAME = "WASMTIME" +WASMTIME_HOST_RUNNER_VAR = f"{{{WASMTIME_VAR_NAME}}}" + def updated_env(updates={}): """Create a new dict representing the environment to use. @@ -215,11 +218,20 @@ def configure_wasi_python(context, working_dir): # Use PYTHONPATH to include sysconfig data which must be anchored to the # WASI guest's `/` directory. - host_runner = context.host_runner.format(GUEST_DIR="/", - HOST_DIR=CHECKOUT, - ENV_VAR_NAME="PYTHONPATH", - ENV_VAR_VALUE=f"/{sysconfig_data}", - PYTHON_WASM=working_dir / "python.wasm") + args = {"GUEST_DIR": "/", + "HOST_DIR": CHECKOUT, + "ENV_VAR_NAME": "PYTHONPATH", + "ENV_VAR_VALUE": f"/{sysconfig_data}", + "PYTHON_WASM": working_dir / "python.wasm"} + # Check dynamically for wasmtime in case it was specified manually via + # `--host-runner`. + if WASMTIME_HOST_RUNNER_VAR in context.host_runner: + if wasmtime := shutil.which("wasmtime"): + args[WASMTIME_VAR_NAME] = wasmtime + else: + raise FileNotFoundError("wasmtime not found; download from " + "https://github.com/bytecodealliance/wasmtime") + host_runner = context.host_runner.format_map(args) env_additions = {"CONFIG_SITE": config_site, "HOSTRUNNER": host_runner} build_python = os.fsdecode(build_python_path()) # The path to `configure` MUST be relative, else `python.wasm` is unable @@ -277,7 +289,7 @@ def clean_contents(context): def main(): - default_host_runner = (f"{shutil.which('wasmtime')} run " + default_host_runner = (f"{WASMTIME_HOST_RUNNER_VAR} run " # Make sure the stack size will work for a pydebug # build. 
# The 8388608 value comes from `ulimit -s` under Linux diff --git a/configure b/configure index e90f4759d3f..7cdd386c387 100755 --- a/configure +++ b/configure @@ -840,7 +840,7 @@ LIBPL PY_ENABLE_SHARED PLATLIBDIR BINLIBDEST -MODULE_LDFLAGS +LIBPYTHON MODULE_DEPS_SHARED EXT_SUFFIX ALT_SOABI @@ -930,6 +930,7 @@ DEF_MAKE_RULE DEF_MAKE_ALL_RULE JIT_STENCILS_H REGEN_JIT_COMMAND +ABI_THREAD ABIFLAGS LN MKDIR_P @@ -981,6 +982,7 @@ IPHONEOS_DEPLOYMENT_TARGET EXPORT_MACOSX_DEPLOYMENT_TARGET CONFIGURE_MACOSX_DEPLOYMENT_TARGET _PYTHON_HOST_PLATFORM +APP_STORE_COMPLIANCE_PATCH INSTALLTARGETS FRAMEWORKINSTALLAPPSPREFIX FRAMEWORKUNIXTOOLSPREFIX @@ -1076,6 +1078,7 @@ enable_universalsdk with_universal_archs with_framework_name enable_framework +with_app_store_compliance with_emscripten_target enable_wasm_dynamic_linking enable_wasm_pthreads @@ -1855,6 +1858,10 @@ Optional Packages: specify the name for the python framework on macOS only valid when --enable-framework is set. see Mac/README.rst (default is 'Python') + --with-app-store-compliance=[PATCH-FILE] + Enable any patches required for compiliance with app + stores. Optional PATCH-FILE specifies the custom + patch to apply. --with-emscripten-target=[browser|node] Emscripten platform --with-suffix=SUFFIX set executable suffix to SUFFIX (default is empty, @@ -4101,7 +4108,7 @@ printf "%s\n" "\"$MACHDEP\"" >&6; } # On cross-compile builds, configure will look for a host-specific compiler by # prepending the user-provided host triple to the required binary name. # -# On iOS, this results in binaries like "arm64-apple-ios12.0-simulator-gcc", +# On iOS, this results in binaries like "arm64-apple-ios13.0-simulator-gcc", # which isn't a binary that exists, and isn't very convenient, as it contains the # iOS version. As the default cross-compiler name won't exist, configure falls # back to gcc, which *definitely* won't work. We're providing wrapper scripts for @@ -4137,9 +4144,9 @@ if test -z "$CPP"; then fi if test -z "$CXX"; then case "$host" in - aarch64-apple-ios*-simulator) CXX=arm64-apple-ios-simulator-clang ;; - aarch64-apple-ios*) CXX=arm64-apple-ios-clang ;; - x86_64-apple-ios*-simulator) CXX=x86_64-apple-ios-simulator-clang ;; + aarch64-apple-ios*-simulator) CXX=arm64-apple-ios-simulator-clang++ ;; + aarch64-apple-ios*) CXX=arm64-apple-ios-clang++ ;; + x86_64-apple-ios*-simulator) CXX=x86_64-apple-ios-simulator-clang++ ;; *) esac fi @@ -4430,6 +4437,53 @@ fi printf "%s\n" "#define _PYTHONFRAMEWORK \"${PYTHONFRAMEWORK}\"" >>confdefs.h +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-app-store-compliance" >&5 +printf %s "checking for --with-app-store-compliance... " >&6; } + +# Check whether --with-app_store_compliance was given. +if test ${with_app_store_compliance+y} +then : + withval=$with_app_store_compliance; + case "$withval" in + yes) + case $ac_sys_system in + Darwin|iOS) + # iOS is able to share the macOS patch + APP_STORE_COMPLIANCE_PATCH="Mac/Resources/app-store-compliance.patch" + ;; + *) as_fn_error $? 
"no default app store compliance patch available for $ac_sys_system" "$LINENO" 5 ;; + esac + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: applying default app store compliance patch" >&5 +printf "%s\n" "applying default app store compliance patch" >&6; } + ;; + *) + APP_STORE_COMPLIANCE_PATCH="${withval}" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: applying custom app store compliance patch" >&5 +printf "%s\n" "applying custom app store compliance patch" >&6; } + ;; + esac + +else $as_nop + + case $ac_sys_system in + iOS) + # Always apply the compliance patch on iOS; we can use the macOS patch + APP_STORE_COMPLIANCE_PATCH="Mac/Resources/app-store-compliance.patch" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: applying default app store compliance patch" >&5 +printf "%s\n" "applying default app store compliance patch" >&6; } + ;; + *) + # No default app compliance patching on any other platform + APP_STORE_COMPLIANCE_PATCH= + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: not patching for app store compliance" >&5 +printf "%s\n" "not patching for app store compliance" >&6; } + ;; + esac + +fi + + + if test "$cross_compiling" = yes; then case "$host" in @@ -4451,8 +4505,12 @@ if test "$cross_compiling" = yes; then _host_device=${_host_device:=os} # IPHONEOS_DEPLOYMENT_TARGET is the minimum supported iOS version - IPHONEOS_DEPLOYMENT_TARGET=${_host_os:3} - IPHONEOS_DEPLOYMENT_TARGET=${IPHONEOS_DEPLOYMENT_TARGET:=12.0} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking iOS deployment target" >&5 +printf %s "checking iOS deployment target... " >&6; } + IPHONEOS_DEPLOYMENT_TARGET=$(echo ${_host_os} | cut -c4-) + IPHONEOS_DEPLOYMENT_TARGET=${IPHONEOS_DEPLOYMENT_TARGET:=13.0} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $IPHONEOS_DEPLOYMENT_TARGET" >&5 +printf "%s\n" "$IPHONEOS_DEPLOYMENT_TARGET" >&6; } case "$host_cpu" in aarch64) @@ -8089,7 +8147,9 @@ fi # For calculating the .so ABI tag. + ABIFLAGS="" +ABI_THREAD="" # Check for --disable-gil # --disable-gil @@ -8119,6 +8179,7 @@ printf "%s\n" "#define Py_GIL_DISABLED 1" >>confdefs.h # Add "t" for "threaded" ABIFLAGS="${ABIFLAGS}t" + ABI_THREAD="t" fi # Check for --with-pydebug @@ -8171,6 +8232,10 @@ printf "%s\n" "#define Py_TRACE_REFS 1" >>confdefs.h fi +if test "$disable_gil" = "yes" -a "$with_trace_refs" = "yes"; +then + as_fn_error $? "--disable-gil cannot be used with --with-trace-refs" "$LINENO" 5 +fi # Check for --enable-pystats { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --enable-pystats" >&5 @@ -13133,8 +13198,6 @@ case $PLATFORM_TRIPLET in #( perf_trampoline=yes ;; #( aarch64-linux-gnu) : perf_trampoline=yes ;; #( - riscv64-linux-gnu) : - perf_trampoline=yes ;; #( *) : perf_trampoline=no ;; @@ -24519,16 +24582,19 @@ LDVERSION='$(VERSION)$(ABIFLAGS)' { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $LDVERSION" >&5 printf "%s\n" "$LDVERSION" >&6; } -# Configure the flags and dependencies used when compiling shared modules +# Configure the flags and dependencies used when compiling shared modules. +# Do not rename LIBPYTHON - it's accessed via sysconfig by package build +# systems (e.g. Meson) to decide whether to link extension modules against +# libpython. MODULE_DEPS_SHARED='$(MODULE_DEPS_STATIC) $(EXPORTSYMS)' -MODULE_LDFLAGS='' +LIBPYTHON='' # On Android and Cygwin the shared libraries must be linked with libpython. 
if test "$PY_ENABLE_SHARED" = "1" && ( test -n "$ANDROID_API_LEVEL" || test "$MACHDEP" = "cygwin"); then MODULE_DEPS_SHARED="$MODULE_DEPS_SHARED \$(LDLIBRARY)" - MODULE_LDFLAGS="\$(BLDLIBRARY)" + LIBPYTHON="\$(BLDLIBRARY)" fi # On iOS the shared libraries must be linked with the Python framework @@ -24538,11 +24604,11 @@ fi -BINLIBDEST='$(LIBDIR)/python$(VERSION)' +BINLIBDEST='$(LIBDIR)/python$(VERSION)$(ABI_THREAD)' # Check for --with-platlibdir -# /usr/$LIDIRNAME/python$VERSION +# /usr/$PLATLIBDIR/python$(VERSION)$(ABI_THREAD) PLATLIBDIR="lib" { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-platlibdir" >&5 @@ -24561,7 +24627,7 @@ then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } PLATLIBDIR="$withval" - BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)' + BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)$(ABI_THREAD)' else { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } @@ -24575,9 +24641,9 @@ fi if test x$PLATFORM_TRIPLET = x; then - LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}" + LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}" else - LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}" + LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}-${PLATFORM_TRIPLET}" fi @@ -25870,24 +25936,10 @@ have_curses=no have_panel=no -ac_fn_c_check_header_compile "$LINENO" "curses.h" "ac_cv_header_curses_h" "$ac_includes_default" -if test "x$ac_cv_header_curses_h" = xyes -then : - printf "%s\n" "#define HAVE_CURSES_H 1" >>confdefs.h - -fi -ac_fn_c_check_header_compile "$LINENO" "ncurses.h" "ac_cv_header_ncurses_h" "$ac_includes_default" -if test "x$ac_cv_header_ncurses_h" = xyes -then : - printf "%s\n" "#define HAVE_NCURSES_H 1" >>confdefs.h - -fi +# Check for ncursesw/panelw first. If that fails, try ncurses/panel. -if test "x$ac_cv_header_ncurses_h" = xyes -then : - if test "$ac_sys_system" != "Darwin"; then pkg_failed=no { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for ncursesw" >&5 @@ -25947,148 +25999,102 @@ fi # Put the nasty error message in config.log where it belongs echo "$CURSES_PKG_ERRORS" >&5 - - save_CFLAGS=$CFLAGS -save_CPPFLAGS=$CPPFLAGS -save_LDFLAGS=$LDFLAGS -save_LIBS=$LIBS - - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for initscr in -lncursesw" >&5 -printf %s "checking for initscr in -lncursesw... " >&6; } -if test ${ac_cv_lib_ncursesw_initscr+y} -then : - printf %s "(cached) " >&6 -else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lncursesw $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -char initscr (); -int -main (void) -{ -return initscr (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO" -then : - ac_cv_lib_ncursesw_initscr=yes -else $as_nop - ac_cv_lib_ncursesw_initscr=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ncursesw_initscr" >&5 -printf "%s\n" "$ac_cv_lib_ncursesw_initscr" >&6; } -if test "x$ac_cv_lib_ncursesw_initscr" = xyes -then : - - printf "%s\n" "#define HAVE_NCURSESW 1" >>confdefs.h - - have_curses=ncursesw - CURSES_CFLAGS=${CURSES_CFLAGS-""} - CURSES_LIBS=${CURSES_LIBS-"-lncursesw"} - -fi - - -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS - - - + have_curses=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } + have_curses=no +else + CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS + CURSES_LIBS=$pkg_cv_CURSES_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } - save_CFLAGS=$CFLAGS -save_CPPFLAGS=$CPPFLAGS -save_LDFLAGS=$LDFLAGS -save_LIBS=$LIBS +printf "%s\n" "#define HAVE_NCURSESW 1" >>confdefs.h + have_curses=yes - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for initscr in -lncursesw" >&5 -printf %s "checking for initscr in -lncursesw... " >&6; } -if test ${ac_cv_lib_ncursesw_initscr+y} -then : - printf %s "(cached) " >&6 -else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lncursesw $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for panelw" >&5 +printf %s "checking for panelw... " >&6; } -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -char initscr (); -int -main (void) -{ -return initscr (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO" -then : - ac_cv_lib_ncursesw_initscr=yes -else $as_nop - ac_cv_lib_ncursesw_initscr=no +if test -n "$PANEL_CFLAGS"; then + pkg_cv_PANEL_CFLAGS="$PANEL_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panelw\""; } >&5 + ($PKG_CONFIG --exists --print-errors "panelw") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_PANEL_CFLAGS=`$PKG_CONFIG --cflags "panelw" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes fi -rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS + else + pkg_failed=untried fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ncursesw_initscr" >&5 -printf "%s\n" "$ac_cv_lib_ncursesw_initscr" >&6; } -if test "x$ac_cv_lib_ncursesw_initscr" = xyes -then : - - printf "%s\n" "#define HAVE_NCURSESW 1" >>confdefs.h - - have_curses=ncursesw - CURSES_CFLAGS=${CURSES_CFLAGS-""} - CURSES_LIBS=${CURSES_LIBS-"-lncursesw"} - +if test -n "$PANEL_LIBS"; then + pkg_cv_PANEL_LIBS="$PANEL_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panelw\""; } >&5 + ($PKG_CONFIG --exists --print-errors "panelw") 2>&5 + ac_status=$? 
+ printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_PANEL_LIBS=`$PKG_CONFIG --libs "panelw" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried fi -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panelw" 2>&1` + else + PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panelw" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$PANEL_PKG_ERRORS" >&5 + have_panel=no +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + have_panel=no else - CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS - CURSES_LIBS=$pkg_cv_CURSES_LIBS + PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS + PANEL_LIBS=$pkg_cv_PANEL_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - printf "%s\n" "#define HAVE_NCURSESW 1" >>confdefs.h - - have_curses=ncursesw +printf "%s\n" "#define HAVE_PANELW 1" >>confdefs.h + have_panel=yes +fi fi - fi - if test "x$have_curses" = xno + +if test "x$have_curses" = xno then : + pkg_failed=no { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for ncurses" >&5 printf %s "checking for ncurses... " >&6; } @@ -26147,197 +26153,35 @@ fi # Put the nasty error message in config.log where it belongs echo "$CURSES_PKG_ERRORS" >&5 - - save_CFLAGS=$CFLAGS -save_CPPFLAGS=$CPPFLAGS -save_LDFLAGS=$LDFLAGS -save_LIBS=$LIBS - - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for initscr in -lncurses" >&5 -printf %s "checking for initscr in -lncurses... " >&6; } -if test ${ac_cv_lib_ncurses_initscr+y} -then : - printf %s "(cached) " >&6 -else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lncurses $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -char initscr (); -int -main (void) -{ -return initscr (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO" -then : - ac_cv_lib_ncurses_initscr=yes -else $as_nop - ac_cv_lib_ncurses_initscr=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ncurses_initscr" >&5 -printf "%s\n" "$ac_cv_lib_ncurses_initscr" >&6; } -if test "x$ac_cv_lib_ncurses_initscr" = xyes -then : - - have_curses=ncurses - CURSES_CFLAGS=${CURSES_CFLAGS-""} - CURSES_LIBS=${CURSES_LIBS-"-lncurses"} - -fi - - -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS - - - + have_curses=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } + have_curses=no +else + CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS + CURSES_LIBS=$pkg_cv_CURSES_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } - save_CFLAGS=$CFLAGS -save_CPPFLAGS=$CPPFLAGS -save_LDFLAGS=$LDFLAGS -save_LIBS=$LIBS - - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for initscr in -lncurses" >&5 -printf %s "checking for initscr in -lncurses... " >&6; } -if test ${ac_cv_lib_ncurses_initscr+y} -then : - printf %s "(cached) " >&6 -else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lncurses $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -char initscr (); -int -main (void) -{ -return initscr (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO" -then : - ac_cv_lib_ncurses_initscr=yes -else $as_nop - ac_cv_lib_ncurses_initscr=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ncurses_initscr" >&5 -printf "%s\n" "$ac_cv_lib_ncurses_initscr" >&6; } -if test "x$ac_cv_lib_ncurses_initscr" = xyes -then : - - have_curses=ncurses - CURSES_CFLAGS=${CURSES_CFLAGS-""} - CURSES_LIBS=${CURSES_LIBS-"-lncurses"} - -fi - - -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS - - - -else - CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS - CURSES_LIBS=$pkg_cv_CURSES_LIBS - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -printf "%s\n" "yes" >&6; } - - have_curses=ncurses - -fi - -fi - - -fi -CURSES_CFLAGS=$(echo $CURSES_CFLAGS | sed 's/-D_XOPEN_SOURCE=600//g') - -if test "$have_curses" != no -a "$ac_sys_system" = "Darwin"; then - - as_fn_append CURSES_CFLAGS " -D_XOPEN_SOURCE_EXTENDED=1" - printf "%s\n" "#define HAVE_NCURSESW 1" >>confdefs.h - -fi - - -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking curses module flags" >&5 -printf %s "checking curses module flags... 
" >&6; } -if test "x$have_curses" = xno -then : - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 -printf "%s\n" "no" >&6; } - -else $as_nop - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $have_curses (CFLAGS: $CURSES_CFLAGS, LIBS: $CURSES_LIBS)" >&5 -printf "%s\n" "$have_curses (CFLAGS: $CURSES_CFLAGS, LIBS: $CURSES_LIBS)" >&6; } - -fi - -ac_fn_c_check_header_compile "$LINENO" "panel.h" "ac_cv_header_panel_h" "$ac_includes_default" -if test "x$ac_cv_header_panel_h" = xyes -then : - printf "%s\n" "#define HAVE_PANEL_H 1" >>confdefs.h - -fi - - -if test "x$ac_cv_header_panel_h" = xyes -then : - - - if test "$ac_sys_system" != "Darwin"; then - if test "x$have_curses" = xncursesw -then : +printf "%s\n" "#define HAVE_NCURSES 1" >>confdefs.h + have_curses=yes pkg_failed=no -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for panelw" >&5 -printf %s "checking for panelw... " >&6; } +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for panel" >&5 +printf %s "checking for panel... " >&6; } if test -n "$PANEL_CFLAGS"; then pkg_cv_PANEL_CFLAGS="$PANEL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panelw\""; } >&5 - ($PKG_CONFIG --exists --print-errors "panelw") 2>&5 + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panel\""; } >&5 + ($PKG_CONFIG --exists --print-errors "panel") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then - pkg_cv_PANEL_CFLAGS=`$PKG_CONFIG --cflags "panelw" 2>/dev/null` + pkg_cv_PANEL_CFLAGS=`$PKG_CONFIG --cflags "panel" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes @@ -26349,12 +26193,12 @@ if test -n "$PANEL_LIBS"; then pkg_cv_PANEL_LIBS="$PANEL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panelw\""; } >&5 - ($PKG_CONFIG --exists --print-errors "panelw") 2>&5 + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panel\""; } >&5 + ($PKG_CONFIG --exists --print-errors "panel") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then - pkg_cv_PANEL_LIBS=`$PKG_CONFIG --libs "panelw" 2>/dev/null` + pkg_cv_PANEL_LIBS=`$PKG_CONFIG --libs "panel" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes @@ -26375,286 +26219,172 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panelw" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panel" 2>&1` else - PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panelw" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panel" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PANEL_PKG_ERRORS" >&5 + have_panel=no +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + have_panel=no +else + PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS + PANEL_LIBS=$pkg_cv_PANEL_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } - save_CFLAGS=$CFLAGS -save_CPPFLAGS=$CPPFLAGS -save_LDFLAGS=$LDFLAGS -save_LIBS=$LIBS - - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for update_panels in -lpanelw" >&5 -printf %s "checking for update_panels in -lpanelw... " >&6; } -if test ${ac_cv_lib_panelw_update_panels+y} -then : - printf %s "(cached) " >&6 -else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lpanelw $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ +printf "%s\n" "#define HAVE_PANEL 1" >>confdefs.h -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -char update_panels (); -int -main (void) -{ -return update_panels (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO" -then : - ac_cv_lib_panelw_update_panels=yes -else $as_nop - ac_cv_lib_panelw_update_panels=no + have_panel=yes fi -rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_panelw_update_panels" >&5 -printf "%s\n" "$ac_cv_lib_panelw_update_panels" >&6; } -if test "x$ac_cv_lib_panelw_update_panels" = xyes -then : - have_panel=panelw - PANEL_CFLAGS=${PANEL_CFLAGS-""} - PANEL_LIBS=${PANEL_LIBS-"-lpanelw"} fi - -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS - - - -elif test $pkg_failed = untried; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 -printf "%s\n" "no" >&6; } - - save_CFLAGS=$CFLAGS +save_CFLAGS=$CFLAGS save_CPPFLAGS=$CPPFLAGS save_LDFLAGS=$LDFLAGS save_LIBS=$LIBS - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for update_panels in -lpanelw" >&5 -printf %s "checking for update_panels in -lpanelw... " >&6; } -if test ${ac_cv_lib_panelw_update_panels+y} + # Make sure we've got the header defines. + as_fn_append CPPFLAGS " $CURSES_CFLAGS $PANEL_CFLAGS" + ac_fn_c_check_header_compile "$LINENO" "ncursesw/curses.h" "ac_cv_header_ncursesw_curses_h" "$ac_includes_default" +if test "x$ac_cv_header_ncursesw_curses_h" = xyes then : - printf %s "(cached) " >&6 -else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lpanelw $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ + printf "%s\n" "#define HAVE_NCURSESW_CURSES_H 1" >>confdefs.h -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -char update_panels (); -int -main (void) -{ -return update_panels (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO" -then : - ac_cv_lib_panelw_update_panels=yes -else $as_nop - ac_cv_lib_panelw_update_panels=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_panelw_update_panels" >&5 -printf "%s\n" "$ac_cv_lib_panelw_update_panels" >&6; } -if test "x$ac_cv_lib_panelw_update_panels" = xyes +ac_fn_c_check_header_compile "$LINENO" "ncursesw/ncurses.h" "ac_cv_header_ncursesw_ncurses_h" "$ac_includes_default" +if test "x$ac_cv_header_ncursesw_ncurses_h" = xyes then : - - have_panel=panelw - PANEL_CFLAGS=${PANEL_CFLAGS-""} - PANEL_LIBS=${PANEL_LIBS-"-lpanelw"} + printf "%s\n" "#define HAVE_NCURSESW_NCURSES_H 1" >>confdefs.h fi - - -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS - - - -else - PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS - PANEL_LIBS=$pkg_cv_PANEL_LIBS - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -printf "%s\n" "yes" >&6; } - - have_panel=panelw +ac_fn_c_check_header_compile "$LINENO" "ncursesw/panel.h" "ac_cv_header_ncursesw_panel_h" "$ac_includes_default" +if test "x$ac_cv_header_ncursesw_panel_h" = xyes +then : + printf "%s\n" "#define HAVE_NCURSESW_PANEL_H 1" >>confdefs.h fi +ac_fn_c_check_header_compile "$LINENO" "ncurses/curses.h" "ac_cv_header_ncurses_curses_h" "$ac_includes_default" +if test "x$ac_cv_header_ncurses_curses_h" = xyes +then : + printf "%s\n" "#define HAVE_NCURSES_CURSES_H 1" >>confdefs.h fi - fi - - if test "x$have_curses" = xncurses +ac_fn_c_check_header_compile "$LINENO" "ncurses/ncurses.h" "ac_cv_header_ncurses_ncurses_h" "$ac_includes_default" +if test "x$ac_cv_header_ncurses_ncurses_h" = xyes then : + printf "%s\n" "#define HAVE_NCURSES_NCURSES_H 1" >>confdefs.h - -pkg_failed=no -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for panel" >&5 -printf %s "checking for panel... " >&6; } - -if test -n "$PANEL_CFLAGS"; then - pkg_cv_PANEL_CFLAGS="$PANEL_CFLAGS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panel\""; } >&5 - ($PKG_CONFIG --exists --print-errors "panel") 2>&5 - ac_status=$? - printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - pkg_cv_PANEL_CFLAGS=`$PKG_CONFIG --cflags "panel" 2>/dev/null` - test "x$?" != "x0" && pkg_failed=yes -else - pkg_failed=yes -fi - else - pkg_failed=untried fi -if test -n "$PANEL_LIBS"; then - pkg_cv_PANEL_LIBS="$PANEL_LIBS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"panel\""; } >&5 - ($PKG_CONFIG --exists --print-errors "panel") 2>&5 - ac_status=$? - printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - pkg_cv_PANEL_LIBS=`$PKG_CONFIG --libs "panel" 2>/dev/null` - test "x$?" 
!= "x0" && pkg_failed=yes -else - pkg_failed=yes -fi - else - pkg_failed=untried -fi - - +ac_fn_c_check_header_compile "$LINENO" "ncurses/panel.h" "ac_cv_header_ncurses_panel_h" "$ac_includes_default" +if test "x$ac_cv_header_ncurses_panel_h" = xyes +then : + printf "%s\n" "#define HAVE_NCURSES_PANEL_H 1" >>confdefs.h -if test $pkg_failed = yes; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 -printf "%s\n" "no" >&6; } +fi +ac_fn_c_check_header_compile "$LINENO" "curses.h" "ac_cv_header_curses_h" "$ac_includes_default" +if test "x$ac_cv_header_curses_h" = xyes +then : + printf "%s\n" "#define HAVE_CURSES_H 1" >>confdefs.h -if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then - _pkg_short_errors_supported=yes -else - _pkg_short_errors_supported=no fi - if test $_pkg_short_errors_supported = yes; then - PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panel" 2>&1` - else - PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panel" 2>&1` - fi - # Put the nasty error message in config.log where it belongs - echo "$PANEL_PKG_ERRORS" >&5 +ac_fn_c_check_header_compile "$LINENO" "ncurses.h" "ac_cv_header_ncurses_h" "$ac_includes_default" +if test "x$ac_cv_header_ncurses_h" = xyes +then : + printf "%s\n" "#define HAVE_NCURSES_H 1" >>confdefs.h +fi +ac_fn_c_check_header_compile "$LINENO" "panel.h" "ac_cv_header_panel_h" "$ac_includes_default" +if test "x$ac_cv_header_panel_h" = xyes +then : + printf "%s\n" "#define HAVE_PANEL_H 1" >>confdefs.h - save_CFLAGS=$CFLAGS -save_CPPFLAGS=$CPPFLAGS -save_LDFLAGS=$LDFLAGS -save_LIBS=$LIBS +fi - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for update_panels in -lpanel" >&5 -printf %s "checking for update_panels in -lpanel... " >&6; } -if test ${ac_cv_lib_panel_update_panels+y} + # Check that we're able to link with crucial curses/panel functions. This + # also serves as a fallback in case pkg-config failed. + as_fn_append LIBS " $CURSES_LIBS $PANEL_LIBS" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing initscr" >&5 +printf %s "checking for library containing initscr... " >&6; } +if test ${ac_cv_search_initscr+y} then : printf %s "(cached) " >&6 else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lpanel $LIBS" + ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ -char update_panels (); +char initscr (); int main (void) { -return update_panels (); +return initscr (); ; return 0; } _ACEOF -if ac_fn_c_try_link "$LINENO" +for ac_lib in '' ncursesw ncurses +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" then : - ac_cv_lib_panel_update_panels=yes -else $as_nop - ac_cv_lib_panel_update_panels=no + ac_cv_search_initscr=$ac_res fi rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS + conftest$ac_exeext + if test ${ac_cv_search_initscr+y} +then : + break fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_panel_update_panels" >&5 -printf "%s\n" "$ac_cv_lib_panel_update_panels" >&6; } -if test "x$ac_cv_lib_panel_update_panels" = xyes +done +if test ${ac_cv_search_initscr+y} then : - have_panel=panel - PANEL_CFLAGS=${PANEL_CFLAGS-""} - PANEL_LIBS=${PANEL_LIBS-"-lpanel"} - +else $as_nop + ac_cv_search_initscr=no +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_initscr" >&5 +printf "%s\n" "$ac_cv_search_initscr" >&6; } +ac_res=$ac_cv_search_initscr +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + if test "x$have_curses" = xno +then : + have_curses=yes + CURSES_LIBS=${CURSES_LIBS-"$ac_cv_search_initscr"} +fi +else $as_nop + have_curses=no fi - -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS - - - -elif test $pkg_failed = untried; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 -printf "%s\n" "no" >&6; } - - save_CFLAGS=$CFLAGS -save_CPPFLAGS=$CPPFLAGS -save_LDFLAGS=$LDFLAGS -save_LIBS=$LIBS - - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for update_panels in -lpanel" >&5 -printf %s "checking for update_panels in -lpanel... " >&6; } -if test ${ac_cv_lib_panel_update_panels+y} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing update_panels" >&5 +printf %s "checking for library containing update_panels... " >&6; } +if test ${ac_cv_search_update_panels+y} then : printf %s "(cached) " >&6 else $as_nop - ac_check_lib_save_LIBS=$LIBS -LIBS="-lpanel $LIBS" + ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ @@ -26670,76 +26400,82 @@ return update_panels (); return 0; } _ACEOF -if ac_fn_c_try_link "$LINENO" +for ac_lib in '' panelw panel +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" then : - ac_cv_lib_panel_update_panels=yes -else $as_nop - ac_cv_lib_panel_update_panels=no + ac_cv_search_update_panels=$ac_res fi rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS + conftest$ac_exeext + if test ${ac_cv_search_update_panels+y} +then : + break fi -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_panel_update_panels" >&5 -printf "%s\n" "$ac_cv_lib_panel_update_panels" >&6; } -if test "x$ac_cv_lib_panel_update_panels" = xyes +done +if test ${ac_cv_search_update_panels+y} then : - have_panel=panel - PANEL_CFLAGS=${PANEL_CFLAGS-""} - PANEL_LIBS=${PANEL_LIBS-"-lpanel"} - +else $as_nop + ac_cv_search_update_panels=no fi - - -CFLAGS=$save_CFLAGS -CPPFLAGS=$save_CPPFLAGS -LDFLAGS=$save_LDFLAGS -LIBS=$save_LIBS - - - -else - PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS - PANEL_LIBS=$pkg_cv_PANEL_LIBS - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -printf "%s\n" "yes" >&6; } - - have_panel=panel - +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS fi - +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_update_panels" >&5 +printf "%s\n" "$ac_cv_search_update_panels" >&6; } +ac_res=$ac_cv_search_update_panels +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + if test "x$have_panel" = xno +then : + have_panel=yes + PANEL_LIBS=${PANEL_LIBS-"$ac_cv_search_update_panels"} +fi +else $as_nop + have_panel=no fi -fi -PANEL_CFLAGS=$(echo $PANEL_CFLAGS | sed 's/-D_XOPEN_SOURCE=600//g') -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking panel flags" >&5 -printf %s "checking panel flags... " >&6; } -if test "x$have_panel" = xno + +if test "have_curses" != "no" then : - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 -printf "%s\n" "no" >&6; } +CURSES_CFLAGS=$(echo $CURSES_CFLAGS | sed 's/-D_XOPEN_SOURCE=600//g') -else $as_nop +if test "x$ac_sys_system" = xDarwin +then : - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $have_panel (CFLAGS: $PANEL_CFLAGS, LIBS: $PANEL_LIBS)" >&5 -printf "%s\n" "$have_panel (CFLAGS: $PANEL_CFLAGS, LIBS: $PANEL_LIBS)" >&6; } -fi + as_fn_append CURSES_CFLAGS " -D_XOPEN_SOURCE_EXTENDED=1" -# first curses header check -ac_save_cppflags="$CPPFLAGS" -if test "$cross_compiling" = no; then - CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw" fi +PANEL_CFLAGS=$(echo $PANEL_CFLAGS | sed 's/-D_XOPEN_SOURCE=600//g') + # On Solaris, term.h requires curses.h ac_fn_c_check_header_compile "$LINENO" "term.h" "ac_cv_header_term_h" " -#ifdef HAVE_CURSES_H -#include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include #endif " @@ -26759,7 +26495,22 @@ then : else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ -#include + +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif + int main (void) { @@ -26789,10 +26540,6 @@ printf "%s\n" "#define MVWDELCH_IS_EXPRESSION 1" >>confdefs.h fi -# Issue #25720: ncurses has introduced the NCURSES_OPAQUE symbol making opaque -# structs since version 5.7. If the macro is defined as zero before including -# [n]curses.h, ncurses will expose fields of the structs regardless of the -# configuration. { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether WINDOW has _flags" >&5 printf %s "checking whether WINDOW has _flags... " >&6; } if test ${ac_cv_window_has_flags+y} @@ -26802,8 +26549,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -26848,8 +26607,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -26894,8 +26665,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -26940,8 +26723,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -26986,8 +26781,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27032,8 +26839,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27078,8 +26897,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27124,8 +26955,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27170,8 +27013,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27216,8 +27071,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27262,8 +27129,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27308,8 +27187,20 @@ else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ - #define NCURSES_OPAQUE 0 - #include +#define NCURSES_OPAQUE 0 +#if defined(HAVE_NCURSESW_NCURSES_H) +# include +#elif defined(HAVE_NCURSESW_CURSES_H) +# include +#elif defined(HAVE_NCURSES_NCURSES_H) +# include +#elif defined(HAVE_NCURSES_CURSES_H) +# include +#elif defined(HAVE_NCURSES_H) +# include +#elif defined(HAVE_CURSES_H) +# include +#endif int main (void) @@ -27345,6 +27236,13 @@ fi CPPFLAGS=$ac_save_cppflags +fi +CFLAGS=$save_CFLAGS +CPPFLAGS=$save_CPPFLAGS +LDFLAGS=$save_LDFLAGS +LIBS=$save_LIBS + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for device files" >&5 printf "%s\n" "$as_me: checking for device files" >&6;} @@ -28536,9 +28434,6 @@ fi # builtin hash modules default_hashlib_hashes="md5,sha1,sha2,sha3,blake2" - -printf "%s\n" "#define PY_BUILTIN_HASHLIB_HASHES /**/" >>confdefs.h - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-builtin-hashlib-hashes" >&5 printf %s "checking for --with-builtin-hashlib-hashes... " >&6; } @@ -28563,6 +28458,7 @@ fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_builtin_hashlib_hashes" >&5 printf "%s\n" "$with_builtin_hashlib_hashes" >&6; } + printf "%s\n" "#define PY_BUILTIN_HASHLIB_HASHES \"$with_builtin_hashlib_hashes\"" >>confdefs.h @@ -30414,7 +30310,7 @@ then : if true then : - if test "$have_curses" != "no" + if test "$have_curses" = "yes" then : py_cv_module__curses=yes else $as_nop @@ -30453,7 +30349,7 @@ then : if true then : - if test "$have_panel" != "no" + if test "$have_curses" = "yes" && test "$have_panel" = "yes" then : py_cv_module__curses_panel=yes else $as_nop diff --git a/configure.ac b/configure.ac index 93dd489d14b..24e28a1e2de 100644 --- a/configure.ac +++ b/configure.ac @@ -382,7 +382,7 @@ AC_MSG_RESULT(["$MACHDEP"]) # On cross-compile builds, configure will look for a host-specific compiler by # prepending the user-provided host triple to the required binary name. # -# On iOS, this results in binaries like "arm64-apple-ios12.0-simulator-gcc", +# On iOS, this results in binaries like "arm64-apple-ios13.0-simulator-gcc", # which isn't a binary that exists, and isn't very convenient, as it contains the # iOS version. As the default cross-compiler name won't exist, configure falls # back to gcc, which *definitely* won't work. We're providing wrapper scripts for @@ -418,9 +418,9 @@ if test -z "$CPP"; then fi if test -z "$CXX"; then case "$host" in - aarch64-apple-ios*-simulator) CXX=arm64-apple-ios-simulator-clang ;; - aarch64-apple-ios*) CXX=arm64-apple-ios-clang ;; - x86_64-apple-ios*-simulator) CXX=x86_64-apple-ios-simulator-clang ;; + aarch64-apple-ios*-simulator) CXX=arm64-apple-ios-simulator-clang++ ;; + aarch64-apple-ios*) CXX=arm64-apple-ios-clang++ ;; + x86_64-apple-ios*-simulator) CXX=x86_64-apple-ios-simulator-clang++ ;; *) esac fi @@ -695,6 +695,47 @@ AC_SUBST([INSTALLTARGETS]) AC_DEFINE_UNQUOTED([_PYTHONFRAMEWORK], ["${PYTHONFRAMEWORK}"], [framework name]) +dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output +AC_MSG_CHECKING([for --with-app-store-compliance]) +AC_ARG_WITH( + [app_store_compliance], + [AS_HELP_STRING( + [--with-app-store-compliance=@<:@PATCH-FILE@:>@], + [Enable any patches required for compiliance with app stores. + Optional PATCH-FILE specifies the custom patch to apply.] 
+ )],[ + case "$withval" in + yes) + case $ac_sys_system in + Darwin|iOS) + # iOS is able to share the macOS patch + APP_STORE_COMPLIANCE_PATCH="Mac/Resources/app-store-compliance.patch" + ;; + *) AC_MSG_ERROR([no default app store compliance patch available for $ac_sys_system]) ;; + esac + AC_MSG_RESULT([applying default app store compliance patch]) + ;; + *) + APP_STORE_COMPLIANCE_PATCH="${withval}" + AC_MSG_RESULT([applying custom app store compliance patch]) + ;; + esac + ],[ + case $ac_sys_system in + iOS) + # Always apply the compliance patch on iOS; we can use the macOS patch + APP_STORE_COMPLIANCE_PATCH="Mac/Resources/app-store-compliance.patch" + AC_MSG_RESULT([applying default app store compliance patch]) + ;; + *) + # No default app compliance patching on any other platform + APP_STORE_COMPLIANCE_PATCH= + AC_MSG_RESULT([not patching for app store compliance]) + ;; + esac +]) +AC_SUBST([APP_STORE_COMPLIANCE_PATCH]) + AC_SUBST([_PYTHON_HOST_PLATFORM]) if test "$cross_compiling" = yes; then case "$host" in @@ -716,8 +757,10 @@ if test "$cross_compiling" = yes; then _host_device=${_host_device:=os} # IPHONEOS_DEPLOYMENT_TARGET is the minimum supported iOS version - IPHONEOS_DEPLOYMENT_TARGET=${_host_os:3} - IPHONEOS_DEPLOYMENT_TARGET=${IPHONEOS_DEPLOYMENT_TARGET:=12.0} + AC_MSG_CHECKING([iOS deployment target]) + IPHONEOS_DEPLOYMENT_TARGET=$(echo ${_host_os} | cut -c4-) + IPHONEOS_DEPLOYMENT_TARGET=${IPHONEOS_DEPLOYMENT_TARGET:=13.0} + AC_MSG_RESULT([$IPHONEOS_DEPLOYMENT_TARGET]) case "$host_cpu" in aarch64) @@ -1681,7 +1724,9 @@ fi # For calculating the .so ABI tag. AC_SUBST([ABIFLAGS]) +AC_SUBST([ABI_THREAD]) ABIFLAGS="" +ABI_THREAD="" # Check for --disable-gil # --disable-gil @@ -1698,6 +1743,7 @@ then [Define if you want to disable the GIL]) # Add "t" for "threaded" ABIFLAGS="${ABIFLAGS}t" + ABI_THREAD="t" fi # Check for --with-pydebug @@ -1731,6 +1777,10 @@ then [Define if you want to enable tracing references for debugging purpose]) fi +if test "$disable_gil" = "yes" -a "$with_trace_refs" = "yes"; +then + AC_MSG_ERROR([--disable-gil cannot be used with --with-trace-refs]) +fi # Check for --enable-pystats AC_MSG_CHECKING([for --enable-pystats]) @@ -3641,7 +3691,6 @@ AC_MSG_CHECKING([perf trampoline]) AS_CASE([$PLATFORM_TRIPLET], [x86_64-linux-gnu], [perf_trampoline=yes], [aarch64-linux-gnu], [perf_trampoline=yes], - [riscv64-linux-gnu], [perf_trampoline=yes], [perf_trampoline=no] ) AC_MSG_RESULT([$perf_trampoline]) @@ -6119,16 +6168,19 @@ AC_MSG_CHECKING([LDVERSION]) LDVERSION='$(VERSION)$(ABIFLAGS)' AC_MSG_RESULT([$LDVERSION]) -# Configure the flags and dependencies used when compiling shared modules +# Configure the flags and dependencies used when compiling shared modules. +# Do not rename LIBPYTHON - it's accessed via sysconfig by package build +# systems (e.g. Meson) to decide whether to link extension modules against +# libpython. AC_SUBST([MODULE_DEPS_SHARED]) -AC_SUBST([MODULE_LDFLAGS]) +AC_SUBST([LIBPYTHON]) MODULE_DEPS_SHARED='$(MODULE_DEPS_STATIC) $(EXPORTSYMS)' -MODULE_LDFLAGS='' +LIBPYTHON='' # On Android and Cygwin the shared libraries must be linked with libpython. 
if test "$PY_ENABLE_SHARED" = "1" && ( test -n "$ANDROID_API_LEVEL" || test "$MACHDEP" = "cygwin"); then MODULE_DEPS_SHARED="$MODULE_DEPS_SHARED \$(LDLIBRARY)" - MODULE_LDFLAGS="\$(BLDLIBRARY)" + LIBPYTHON="\$(BLDLIBRARY)" fi # On iOS the shared libraries must be linked with the Python framework @@ -6138,11 +6190,11 @@ fi AC_SUBST([BINLIBDEST]) -BINLIBDEST='$(LIBDIR)/python$(VERSION)' +BINLIBDEST='$(LIBDIR)/python$(VERSION)$(ABI_THREAD)' # Check for --with-platlibdir -# /usr/$LIDIRNAME/python$VERSION +# /usr/$PLATLIBDIR/python$(VERSION)$(ABI_THREAD) AC_SUBST([PLATLIBDIR]) PLATLIBDIR="lib" AC_MSG_CHECKING([for --with-platlibdir]) @@ -6161,7 +6213,7 @@ if test -n "$withval" -a "$withval" != yes -a "$withval" != no then AC_MSG_RESULT([yes]) PLATLIBDIR="$withval" - BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)' + BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)$(ABI_THREAD)' else AC_MSG_RESULT([no]) fi], @@ -6171,9 +6223,9 @@ fi], dnl define LIBPL after ABIFLAGS and LDVERSION is defined. AC_SUBST([PY_ENABLE_SHARED]) if test x$PLATFORM_TRIPLET = x; then - LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}" + LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}" else - LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}" + LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}-${PLATFORM_TRIPLET}" fi AC_SUBST([LIBPL]) @@ -6567,55 +6619,88 @@ then [Define if you have struct stat.st_mtimensec]) fi -dnl check for ncurses/ncursesw and panel/panelw +dnl check for ncursesw/ncurses and panelw/panel dnl NOTE: old curses is not detected. -dnl have_curses=[no, ncursesw, ncurses] -dnl have_panel=[no, panelw, panel] +dnl have_curses=[no, yes] +dnl have_panel=[no, yes] have_curses=no have_panel=no -AH_TEMPLATE([HAVE_NCURSESW], [Define to 1 if you have the `ncursesw' library.]) -AC_CHECK_HEADERS([curses.h ncurses.h]) - -AS_VAR_IF([ac_cv_header_ncurses_h], [yes], [ - if test "$ac_sys_system" != "Darwin"; then - dnl On macOS, there is no separate /usr/lib/libncursesw nor libpanelw. - PKG_CHECK_MODULES([CURSES], [ncursesw], [ - AC_DEFINE([HAVE_NCURSESW], [1]) - have_curses=ncursesw - ], [ - WITH_SAVE_ENV([ - AC_CHECK_LIB([ncursesw], [initscr], [ - AC_DEFINE([HAVE_NCURSESW], [1]) - have_curses=ncursesw - CURSES_CFLAGS=${CURSES_CFLAGS-""} - CURSES_LIBS=${CURSES_LIBS-"-lncursesw"} - ]) - ]) - ]) - fi +dnl PY_CHECK_CURSES(LIBCURSES, LIBPANEL) +dnl Sets 'have_curses' and 'have_panel'. +dnl For the PKG_CHECK_MODULES() calls, we can safely reuse the first variable +dnl here, since we're only calling the macro a second time if the first call +dnl fails. +AC_DEFUN([PY_CHECK_CURSES], [dnl +AS_VAR_PUSHDEF([curses_var], [m4_toupper([$1])]) +AS_VAR_PUSHDEF([panel_var], [m4_toupper([$2])]) +PKG_CHECK_MODULES([CURSES], [$1], + [AC_DEFINE([HAVE_]curses_var, [1], [Define if you have the '$1' library]) + AS_VAR_SET([have_curses], [yes]) + PKG_CHECK_MODULES([PANEL], [$2], + [AC_DEFINE([HAVE_]panel_var, [1], [Define if you have the '$2' library]) + AS_VAR_SET([have_panel], [yes])], + [AS_VAR_SET([have_panel], [no])])], + [AS_VAR_SET([have_curses], [no])]) +AS_VAR_POPDEF([curses_var]) +AS_VAR_POPDEF([panel_var])]) + +# Check for ncursesw/panelw first. If that fails, try ncurses/panel. 
+PY_CHECK_CURSES([ncursesw], [panelw])
+AS_VAR_IF([have_curses], [no],
+  [PY_CHECK_CURSES([ncurses], [panel])])
 
-  AS_VAR_IF([have_curses], [no], [
-    PKG_CHECK_MODULES([CURSES], [ncurses], [
-      have_curses=ncurses
-    ], [
-      WITH_SAVE_ENV([
-        AC_CHECK_LIB([ncurses], [initscr], [
-          have_curses=ncurses
-          CURSES_CFLAGS=${CURSES_CFLAGS-""}
-          CURSES_LIBS=${CURSES_LIBS-"-lncurses"}
-        ])
-      ])
-    ])
-  ])
+WITH_SAVE_ENV([
+  # Make sure we've got the header defines.
+  AS_VAR_APPEND([CPPFLAGS], [" $CURSES_CFLAGS $PANEL_CFLAGS"])
+  AC_CHECK_HEADERS(m4_normalize([
+    ncursesw/curses.h ncursesw/ncurses.h ncursesw/panel.h
+    ncurses/curses.h ncurses/ncurses.h ncurses/panel.h
+    curses.h ncurses.h panel.h
+  ]))
 
-])dnl ac_cv_header_ncurses_h = yes
+  # Check that we're able to link with crucial curses/panel functions. This
+  # also serves as a fallback in case pkg-config failed.
+  AS_VAR_APPEND([LIBS], [" $CURSES_LIBS $PANEL_LIBS"])
+  AC_SEARCH_LIBS([initscr], [ncursesw ncurses],
+    [AS_VAR_IF([have_curses], [no],
+      [AS_VAR_SET([have_curses], [yes])
+       CURSES_LIBS=${CURSES_LIBS-"$ac_cv_search_initscr"}])],
+    [AS_VAR_SET([have_curses], [no])])
+  AC_SEARCH_LIBS([update_panels], [panelw panel],
+    [AS_VAR_IF([have_panel], [no],
+      [AS_VAR_SET([have_panel], [yes])
+       PANEL_LIBS=${PANEL_LIBS-"$ac_cv_search_update_panels"}])],
+    [AS_VAR_SET([have_panel], [no])])
+
+dnl Issue #25720: ncurses has introduced the NCURSES_OPAQUE symbol making opaque
+dnl structs since version 5.7. If the macro is defined as zero before including
+dnl [n]curses.h, ncurses will expose fields of the structs regardless of the
+dnl configuration.
+AC_DEFUN([_CURSES_INCLUDES],dnl
+[
+#define NCURSES_OPAQUE 0
+#if defined(HAVE_NCURSESW_NCURSES_H)
+#  include <ncursesw/ncurses.h>
+#elif defined(HAVE_NCURSESW_CURSES_H)
+#  include <ncursesw/curses.h>
+#elif defined(HAVE_NCURSES_NCURSES_H)
+#  include <ncurses/ncurses.h>
+#elif defined(HAVE_NCURSES_CURSES_H)
+#  include <ncurses/curses.h>
+#elif defined(HAVE_NCURSES_H)
+#  include <ncurses.h>
+#elif defined(HAVE_CURSES_H)
+#  include <curses.h>
+#endif
+])
+AS_IF([test "have_curses" != "no"], [
 dnl remove _XOPEN_SOURCE macro from curses cflags. pyconfig.h sets
 dnl the macro to 700.
 CURSES_CFLAGS=$(echo $CURSES_CFLAGS | sed 's/-D_XOPEN_SOURCE=600//g')
 
-if test "$have_curses" != no -a "$ac_sys_system" = "Darwin"; then
+AS_VAR_IF([ac_sys_system], [Darwin], [
   dnl On macOS, there is no separate /usr/lib/libncursesw nor libpanelw.
   dnl System-supplied ncurses combines libncurses/libpanel and supports wide
   dnl characters, so we can use it like ncursesw.
@@ -6625,82 +6710,17 @@ if test "$have_curses" != no -a "$ac_sys_system" = "Darwin"; then
   dnl _XOPEN_SOURCE_EXTENDED here for ncurses wide char support.
   AS_VAR_APPEND([CURSES_CFLAGS], [" -D_XOPEN_SOURCE_EXTENDED=1"])
 
-  AC_DEFINE([HAVE_NCURSESW], [1])
-fi
-
-dnl TODO: detect "curses" and special cases tinfo, terminfo, or termcap
-
-AC_MSG_CHECKING([curses module flags])
-AS_VAR_IF([have_curses], [no], [
-  AC_MSG_RESULT([no])
-], [
-  AC_MSG_RESULT([$have_curses (CFLAGS: $CURSES_CFLAGS, LIBS: $CURSES_LIBS)])
 ])
 
-dnl check for ncurses' panel/panelw library
-AC_CHECK_HEADERS([panel.h])
-
-AS_VAR_IF([ac_cv_header_panel_h], [yes], [
-
-  if test "$ac_sys_system" != "Darwin"; then
-    dnl On macOS, there is no separate /usr/lib/libncursesw nor libpanelw.
- AS_VAR_IF([have_curses], [ncursesw], [ - PKG_CHECK_MODULES([PANEL], [panelw], [ - have_panel=panelw - ], [ - WITH_SAVE_ENV([ - AC_CHECK_LIB([panelw], [update_panels], [ - have_panel=panelw - PANEL_CFLAGS=${PANEL_CFLAGS-""} - PANEL_LIBS=${PANEL_LIBS-"-lpanelw"} - ]) - ]) - ]) - ]) - fi - - AS_VAR_IF([have_curses], [ncurses], [ - PKG_CHECK_MODULES([PANEL], [panel], [ - have_panel=panel - ], [ - WITH_SAVE_ENV([ - AC_CHECK_LIB([panel], [update_panels], [ - have_panel=panel - PANEL_CFLAGS=${PANEL_CFLAGS-""} - PANEL_LIBS=${PANEL_LIBS-"-lpanel"} - ]) - ]) - ]) - ]) - -])dnl ac_cv_header_panel_h = yes - dnl pyconfig.h defines _XOPEN_SOURCE=700 PANEL_CFLAGS=$(echo $PANEL_CFLAGS | sed 's/-D_XOPEN_SOURCE=600//g') -AC_MSG_CHECKING([panel flags]) -AS_VAR_IF([have_panel], [no], [ - AC_MSG_RESULT([no]) -], [ - AC_MSG_RESULT([$have_panel (CFLAGS: $PANEL_CFLAGS, LIBS: $PANEL_LIBS)]) -]) - -# first curses header check -ac_save_cppflags="$CPPFLAGS" -if test "$cross_compiling" = no; then - CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw" -fi - # On Solaris, term.h requires curses.h -AC_CHECK_HEADERS([term.h], [], [], [ -#ifdef HAVE_CURSES_H -#include -#endif -]) +AC_CHECK_HEADERS([term.h], [], [], _CURSES_INCLUDES) # On HP/UX 11.0, mvwdelch is a block with a return statement AC_CACHE_CHECK([whether mvwdelch is an expression], [ac_cv_mvwdelch_is_expression], -AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[@%:@include ]], [[ +AC_COMPILE_IFELSE([AC_LANG_PROGRAM(_CURSES_INCLUDES, [[ int rtn; rtn = mvwdelch(0,0,0); ]])], @@ -6713,15 +6733,8 @@ then [Define if mvwdelch in curses.h is an expression.]) fi -# Issue #25720: ncurses has introduced the NCURSES_OPAQUE symbol making opaque -# structs since version 5.7. If the macro is defined as zero before including -# [n]curses.h, ncurses will expose fields of the structs regardless of the -# configuration. 
 AC_CACHE_CHECK([whether WINDOW has _flags], [ac_cv_window_has_flags],
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-  #define NCURSES_OPAQUE 0
-  #include <curses.h>
-]], [[
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM(_CURSES_INCLUDES, [[
   WINDOW *w;
   w->_flags = 0;
 ]])],
@@ -6743,11 +6756,7 @@ AC_DEFUN([PY_CHECK_CURSES_FUNC],
   [for curses function $1],
   [py_var],
   [AC_COMPILE_IFELSE(
-    [AC_LANG_PROGRAM(
-      [
-        #define NCURSES_OPAQUE 0
-        #include <curses.h>
-      ], [
+    [AC_LANG_PROGRAM(_CURSES_INCLUDES, [
       #ifndef $1
       void *x=$1
       #endif
@@ -6775,6 +6784,8 @@ PY_CHECK_CURSES_FUNC([has_key])
 PY_CHECK_CURSES_FUNC([typeahead])
 PY_CHECK_CURSES_FUNC([use_env])
 CPPFLAGS=$ac_save_cppflags
+])dnl have_curses != no
+])dnl save env
 
 
 AC_MSG_NOTICE([checking for device files])
@@ -7311,8 +7322,6 @@ AC_DEFINE([PY_SSL_DEFAULT_CIPHERS], [1])
 
 # builtin hash modules
 default_hashlib_hashes="md5,sha1,sha2,sha3,blake2"
-AC_DEFINE([PY_BUILTIN_HASHLIB_HASHES], [], [enabled builtin hash modules]
-)
 AC_MSG_CHECKING([for --with-builtin-hashlib-hashes])
 AC_ARG_WITH(
   [builtin-hashlib-hashes],
@@ -7329,7 +7338,8 @@ AC_ARG_WITH(
 AC_MSG_RESULT([$with_builtin_hashlib_hashes])
 
 AC_DEFINE_UNQUOTED([PY_BUILTIN_HASHLIB_HASHES],
-                   ["$with_builtin_hashlib_hashes"])
+                   ["$with_builtin_hashlib_hashes"],
+                   [enabled builtin hash modules])
 
 as_save_IFS=$IFS
 IFS=,
@@ -7692,11 +7702,11 @@ PY_STDLIB_MOD([_ctypes],
   [], [test "$have_libffi" = yes],
   [$NO_STRICT_OVERFLOW_CFLAGS $LIBFFI_CFLAGS], [$LIBFFI_LIBS])
 PY_STDLIB_MOD([_curses],
-  [], [test "$have_curses" != "no"],
+  [], [test "$have_curses" = "yes"],
   [$CURSES_CFLAGS], [$CURSES_LIBS]
 )
 PY_STDLIB_MOD([_curses_panel],
-  [], [test "$have_panel" != "no"],
+  [], [test "$have_curses" = "yes" && test "$have_panel" = "yes"],
   [$PANEL_CFLAGS $CURSES_CFLAGS], [$PANEL_LIBS $CURSES_LIBS]
 )
 PY_STDLIB_MOD([_decimal],
diff --git a/iOS/README.rst b/iOS/README.rst
index 96cb00eb2e9..4d7c344d5e9 100644
--- a/iOS/README.rst
+++ b/iOS/README.rst
@@ -188,7 +188,7 @@ especially important, as many parts of the standard library (including the
 ``ctypes`` module at runtime.
 
 By default, Python will be compiled with an iOS deployment target (i.e., the
-minimum supported iOS version) of 12.0. To specify a different deployment
+minimum supported iOS version) of 13.0. To specify a different deployment
 target, provide the version number as part of the ``--host`` argument - for
 example, ``--host=arm64-apple-ios15.4-simulator`` would compile an ARM64
 simulator build with a deployment target of 15.4.
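As an illustrative sketch (not part of this patch), a simulator build that overrides the new 13.0 default deployment target might be configured roughly as follows; the build-Python path and SDK version are placeholders, and only the ``--host`` form is taken from the README above:

    # Hypothetical cross-compile invocation for an ARM64 iOS simulator build
    # with an explicit 15.4 deployment target (values are examples only).
    export IOS_SDK_VERSION=17.5
    ./configure \
        --host=arm64-apple-ios15.4-simulator \
        --build=arm64-apple-darwin \
        --with-build-python=/path/to/host/python3.13 \
        --enable-framework

The wrapper scripts added below resolve the host triple to xcrun-driven clang/clang++ invocations, so no Xcode-specific compiler paths need to be exported.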
diff --git a/iOS/Resources/bin/arm64-apple-ios-clang++ b/iOS/Resources/bin/arm64-apple-ios-clang++
new file mode 100644
index 00000000000..f24bec11268
--- /dev/null
+++ b/iOS/Resources/bin/arm64-apple-ios-clang++
@@ -0,0 +1,2 @@
+#!/bin/sh
+xcrun --sdk iphoneos${IOS_SDK_VERSION} clang++ -target arm64-apple-ios $@
diff --git a/iOS/Resources/bin/arm64-apple-ios-simulator-clang++ b/iOS/Resources/bin/arm64-apple-ios-simulator-clang++
new file mode 100644
index 00000000000..ef37d05b512
--- /dev/null
+++ b/iOS/Resources/bin/arm64-apple-ios-simulator-clang++
@@ -0,0 +1,2 @@
+#!/bin/sh
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target arm64-apple-ios-simulator $@
diff --git a/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++
new file mode 100644
index 00000000000..86f03ea32bc
--- /dev/null
+++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++
@@ -0,0 +1,2 @@
+#!/bin/sh
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target x86_64-apple-ios-simulator $@
diff --git a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
index e6a919c304e..9bf502a808e 100644
--- a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
+++ b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
@@ -15,6 +15,8 @@ - (void)testPython {
     const char *argv[] = {
         "iOSTestbed",           // argv[0] is the process that is running.
         "-uall",                // Enable all resources
+        "--single-process",     // always run all tests sequentially in a single process
+        "--rerun",              // Re-run failed tests in verbose mode
         "-W",                   // Display test output on failure
         // To run a subset of tests, add the test names below; e.g.,
         // "test_os",
diff --git a/pyconfig.h.in b/pyconfig.h.in
index c279b147db3..4d8b1d4f254 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -829,12 +829,33 @@
 /* Define to 1 if you have the `nanosleep' function. */
 #undef HAVE_NANOSLEEP
 
-/* Define to 1 if you have the `ncursesw' library. */
+/* Define if you have the 'ncurses' library */
+#undef HAVE_NCURSES
+
+/* Define if you have the 'ncursesw' library */
 #undef HAVE_NCURSESW
 
+/* Define to 1 if you have the <ncursesw/curses.h> header file. */
+#undef HAVE_NCURSESW_CURSES_H
+
+/* Define to 1 if you have the <ncursesw/ncurses.h> header file. */
+#undef HAVE_NCURSESW_NCURSES_H
+
+/* Define to 1 if you have the <ncursesw/panel.h> header file. */
+#undef HAVE_NCURSESW_PANEL_H
+
+/* Define to 1 if you have the <ncurses/curses.h> header file. */
+#undef HAVE_NCURSES_CURSES_H
+
 /* Define to 1 if you have the <ncurses.h> header file. */
 #undef HAVE_NCURSES_H
 
+/* Define to 1 if you have the <ncurses/ncurses.h> header file. */
+#undef HAVE_NCURSES_NCURSES_H
+
+/* Define to 1 if you have the <ncurses/panel.h> header file. */
+#undef HAVE_NCURSES_PANEL_H
+
 /* Define to 1 if you have the <ndbm.h> header file. */
 #undef HAVE_NDBM_H
 
@@ -878,6 +899,12 @@
 /* Define to 1 if you have the `openpty' function. */
 #undef HAVE_OPENPTY
 
+/* Define if you have the 'panel' library */
+#undef HAVE_PANEL
+
+/* Define if you have the 'panelw' library */
+#undef HAVE_PANELW
+
 /* Define to 1 if you have the <panel.h> header file. */
 #undef HAVE_PANEL_H