diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 00000000000000..efbdcd402cdf67 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,24 @@ +FROM docker.io/library/fedora:37 + +ENV CC=clang + +ENV WASI_SDK_VERSION=19 +ENV WASI_SDK_PATH=/opt/wasi-sdk + +ENV WASMTIME_HOME=/opt/wasmtime +ENV WASMTIME_VERSION=7.0.0 +ENV WASMTIME_CPU_ARCH=x86_64 + +RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \ + dnf -y --nodocs --setopt=install_weak_deps=False builddep python3 && \ + dnf -y clean all + +RUN mkdir ${WASI_SDK_PATH} && \ + curl --location https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-linux.tar.gz | \ + tar --strip-components 1 --directory ${WASI_SDK_PATH} --extract --gunzip + +RUN mkdir --parents ${WASMTIME_HOME} && \ + curl --location "https://github.com/bytecodealliance/wasmtime/releases/download/v${WASMTIME_VERSION}/wasmtime-v${WASMTIME_VERSION}-${WASMTIME_CPU_ARCH}-linux.tar.xz" | \ + xz --decompress | \ + tar --strip-components 1 --directory ${WASMTIME_HOME} -x && \ + ln -s ${WASMTIME_HOME}/wasmtime /usr/local/bin diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000000000..9fbaf7fddd8514 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,81 @@ +{ + "build": { + "dockerfile": "Dockerfile" + }, + "onCreateCommand": [ + // Install common tooling. + "dnf", + "install", + "-y", + "which", + "zsh", + "fish" + ], + "updateContentCommand": { + // Using the shell for `nproc` usage. + "python": "./configure --config-cache --with-pydebug && make -s -j `nproc`", + "docs": [ + "make", + "--directory", + "Doc", + "venv", + "html" + ] + }, + "customizations": { + "vscode": { + "extensions": [ + // Highlighting for Parser/Python.asdl. + "brettcannon.zephyr-asdl", + // Highlighting for configure.ac. + "maelvalais.autoconf", + // C auto-complete. + "ms-vscode.cpptools", + // To view built docs. + "ms-vscode.live-server" + // https://github.com/microsoft/vscode-python/issues/18073 + // "ms-python.python" + ], + "settings": { + "C_Cpp.default.compilerPath": "/usr/bin/clang", + "C_Cpp.default.cStandard": "c11", + "C_Cpp.default.defines": [ + "CONFIG_64", + "Py_BUILD_CORE" + ], + "C_Cpp.default.includePath": [ + "${workspaceFolder}/*", + "${workspaceFolder}/Include/**" + ], + // https://github.com/microsoft/vscode-cpptools/issues/10732 + "C_Cpp.errorSquiggles": "disabled", + "editor.insertSpaces": true, + "editor.rulers": [ + 80 + ], + "editor.tabSize": 4, + "editor.trimAutoWhitespace": true, + "files.associations": { + "*.h": "c" + }, + "files.encoding": "utf8", + "files.eol": "\n", + "files.insertFinalNewline": true, + "files.trimTrailingWhitespace": true, + "python.analysis.diagnosticSeverityOverrides": { + // Complains about shadowing the stdlib w/ the stdlib. + "reportShadowedImports": "none", + // Doesn't like _frozen_importlib. 
+ "reportMissingImports": "none" + }, + "python.analysis.extraPaths": [ + "Lib" + ], + "python.defaultInterpreterPath": "./python", + "[restructuredtext]": { + "editor.tabSize": 3 + } + } + } + } +} diff --git a/.gitattributes b/.gitattributes index 13289182400109..4ed95069442f3d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -32,6 +32,10 @@ Lib/test/test_importlib/resources/data01/* noeol Lib/test/test_importlib/resources/namespacedata01/* noeol Lib/test/xmltestdata/* noeol +# Shell scripts should have LF even on Windows because of Cygwin +Lib/venv/scripts/common/activate text eol=lf +Lib/venv/scripts/posix/* text eol=lf + # CRLF files [attr]dos text eol=crlf diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index fc1bb3388976d5..3422ef835279bc 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,13 +5,13 @@ # https://git-scm.com/docs/gitignore#_pattern_format # GitHub -.github/** @ezio-melotti +.github/** @ezio-melotti @hugovk # Build system configure* @erlend-aasland @corona10 # asyncio -**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 +**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 @willingc # Core **/*context* @1st1 @@ -25,6 +25,8 @@ Objects/frameobject.c @markshannon Objects/call.c @markshannon Python/ceval.c @markshannon Python/compile.c @markshannon @iritkatriel +Python/assemble.c @markshannon @iritkatriel +Python/flowgraph.c @markshannon @iritkatriel Python/ast_opt.c @isidentical Lib/test/test_patma.py @brandtbucher Lib/test/test_peepholer.py @brandtbucher @@ -61,11 +63,7 @@ Python/traceback.c @iritkatriel /Tools/build/parse_html5_entities.py @ezio-melotti # Import (including importlib). -# Ignoring importlib.h so as to not get flagged on -# all pull requests that change the emitted -# bytecode. -**/*import*.c @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw -**/*import*.py @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw +**/*import* @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw **/*importlib/resources/* @jaraco @warsaw @FFY00 **/importlib/metadata/* @jaraco @warsaw diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4e5328282f1224..df0f107a541614 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -33,6 +33,7 @@ jobs: check_source: name: 'Check for source changes' runs-on: ubuntu-latest + timeout-minutes: 10 outputs: run_tests: ${{ steps.check.outputs.run_tests }} steps: @@ -63,6 +64,7 @@ jobs: check_generated_files: name: 'Check if generated files are up to date' runs-on: ubuntu-latest + timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: @@ -118,6 +120,7 @@ jobs: build_win32: name: 'Windows (x86)' runs-on: windows-latest + timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: @@ -126,7 +129,6 @@ jobs: - uses: actions/checkout@v3 - name: Build CPython run: .\PCbuild\build.bat -e -d -p Win32 - timeout-minutes: 30 - name: Display build info run: .\python.bat -m test.pythoninfo - name: Tests @@ -135,6 +137,7 @@ jobs: build_win_amd64: name: 'Windows (x64)' runs-on: windows-latest + timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: @@ -145,7 +148,6 @@ jobs: run: echo "::add-matcher::.github/problem-matchers/msvc.json" - name: Build CPython run: .\PCbuild\build.bat -e -d -p x64 - timeout-minutes: 30 - name: Display build info run: .\python.bat -m test.pythoninfo - name: Tests @@ -154,6 +156,7 @@ jobs: build_macos: name: 'macOS' 
runs-on: macos-latest + timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: @@ -184,6 +187,7 @@ jobs: build_ubuntu: name: 'Ubuntu' runs-on: ubuntu-20.04 + timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: @@ -241,6 +245,7 @@ jobs: build_ubuntu_ssltests: name: 'Ubuntu SSL tests with OpenSSL' runs-on: ubuntu-20.04 + timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' strategy: @@ -290,6 +295,7 @@ jobs: build_asan: name: 'Address sanitizer' runs-on: ubuntu-20.04 + timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: @@ -302,6 +308,10 @@ jobs: run: echo "::add-matcher::.github/problem-matchers/gcc.json" - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh + - name: Set up GCC-10 for ASAN + uses: egor-tensin/setup-gcc@v1 + with: + version: 10 - name: Configure OpenSSL env vars run: | echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml index 5f1dcae190efbc..2bed09014e0ff2 100644 --- a/.github/workflows/build_msi.yml +++ b/.github/workflows/build_msi.yml @@ -26,6 +26,7 @@ jobs: build: name: Windows Installer runs-on: windows-latest + timeout-minutes: 60 strategy: matrix: type: [x86, x64, arm64] diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 465da12fa1be80..3f7550cc72943b 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -36,6 +36,7 @@ jobs: build_doc: name: 'Docs' runs-on: ubuntu-latest + timeout-minutes: 60 steps: - uses: actions/checkout@v3 - name: Register Sphinx problem matcher @@ -53,10 +54,56 @@ jobs: - name: 'Build HTML documentation' run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html + # Add pull request annotations for Sphinx nitpicks (missing references) + - name: 'Get list of changed files' + if: github.event_name == 'pull_request' + id: changed_files + uses: Ana06/get-changed-files@v2.2.0 + with: + filter: "Doc/**" + - name: 'Build changed files in nit-picky mode' + if: github.event_name == 'pull_request' + continue-on-error: true + run: | + # Mark files the pull request modified + touch ${{ steps.changed_files.outputs.added_modified }} + # Build docs with the '-n' (nit-picky) option; convert warnings to annotations + make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n --keep-going" html 2>&1 | + python Doc/tools/warnings-to-gh-actions.py + + # Ensure some files always pass Sphinx nit-picky mode (no missing references) + - name: 'Build known-good files in nit-picky mode' + run: | + # Mark files that must pass nit-picky + python Doc/tools/touch-clean-files.py + # Build docs with the '-n' (nit-picky) option, convert warnings to errors (-W) + make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going" html 2>&1 + + # This build doesn't use problem matchers or check annotations + # It also does not run 'make check', as sphinx-lint is not installed into the + # environment. 
+ build_doc_oldest_supported_sphinx: + name: 'Docs (Oldest Sphinx)' + runs-on: ubuntu-latest + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - name: 'Set up Python' + uses: actions/setup-python@v4 + with: + python-version: '3.11' # known to work with Sphinx 3.2 + cache: 'pip' + cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt' + - name: 'Install build dependencies' + run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt" + - name: 'Build HTML documentation' + run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html + # Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release doctest: name: 'Doctest' runs-on: ubuntu-latest + timeout-minutes: 60 steps: - uses: actions/checkout@v3 - name: Register Sphinx problem matcher diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml new file mode 100644 index 00000000000000..43a7afec73884e --- /dev/null +++ b/.github/workflows/documentation-links.yml @@ -0,0 +1,27 @@ +name: Read the Docs PR preview +# Automatically edits a pull request's descriptions with a link +# to the documentation's preview on Read the Docs. + +on: + pull_request_target: + types: + - opened + paths: + - 'Doc/**' + - '.github/workflows/doc.yml' + +permissions: + pull-requests: write + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + documentation-links: + runs-on: ubuntu-latest + steps: + - uses: readthedocs/actions/preview@v1 + with: + project-slug: "cpython-previews" + single-version: "true" diff --git a/.github/workflows/new-bugs-announce-notifier.yml b/.github/workflows/new-bugs-announce-notifier.yml index b2a76ef7d36153..73806c5d6d58af 100644 --- a/.github/workflows/new-bugs-announce-notifier.yml +++ b/.github/workflows/new-bugs-announce-notifier.yml @@ -11,6 +11,7 @@ permissions: jobs: notify-new-bugs-announce: runs-on: ubuntu-latest + timeout-minutes: 10 steps: - uses: actions/setup-node@v3 with: diff --git a/.github/workflows/project-updater.yml b/.github/workflows/project-updater.yml index 99c7a05ae8cab0..7574bfc208ff76 100644 --- a/.github/workflows/project-updater.yml +++ b/.github/workflows/project-updater.yml @@ -13,16 +13,15 @@ jobs: add-to-project: name: Add issues to projects runs-on: ubuntu-latest + timeout-minutes: 10 strategy: matrix: include: # if an issue has any of these labels, it will be added # to the corresponding project - { project: 2, label: "release-blocker, deferred-blocker" } - - { project: 3, label: expert-subinterpreters } - - { project: 29, label: expert-asyncio } - { project: 32, label: sprint } - + steps: - uses: actions/add-to-project@v0.1.0 with: diff --git a/.github/workflows/require-pr-label.yml b/.github/workflows/require-pr-label.yml new file mode 100644 index 00000000000000..916bbeb4352734 --- /dev/null +++ b/.github/workflows/require-pr-label.yml @@ -0,0 +1,18 @@ +name: Check labels + +on: + pull_request: + types: [opened, reopened, labeled, unlabeled, synchronize] + +jobs: + label: + name: DO-NOT-MERGE / unresolved review + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: mheap/github-action-required-labels@v4 + with: + mode: exactly + count: 0 + labels: "DO-NOT-MERGE, awaiting changes, awaiting change review" diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 07dbcfe31d6563..94676f5ee5fffc 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -12,10 +12,11 @@ jobs: if: 
github.repository_owner == 'python' runs-on: ubuntu-latest + timeout-minutes: 10 steps: - name: "Check PRs" - uses: actions/stale@v7 + uses: actions/stale@v8 with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-pr-message: 'This PR is stale because it has been open for 30 days with no activity.' diff --git a/.github/workflows/verify-ensurepip-wheels.yml b/.github/workflows/verify-ensurepip-wheels.yml index 969515ed287b55..17d841f1f1c54a 100644 --- a/.github/workflows/verify-ensurepip-wheels.yml +++ b/.github/workflows/verify-ensurepip-wheels.yml @@ -1,4 +1,4 @@ -name: Verify bundled pip and setuptools +name: Verify bundled wheels on: workflow_dispatch: @@ -23,10 +23,11 @@ concurrency: jobs: verify: runs-on: ubuntu-latest + timeout-minutes: 10 steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: python-version: '3' - - name: Compare checksums of bundled pip and setuptools to ones published on PyPI + - name: Compare checksum of bundled wheels to the ones published on PyPI run: ./Tools/build/verify_ensurepip_wheels.py diff --git a/.mailmap b/.mailmap new file mode 100644 index 00000000000000..013c839ed6b7a4 --- /dev/null +++ b/.mailmap @@ -0,0 +1,3 @@ +# This file sets the canonical name for contributors to the repository. +# Documentation: https://git-scm.com/docs/gitmailmap +Amethyst Reese diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000000000..898a9ae89dbb92 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,18 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details +# Project page: https://readthedocs.org/projects/cpython-previews/ + +version: 2 + +sphinx: + configuration: Doc/conf.py + +build: + os: ubuntu-22.04 + tools: + python: "3" + + commands: + - make -C Doc venv html + - mkdir _readthedocs + - mv Doc/build/html _readthedocs/html diff --git a/Doc/Makefile b/Doc/Makefile index ebe7f3698000fb..c11ea6ce03e8a4 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -13,6 +13,7 @@ JOBS = auto PAPER = SOURCES = DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py) +REQUIREMENTS = requirements.txt SPHINXERRORHANDLING = -W # Internal variables. @@ -154,8 +155,8 @@ venv: echo "To recreate it, remove it first with \`make clean-venv'."; \ else \ $(PYTHON) -m venv $(VENVDIR); \ - $(VENVDIR)/bin/python3 -m pip install -U pip setuptools; \ - $(VENVDIR)/bin/python3 -m pip install -r requirements.txt; \ + $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \ + $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \ echo "The venv has been created in the $(VENVDIR) directory"; \ fi diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst index ddf7dc780b2745..49d2f18d4573b0 100644 --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -86,7 +86,7 @@ Printing and clearing An exception must be set when calling this function. -.. c:function: void PyErr_DisplayException(PyObject *exc) +.. c:function:: void PyErr_DisplayException(PyObject *exc) Print the standard traceback display of ``exc`` to ``sys.stderr``, including chained exceptions and notes. diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index a51619db6d3d97..474a64800044d0 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -188,6 +188,8 @@ Importing Modules .. versionchanged:: 3.3 Uses :func:`imp.source_from_cache()` in calculating the source path if only the bytecode path is provided. + .. versionchanged:: 3.12 + No longer uses the removed ``imp`` module. .. 
c:function:: long PyImport_GetMagicNumber() diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 84c72e7e108b64..0a12bb9e8c54f0 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -179,6 +179,15 @@ Object Protocol If *o1* and *o2* are the same object, :c:func:`PyObject_RichCompareBool` will always return ``1`` for :const:`Py_EQ` and ``0`` for :const:`Py_NE`. +.. c:function:: PyObject* PyObject_Format(PyObject *obj, PyObject *format_spec) + + Format *obj* using *format_spec*. This is equivalent to the Python + expression ``format(obj, format_spec)``. + + *format_spec* may be ``NULL``. In this case the call is equivalent + to ``format(obj)``. + Returns the formatted string on success, ``NULL`` on failure. + .. c:function:: PyObject* PyObject_Repr(PyObject *o) .. index:: builtin: repr diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index 7b5d1fac40ed87..69b15296993301 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -232,6 +232,15 @@ Type Objects .. versionadded:: 3.11 +.. c:function:: int PyUnstable_Type_AssignVersionTag(PyTypeObject *type) + + Attempt to assign a version tag to the given type. + + Returns 1 if the type already had a valid version tag or a new one was + assigned, or 0 if a new tag could not be assigned. + + .. versionadded:: 3.12 + Creating Heap-Allocated Types ............................. diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index fd8f49ccb1caab..e963b90628aa49 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -1145,7 +1145,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. data:: Py_TPFLAGS_MANAGED_DICT - This bit indicates that instances of the class have a ``__dict___`` + This bit indicates that instances of the class have a ``__dict__`` attribute, and that the space for the dictionary is managed by the VM. If this flag is set, :const:`Py_TPFLAGS_HAVE_GC` should also be set. diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index f062f14e9a7561..ab3a2e274d9395 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -509,6 +509,15 @@ APIs: arguments. +.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj) + + Copy an instance of a Unicode subtype to a new true Unicode object if + necessary. If *obj* is already a true Unicode object (not a subtype), + return the reference with incremented refcount. + + Objects other than Unicode or its subtypes will cause a :exc:`TypeError`. + + .. c:function:: PyObject* PyUnicode_FromEncodedObject(PyObject *obj, \ const char *encoding, const char *errors) @@ -616,15 +625,6 @@ APIs: .. versionadded:: 3.3 -.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj) - - Copy an instance of a Unicode subtype to a new true Unicode object if - necessary. If *obj* is already a true Unicode object (not a subtype), - return the reference with incremented refcount. - - Objects other than Unicode or its subtypes will cause a :exc:`TypeError`. - - Locale Encoding """"""""""""""" diff --git a/Doc/c-api/weakref.rst b/Doc/c-api/weakref.rst index ace743ba01c5f5..f27ec4411b4a26 100644 --- a/Doc/c-api/weakref.rst +++ b/Doc/c-api/weakref.rst @@ -67,3 +67,13 @@ as much as it can. .. c:function:: PyObject* PyWeakref_GET_OBJECT(PyObject *ref) Similar to :c:func:`PyWeakref_GetObject`, but does no error checking. + + +.. c:function:: void PyObject_ClearWeakRefs(PyObject *object) + + This function is called by the :c:member:`~PyTypeObject.tp_dealloc` handler + to clear weak references. 
+ + This iterates through the weak references for *object* and calls callbacks + for those references which have one. It returns when all callbacks have + been attempted. diff --git a/Doc/conf.py b/Doc/conf.py index 29fb63cbcc8614..cef2a0e2837f6a 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -68,12 +68,21 @@ # Minimum version of sphinx required needs_sphinx = '3.2' +# Ignore any .rst files in the includes/ directory; +# they're embedded in pages but not rendered individually. # Ignore any .rst files in the venv/ directory. -exclude_patterns = ['venv/*', 'README.rst'] +exclude_patterns = ['includes/*.rst', 'venv/*', 'README.rst'] venvdir = os.getenv('VENVDIR') if venvdir is not None: exclude_patterns.append(venvdir + '/*') +nitpick_ignore = [ + # Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot + # be resolved, as the method is currently undocumented. For context, see + # https://github.com/python/cpython/pull/103289. + ('py:meth', '_SubParsersAction.add_parser'), +] + # Disable Docutils smartquotes for several translations smartquotes_excludes = { 'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], 'builders': ['man', 'text'], @@ -105,12 +114,13 @@ # Short title used e.g. for HTML tags. html_short_title = '%s Documentation' % release -# Deployment preview information, from Netlify -# (See netlify.toml and https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +# Deployment preview information +# (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html) +repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL") html_context = { - "is_deployment_preview": os.getenv("IS_DEPLOYMENT_PREVIEW"), - "repository_url": os.getenv("REPOSITORY_URL"), - "pr_id": os.getenv("REVIEW_ID") + "is_deployment_preview": os.getenv("READTHEDOCS_VERSION_TYPE") == "external", + "repository_url": repository_url.removesuffix(".git") if repository_url else None, + "pr_id": os.getenv("READTHEDOCS_VERSION") } # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, @@ -252,8 +262,31 @@ # Options for the link checker # ---------------------------- -# Ignore certain URLs. -linkcheck_ignore = [r'https://bugs.python.org/(issue)?\d+'] +linkcheck_allowed_redirects = { + # bpo-NNNN -> BPO -> GH Issues + r'https://bugs.python.org/issue\?@action=redirect&bpo=\d+': 'https://github.com/python/cpython/issues/\d+', + # GH-NNNN used to refer to pull requests + r'https://github.com/python/cpython/issues/\d+': 'https://github.com/python/cpython/pull/\d+', + # :source:`something` linking files in the repository + r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*' +} + +linkcheck_anchors_ignore = [ + # ignore anchors that start with a '/', e.g. 
Wikipedia media files: + # https://en.wikipedia.org/wiki/Walrus#/media/File:Pacific_Walrus_-_Bull_(8247646168).jpg + r'\/.*', +] + +linkcheck_ignore = [ + # The crawler gets "Anchor not found" + r'https://developer.apple.com/documentation/.+?#.*', + r'https://devguide.python.org.+?/#.*', + r'https://github.com.+?#.*', + # Robot crawlers not allowed: "403 Client Error: Forbidden" + r'https://support.enthought.com/hc/.*', + # SSLError CertificateError, even though it is valid + r'https://unix.org/version2/whatsnew/lp64_wp.html', +] # Options for extensions diff --git a/Doc/constraints.txt b/Doc/constraints.txt new file mode 100644 index 00000000000000..66c748eb092d83 --- /dev/null +++ b/Doc/constraints.txt @@ -0,0 +1,29 @@ +# We have upper bounds on our transitive dependencies here +# To avoid new releases unexpectedly breaking our build. +# This file can be updated on an ad-hoc basis, +# though it will probably have to be updated +# whenever Doc/requirements.txt is updated. + +# Direct dependencies of Sphinx +babel<3 +colorama<0.5 +imagesize<1.5 +Jinja2<3.2 +packaging<24 +# Pygments==2.15.0 breaks CI +Pygments<2.16,!=2.15.0 +requests<3 +snowballstemmer<3 +sphinxcontrib-applehelp<1.1 +sphinxcontrib-devhelp<1.1 +sphinxcontrib-htmlhelp<2.1 +sphinxcontrib-jsmath<1.1 +sphinxcontrib-qthelp<1.1 +sphinxcontrib-serializinghtml<1.2 + +# Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above) +MarkupSafe<2.2 + +# Direct dependencies of sphinx-lint +polib<1.3 +regex<2024 diff --git a/Doc/distributing/index.rst b/Doc/distributing/index.rst index 21389adedf9c15..d237f8f082d87b 100644 --- a/Doc/distributing/index.rst +++ b/Doc/distributing/index.rst @@ -129,14 +129,10 @@ involved in creating and publishing a project: * `Uploading the project to the Python Package Index`_ * `The .pypirc file`_ -.. _Project structure: \ - https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects -.. _Building and packaging the project: \ - https://packaging.python.org/tutorials/packaging-projects/#creating-the-package-files -.. _Uploading the project to the Python Package Index: \ - https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives -.. _The .pypirc file: \ - https://packaging.python.org/specifications/pypirc/ +.. _Project structure: https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects +.. _Building and packaging the project: https://packaging.python.org/tutorials/packaging-projects/#creating-the-package-files +.. _Uploading the project to the Python Package Index: https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives +.. _The .pypirc file: https://packaging.python.org/specifications/pypirc/ How do I...? diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index 80a1387db200c2..56b40acdb69fed 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -337,7 +337,7 @@ Here is an example:: } PyErr_Format(PyExc_AttributeError, - "'%.50s' object has no attribute '%.400s'", + "'%.100s' object has no attribute '%.400s'", tp->tp_name, name); return NULL; } diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index 54de3fd42437d9..f89934a11f12a8 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -88,7 +88,7 @@ standard Python floats:: The second bit is the definition of the type object. 
:: static PyTypeObject CustomType = { - PyVarObject_HEAD_INIT(NULL, 0) + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) .tp_name = "custom.Custom", .tp_doc = PyDoc_STR("Custom objects"), .tp_basicsize = sizeof(CustomObject), @@ -109,7 +109,7 @@ common practice to not specify them explicitly unless you need them. We're going to pick it apart, one field at a time:: - PyVarObject_HEAD_INIT(NULL, 0) + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) This line is mandatory boilerplate to initialize the ``ob_base`` field mentioned above. :: diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst index a9cde456575020..597caaa778e1c8 100644 --- a/Doc/faq/library.rst +++ b/Doc/faq/library.rst @@ -780,7 +780,7 @@ socket to :meth:`select.select` to check if it's writable. The :mod:`asyncio` module provides a general purpose single-threaded and concurrent asynchronous library, which can be used for writing non-blocking network code. - The third-party `Twisted <https://twistedmatrix.com/trac/>`_ library is + The third-party `Twisted <https://twisted.org/>`_ library is a popular and feature-rich alternative. diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 3d74d550dc345a..53e8cdcae1cd66 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -214,7 +214,7 @@ Glossary A callable is an object that can be called, possibly with a set of arguments (see :term:`argument`), with the following syntax:: - callable(argument1, argument2, ...) + callable(argument1, argument2, argumentN) A :term:`function`, and by extension a :term:`method`, is a callable. An instance of a class that implements the :meth:`~object.__call__` diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst index f682587488a227..52e98fa9620194 100644 --- a/Doc/howto/argparse.rst +++ b/Doc/howto/argparse.rst @@ -1,10 +1,12 @@ +.. _argparse-tutorial: + ***************** Argparse Tutorial ***************** :author: Tshepang Mbambo -.. _argparse-tutorial: +.. currentmodule:: argparse This tutorial is intended to be a gentle introduction to :mod:`argparse`, the recommended command-line parsing module in the Python standard library. @@ -12,7 +14,7 @@ recommended command-line parsing module in the Python standard library. .. note:: There are two other modules that fulfill the same task, namely - :mod:`getopt` (an equivalent for :c:func:`getopt` from the C + :mod:`getopt` (an equivalent for ``getopt()`` from the C language) and the deprecated :mod:`optparse`. Note also that :mod:`argparse` is based on :mod:`optparse`, and therefore very similar in terms of usage. @@ -137,13 +139,13 @@ And running the code: Here is what's happening: -* We've added the :meth:`add_argument` method, which is what we use to specify +* We've added the :meth:`~ArgumentParser.add_argument` method, which is what we use to specify which command-line options the program is willing to accept. In this case, I've named it ``echo`` so that it's in line with its function. * Calling our program now requires us to specify an option. -* The :meth:`parse_args` method actually returns some data from the +* The :meth:`~ArgumentParser.parse_args` method actually returns some data from the options specified, in this case, ``echo``. * The variable is some form of 'magic' that :mod:`argparse` performs for free @@ -256,7 +258,7 @@ Here is what is happening: * To show that the option is actually optional, there is no error when running the program without it. 
Note that by default, if an optional argument isn't - used, the relevant variable, in this case :attr:`args.verbosity`, is + used, the relevant variable, in this case ``args.verbosity``, is given ``None`` as a value, which is the reason it fails the truth test of the :keyword:`if` statement. @@ -299,7 +301,7 @@ Here is what is happening: We even changed the name of the option to match that idea. Note that we now specify a new keyword, ``action``, and give it the value ``"store_true"``. This means that, if the option is specified, - assign the value ``True`` to :data:`args.verbose`. + assign the value ``True`` to ``args.verbose``. Not specifying it implies ``False``. * It complains when you specify a value, in true spirit of what flags @@ -698,7 +700,7 @@ Conflicting options So far, we have been working with two methods of an :class:`argparse.ArgumentParser` instance. Let's introduce a third one, -:meth:`add_mutually_exclusive_group`. It allows for us to specify options that +:meth:`~ArgumentParser.add_mutually_exclusive_group`. It allows for us to specify options that conflict with each other. Let's also change the rest of the program so that the new functionality makes more sense: we'll introduce the ``--quiet`` option, diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst index 74710d9b3fc2ed..3688c47f0d6ec9 100644 --- a/Doc/howto/descriptor.rst +++ b/Doc/howto/descriptor.rst @@ -1273,11 +1273,14 @@ Using the non-data descriptor protocol, a pure Python version of .. testcode:: + import functools + class StaticMethod: "Emulate PyStaticMethod_Type() in Objects/funcobject.c" def __init__(self, f): self.f = f + functools.update_wrapper(self, f) def __get__(self, obj, objtype=None): return self.f @@ -1285,13 +1288,19 @@ Using the non-data descriptor protocol, a pure Python version of def __call__(self, *args, **kwds): return self.f(*args, **kwds) +The :func:`functools.update_wrapper` call adds a ``__wrapped__`` attribute +that refers to the underlying function. Also it carries forward +the attributes necessary to make the wrapper look like the wrapped +function: ``__name__``, ``__qualname__``, ``__doc__``, and ``__annotations__``. + .. testcode:: :hide: class E_sim: @StaticMethod - def f(x): - return x * 10 + def f(x: int) -> str: + "Simple function example" + return "!" * x wrapped_ord = StaticMethod(ord) @@ -1299,11 +1308,51 @@ Using the non-data descriptor protocol, a pure Python version of :hide: >>> E_sim.f(3) - 30 + '!!!' >>> E_sim().f(3) - 30 + '!!!' + + >>> sm = vars(E_sim)['f'] + >>> type(sm).__name__ + 'StaticMethod' + >>> f = E_sim.f + >>> type(f).__name__ + 'function' + >>> sm.__name__ + 'f' + >>> f.__name__ + 'f' + >>> sm.__qualname__ + 'E_sim.f' + >>> f.__qualname__ + 'E_sim.f' + >>> sm.__doc__ + 'Simple function example' + >>> f.__doc__ + 'Simple function example' + >>> sm.__annotations__ + {'x': <class 'int'>, 'return': <class 'str'>} + >>> f.__annotations__ + {'x': <class 'int'>, 'return': <class 'str'>} + >>> sm.__module__ == f.__module__ + True + >>> sm(3) + '!!!' + >>> f(3) + '!!!' + >>> wrapped_ord('A') 65 + >>> wrapped_ord.__module__ == ord.__module__ + True + >>> wrapped_ord.__wrapped__ == ord + True + >>> wrapped_ord.__name__ == ord.__name__ + True + >>> wrapped_ord.__qualname__ == ord.__qualname__ + True + >>> wrapped_ord.__doc__ == ord.__doc__ + True Class methods @@ -1359,11 +1408,14 @@ Using the non-data descriptor protocol, a pure Python version of .. 
testcode:: + import functools + class ClassMethod: "Emulate PyClassMethod_Type() in Objects/funcobject.c" def __init__(self, f): self.f = f + functools.update_wrapper(self, f) def __get__(self, obj, cls=None): if cls is None: @@ -1380,8 +1432,9 @@ Using the non-data descriptor protocol, a pure Python version of # Verify the emulation works class T: @ClassMethod - def cm(cls, x, y): - return (cls, x, y) + def cm(cls, x: int, y: str) -> tuple[str, int, str]: + "Class method that returns a tuple" + return (cls.__name__, x, y) @ClassMethod @property @@ -1393,17 +1446,40 @@ Using the non-data descriptor protocol, a pure Python version of :hide: >>> T.cm(11, 22) - (<class 'T'>, 11, 22) + ('T', 11, 22) # Also call it from an instance >>> t = T() >>> t.cm(11, 22) - (<class 'T'>, 11, 22) + ('T', 11, 22) # Check the alternate path for chained descriptors >>> T.__doc__ "A doc for 'T'" + # Verify that T uses our emulation + >>> type(vars(T)['cm']).__name__ + 'ClassMethod' + + # Verify that update_wrapper() correctly copied attributes + >>> T.cm.__name__ + 'cm' + >>> T.cm.__qualname__ + 'T.cm' + >>> T.cm.__doc__ + 'Class method that returns a tuple' + >>> T.cm.__annotations__ + {'x': <class 'int'>, 'y': <class 'str'>, 'return': tuple[str, int, str]} + + # Verify that __wrapped__ was added and works correctly + >>> f = vars(T)['cm'].__wrapped__ + >>> type(f).__name__ + 'function' + >>> f.__name__ + 'cm' + >>> f(T, 11, 22) + ('T', 11, 22) + The code path for ``hasattr(type(self.f), '__get__')`` was added in Python 3.9 and makes it possible for :func:`classmethod` to support @@ -1423,6 +1499,12 @@ chained together. In Python 3.11, this functionality was deprecated. >>> G.__doc__ "A doc for 'G'" +The :func:`functools.update_wrapper` call in ``ClassMethod`` adds a +``__wrapped__`` attribute that refers to the underlying function. Also +it carries forward the attributes necessary to make the wrapper look +like the wrapped function: ``__name__``, ``__qualname__``, ``__doc__``, +and ``__annotations__``. + Member objects and __slots__ ---------------------------- diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst index 4525acb04503b3..68b75c529e92c7 100644 --- a/Doc/howto/enum.rst +++ b/Doc/howto/enum.rst @@ -36,8 +36,10 @@ inherits from :class:`Enum` itself. .. note:: Case of Enum Members - Because Enums are used to represent constants we recommend using - UPPER_CASE names for members, and will be using that style in our examples. + Because Enums are used to represent constants, and to help avoid issues + with name clashes between mixin-class methods/attributes and enum names, + we strongly recommend using UPPER_CASE names for members, and will be using + that style in our examples. Depending on the nature of the enum a member's value may or may not be important, but either way that value can be used to get the corresponding @@ -284,6 +286,7 @@ The values are chosen by :func:`_generate_next_value_`, which can be overridden:: >>> class AutoName(Enum): + ... @staticmethod ... def _generate_next_value_(name, start, count, last_values): ... return name ... @@ -372,6 +375,11 @@ below):: >>> Color.BLUE == 2 False +.. warning:: + + It is possible to reload modules -- if a reloaded module contains + enums, they will be recreated, and the new members may not + compare identical/equal to the original members. Allowed members and attributes of enumerations ---------------------------------------------- @@ -484,6 +492,10 @@ the :meth:`~Enum.__repr__` omits the inherited class' name. 
For example:: Use the :func:`!dataclass` argument ``repr=False`` to use the standard :func:`repr`. +.. versionchanged:: 3.12 + Only the dataclass fields are shown in the value area, not the dataclass' + name. + Pickling -------- @@ -859,17 +871,19 @@ Some rules: 4. When another data type is mixed in, the :attr:`value` attribute is *not the same* as the enum member itself, although it is equivalent and will compare equal. -5. %-style formatting: ``%s`` and ``%r`` call the :class:`Enum` class's +5. A ``data type`` is a mixin that defines :meth:`__new__`, or a + :class:`~dataclasses.dataclass` +6. %-style formatting: ``%s`` and ``%r`` call the :class:`Enum` class's :meth:`__str__` and :meth:`__repr__` respectively; other codes (such as ``%i`` or ``%h`` for IntEnum) treat the enum member as its mixed-in type. -6. :ref:`Formatted string literals <f-strings>`, :meth:`str.format`, +7. :ref:`Formatted string literals <f-strings>`, :meth:`str.format`, and :func:`format` will use the enum's :meth:`__str__` method. .. note:: Because :class:`IntEnum`, :class:`IntFlag`, and :class:`StrEnum` are designed to be drop-in replacements for existing constants, their - :meth:`__str__` method has been reset to their data types + :meth:`__str__` method has been reset to their data types' :meth:`__str__` method. When to use :meth:`__new__` vs. :meth:`__init__` @@ -982,12 +996,13 @@ but remain normal attributes. """""""""""""""""""" Enum members are instances of their enum class, and are normally accessed as -``EnumClass.member``. In Python versions starting with ``3.5`` you could access -members from other members -- this practice is discouraged, is deprecated -in ``3.12``, and will be removed in ``3.14``. +``EnumClass.member``. In certain situations, such as writing custom enum +behavior, being able to access one member directly from another is useful, +and is supported; however, in order to avoid name clashes between member names +and attributes/methods from mixed-in classes, upper-case names are strongly +recommended. .. versionchanged:: 3.5 -.. versionchanged:: 3.12 Creating members that are mixed with other data types diff --git a/Doc/howto/functional.rst b/Doc/howto/functional.rst index 38a651b0f964a6..5cf12cc52bde4e 100644 --- a/Doc/howto/functional.rst +++ b/Doc/howto/functional.rst @@ -1208,8 +1208,8 @@ General ------- **Structure and Interpretation of Computer Programs**, by Harold Abelson and -Gerald Jay Sussman with Julie Sussman. Full text at -https://mitpress.mit.edu/sicp/. In this classic textbook of computer science, +Gerald Jay Sussman with Julie Sussman. The book can be found at +https://mitpress.mit.edu/sicp. In this classic textbook of computer science, chapters 2 and 3 discuss the use of sequences and streams to organize the data flow inside a program. 
The book uses Scheme for its examples, but many of the design approaches described in these chapters are applicable to functional-style diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst index 2eddb582da7c24..0262054ae2b4a0 100644 --- a/Doc/howto/isolating-extensions.rst +++ b/Doc/howto/isolating-extensions.rst @@ -372,7 +372,7 @@ To save a some tedious error-handling boilerplate code, you can combine these two steps with :c:func:`PyType_GetModuleState`, resulting in:: my_struct *state = (my_struct*)PyType_GetModuleState(type); - if (state === NULL) { + if (state == NULL) { return NULL; } @@ -435,7 +435,7 @@ For example:: PyObject *kwnames) { my_struct *state = (my_struct*)PyType_GetModuleState(defining_class); - if (state === NULL) { + if (state == NULL) { return NULL; } ... // rest of logic @@ -479,7 +479,7 @@ to get the state:: PyObject *module = PyType_GetModuleByDef(Py_TYPE(self), &module_def); my_struct *state = (my_struct*)PyModule_GetState(module); - if (state === NULL) { + if (state == NULL) { return NULL; } diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst index 1a0afb6940dab9..6ef252d709e735 100644 --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -340,10 +340,12 @@ adding a ``filters`` section parallel to ``formatters`` and ``handlers``: .. code-block:: json - "filters": { - "warnings_and_below": { - "()" : "__main__.filter_maker", - "level": "WARNING" + { + "filters": { + "warnings_and_below": { + "()" : "__main__.filter_maker", + "level": "WARNING" + } } } @@ -351,12 +353,14 @@ and changing the section on the ``stdout`` handler to add it: .. code-block:: json - "stdout": { - "class": "logging.StreamHandler", - "level": "INFO", - "formatter": "simple", - "stream": "ext://sys.stdout", - "filters": ["warnings_and_below"] + { + "stdout": { + "class": "logging.StreamHandler", + "level": "INFO", + "formatter": "simple", + "stream": "ext://sys.stdout", + "filters": ["warnings_and_below"] + } } A filter is just a function, so we can define the ``filter_maker`` (a factory diff --git a/Doc/howto/perf_profiling.rst b/Doc/howto/perf_profiling.rst index ad2eb7b4d58aa5..6af5536166f58a 100644 --- a/Doc/howto/perf_profiling.rst +++ b/Doc/howto/perf_profiling.rst @@ -15,9 +15,9 @@ information about the performance of your application. that aid with the analysis of the data that it produces. The main problem with using the ``perf`` profiler with Python applications is that -``perf`` only allows to get information about native symbols, this is, the names of -the functions and procedures written in C. This means that the names and file names -of the Python functions in your code will not appear in the output of the ``perf``. +``perf`` only gets information about native symbols, that is, the names of +functions and procedures written in C. This means that the names and file names +of Python functions in your code will not appear in the output of ``perf``. Since Python 3.12, the interpreter can run in a special mode that allows Python functions to appear in the output of the ``perf`` profiler. When this mode is @@ -28,8 +28,8 @@ relationship between this piece of code and the associated Python function using .. note:: - Support for the ``perf`` profiler is only currently available for Linux on - selected architectures. Check the output of the configure build step or + Support for the ``perf`` profiler is currently only available for Linux on + select architectures. 
Check the output of the ``configure`` build step or check the output of ``python -m sysconfig | grep HAVE_PERF_TRAMPOLINE`` to see if your system is supported. @@ -52,11 +52,11 @@ For example, consider the following script: if __name__ == "__main__": baz(1000000) -We can run ``perf`` to sample CPU stack traces at 9999 Hertz:: +We can run ``perf`` to sample CPU stack traces at 9999 hertz:: $ perf record -F 9999 -g -o perf.data python my_script.py -Then we can use ``perf`` report to analyze the data: +Then we can use ``perf report`` to analyze the data: .. code-block:: shell-session @@ -97,7 +97,7 @@ Then we can use ``perf`` report to analyze the data: | | | | | |--2.97%--_PyObject_Malloc ... -As you can see here, the Python functions are not shown in the output, only ``_Py_Eval_EvalFrameDefault`` appears +As you can see, the Python functions are not shown in the output, only ``_Py_Eval_EvalFrameDefault`` (the function that evaluates the Python bytecode) shows up. Unfortunately that's not very useful because all Python functions use the same C function to evaluate bytecode so we cannot know which Python function corresponds to which bytecode-evaluating function. @@ -151,7 +151,7 @@ Instead, if we run the same experiment with ``perf`` support enabled we get: How to enable ``perf`` profiling support ---------------------------------------- -``perf`` profiling support can either be enabled from the start using +``perf`` profiling support can be enabled either from the start using the environment variable :envvar:`PYTHONPERFSUPPORT` or the :option:`-X perf <-X>` option, or dynamically using :func:`sys.activate_stack_trampoline` and @@ -192,7 +192,7 @@ Example, using the :mod:`sys` APIs in file :file:`example.py`: How to obtain the best results ------------------------------ -For the best results, Python should be compiled with +For best results, Python should be compiled with ``CFLAGS="-fno-omit-frame-pointer -mno-omit-leaf-frame-pointer"`` as this allows profilers to unwind using only the frame pointer and not on DWARF debug information. This is because as the code that is interposed to allow ``perf`` diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst index 69af3c3a85c5d6..61ba6bd7224fcc 100644 --- a/Doc/howto/urllib2.rst +++ b/Doc/howto/urllib2.rst @@ -86,7 +86,7 @@ response:: import urllib.request - req = urllib.request.Request('http://www.voidspace.org.uk') + req = urllib.request.Request('http://python.org/') with urllib.request.urlopen(req) as response: the_page = response.read() @@ -458,7 +458,7 @@ To illustrate creating and installing a handler we will use the ``HTTPBasicAuthHandler``. For a more detailed discussion of this subject -- including an explanation of how Basic Authentication works - see the `Basic Authentication Tutorial -<http://www.voidspace.org.uk/python/articles/authentication.shtml>`_. +<https://web.archive.org/web/20201215133350/http://www.voidspace.org.uk/python/articles/authentication.shtml>`__. When authentication is required, the server sends a header (as well as the 401 error code) requesting authentication. 
This specifies the authentication scheme diff --git a/Doc/includes/custom.c b/Doc/includes/custom.c index 26ca754964733d..9cfba50ace25db 100644 --- a/Doc/includes/custom.c +++ b/Doc/includes/custom.c @@ -7,7 +7,7 @@ typedef struct { } CustomObject; static PyTypeObject CustomType = { - PyVarObject_HEAD_INIT(NULL, 0) + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) .tp_name = "custom.Custom", .tp_doc = PyDoc_STR("Custom objects"), .tp_basicsize = sizeof(CustomObject), @@ -17,7 +17,7 @@ static PyTypeObject CustomType = { }; static PyModuleDef custommodule = { - PyModuleDef_HEAD_INIT, + .m_base = PyModuleDef_HEAD_INIT, .m_name = "custom", .m_doc = "Example module that creates an extension type.", .m_size = -1, diff --git a/Doc/includes/custom2.c b/Doc/includes/custom2.c index a3b2d6ab78d3c4..a0222b1795209b 100644 --- a/Doc/includes/custom2.c +++ b/Doc/includes/custom2.c @@ -90,7 +90,7 @@ static PyMethodDef Custom_methods[] = { }; static PyTypeObject CustomType = { - PyVarObject_HEAD_INIT(NULL, 0) + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) .tp_name = "custom2.Custom", .tp_doc = PyDoc_STR("Custom objects"), .tp_basicsize = sizeof(CustomObject), @@ -104,7 +104,7 @@ static PyTypeObject CustomType = { }; static PyModuleDef custommodule = { - PyModuleDef_HEAD_INIT, + .m_base =PyModuleDef_HEAD_INIT, .m_name = "custom2", .m_doc = "Example module that creates an extension type.", .m_size = -1, diff --git a/Doc/includes/custom3.c b/Doc/includes/custom3.c index 1a68bc4be8c399..4aeebe0a7507d1 100644 --- a/Doc/includes/custom3.c +++ b/Doc/includes/custom3.c @@ -130,7 +130,7 @@ static PyMethodDef Custom_methods[] = { }; static PyTypeObject CustomType = { - PyVarObject_HEAD_INIT(NULL, 0) + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) .tp_name = "custom3.Custom", .tp_doc = PyDoc_STR("Custom objects"), .tp_basicsize = sizeof(CustomObject), @@ -145,7 +145,7 @@ static PyTypeObject CustomType = { }; static PyModuleDef custommodule = { - PyModuleDef_HEAD_INIT, + .m_base = PyModuleDef_HEAD_INIT, .m_name = "custom3", .m_doc = "Example module that creates an extension type.", .m_size = -1, diff --git a/Doc/includes/custom4.c b/Doc/includes/custom4.c index b932d159d26e93..3998918f68301e 100644 --- a/Doc/includes/custom4.c +++ b/Doc/includes/custom4.c @@ -146,7 +146,7 @@ static PyMethodDef Custom_methods[] = { }; static PyTypeObject CustomType = { - PyVarObject_HEAD_INIT(NULL, 0) + .ob_base = PyVarObject_HEAD_INIT(NULL, 0) .tp_name = "custom4.Custom", .tp_doc = PyDoc_STR("Custom objects"), .tp_basicsize = sizeof(CustomObject), @@ -163,7 +163,7 @@ static PyTypeObject CustomType = { }; static PyModuleDef custommodule = { - PyModuleDef_HEAD_INIT, + .m_base = PyModuleDef_HEAD_INIT, .m_name = "custom4", .m_doc = "Example module that creates an extension type.", .m_size = -1, diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index 761c88710f9891..d29cbdff7830c8 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -124,7 +124,7 @@ This is where using the ``if __name__ == '__main__'`` code block comes in handy. Code within this block won't run unless the module is executed in the top-level environment. -Putting as few statements as possible in the block below ``if __name___ == +Putting as few statements as possible in the block below ``if __name__ == '__main__'`` can improve code clarity and correctness. 
Most often, a function named ``main`` encapsulates the program's primary behavior:: diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index ee68ac58d3de75..33e367f3ccda89 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -67,7 +67,7 @@ default_ Default value used when an argument is not provided dest_ Specify the attribute name used in the result namespace help_ Help message for an argument metavar_ Alternate display name for the argument as shown in help -nargs_ Number of times the argument can be used :class:`int`, ``'?'``, ``'*'``, ``'+'``, or ``argparse.REMAINDER`` +nargs_ Number of times the argument can be used :class:`int`, ``'?'``, ``'*'``, or ``'+'`` required_ Indicate whether an argument is required or optional ``True`` or ``False`` type_ Automatically convert an argument to the given type :class:`int`, :class:`float`, ``argparse.FileType('w')``, or callable function ====================== =========================================================== ========================================================================================================================== @@ -585,7 +585,7 @@ arguments will never be treated as file references. .. versionchanged:: 3.12 :class:`ArgumentParser` changed encoding and errors to read arguments files - from default (e.g. :func:`locale.getpreferredencoding(False)` and + from default (e.g. :func:`locale.getpreferredencoding(False) <locale.getpreferredencoding>` and ``"strict"``) to :term:`filesystem encoding and error handler`. Arguments file should be encoded in UTF-8 instead of ANSI Codepage on Windows. @@ -1191,7 +1191,7 @@ done downstream after the arguments are parsed. For example, JSON or YAML conversions have complex error cases that require better reporting than can be given by the ``type`` keyword. A :exc:`~json.JSONDecodeError` would not be well formatted and a -:exc:`FileNotFound` exception would not be handled at all. +:exc:`FileNotFoundError` exception would not be handled at all. Even :class:`~argparse.FileType` has its limitations for use with the ``type`` keyword. If one argument uses *FileType* and then a subsequent argument fails, @@ -1445,7 +1445,7 @@ Action classes Action classes implement the Action API, a callable which returns a callable which processes arguments from the command-line. Any object which follows this API may be passed as the ``action`` parameter to -:meth:`add_argument`. +:meth:`~ArgumentParser.add_argument`. .. class:: Action(option_strings, dest, nargs=None, const=None, default=None, \ type=None, choices=None, required=False, help=None, \ @@ -1723,7 +1723,7 @@ Sub-commands :class:`ArgumentParser` supports the creation of such sub-commands with the :meth:`add_subparsers` method. The :meth:`add_subparsers` method is normally called with no arguments and returns a special action object. This object - has a single method, :meth:`~ArgumentParser.add_parser`, which takes a + has a single method, :meth:`~_SubParsersAction.add_parser`, which takes a command name and any :class:`ArgumentParser` constructor arguments, and returns an :class:`ArgumentParser` object that can be modified as usual. @@ -1789,7 +1789,7 @@ Sub-commands for that particular parser will be printed. The help message will not include parent parser or sibling parser messages. (A help message for each subparser command, however, can be given by supplying the ``help=`` argument - to :meth:`add_parser` as above.) + to :meth:`~_SubParsersAction.add_parser` as above.) 
:: @@ -2157,7 +2157,7 @@ the populated namespace and the list of remaining argument strings. .. warning:: :ref:`Prefix matching <prefix-matching>` rules apply to - :meth:`parse_known_args`. The parser may consume an option even if it's just + :meth:`~ArgumentParser.parse_known_args`. The parser may consume an option even if it's just a prefix of one of its known options, instead of leaving it in the remaining arguments list. @@ -2218,7 +2218,7 @@ support this parsing style. These parsers do not support all the argparse features, and will raise exceptions if unsupported features are used. In particular, subparsers, -``argparse.REMAINDER``, and mutually exclusive groups that include both +and mutually exclusive groups that include both optionals and positionals are not supported. The following example shows the difference between @@ -2295,3 +2295,17 @@ A partial upgrade path from :mod:`optparse` to :mod:`argparse`: * Replace the OptionParser constructor ``version`` argument with a call to ``parser.add_argument('--version', action='version', version='<the version>')``. + +Exceptions +---------- + +.. exception:: ArgumentError + + An error from creating or using an argument (optional or positional). + + The string value of this exception is the message, augmented with + information about the argument that caused it. + +.. exception:: ArgumentTypeError + + Raised when something goes wrong converting a command line string to a type. diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 5138afc2bbe47b..e982cc166a3f2d 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -1438,9 +1438,7 @@ async/await code consider using the high-level * *stdin* can be any of these: - * a file-like object representing a pipe to be connected to the - subprocess's standard input stream using - :meth:`~loop.connect_write_pipe` + * a file-like object * the :const:`subprocess.PIPE` constant (default) which will create a new pipe and connect it, * the value ``None`` which will make the subprocess inherit the file @@ -1450,9 +1448,7 @@ async/await code consider using the high-level * *stdout* can be any of these: - * a file-like object representing a pipe to be connected to the - subprocess's standard output stream using - :meth:`~loop.connect_write_pipe` + * a file-like object * the :const:`subprocess.PIPE` constant (default) which will create a new pipe and connect it, * the value ``None`` which will make the subprocess inherit the file @@ -1462,9 +1458,7 @@ async/await code consider using the high-level * *stderr* can be any of these: - * a file-like object representing a pipe to be connected to the - subprocess's standard error stream using - :meth:`~loop.connect_write_pipe` + * a file-like object * the :const:`subprocess.PIPE` constant (default) which will create a new pipe and connect it, * the value ``None`` which will make the subprocess inherit the file @@ -1483,6 +1477,11 @@ async/await code consider using the high-level as text. :func:`bytes.decode` can be used to convert the bytes returned from the stream to text. + If a file-like object passed as *stdin*, *stdout* or *stderr* represents a + pipe, then the other side of this pipe should be registered with + :meth:`~loop.connect_write_pipe` or :meth:`~loop.connect_read_pipe` for use + with the event loop. + See the constructor of the :class:`subprocess.Popen` class for documentation on other arguments. 
@@ -1571,7 +1570,7 @@ Server objects are created by :meth:`loop.create_server`, :meth:`loop.create_unix_server`, :func:`start_server`, and :func:`start_unix_server` functions. -Do not instantiate the class directly. +Do not instantiate the :class:`Server` class directly. .. class:: Server @@ -1662,7 +1661,8 @@ Do not instantiate the class directly. .. attribute:: sockets - List of :class:`socket.socket` objects the server is listening on. + List of socket-like objects, ``asyncio.trsock.TransportSocket``, which + the server is listening on. .. versionchanged:: 3.7 Prior to Python 3.7 ``Server.sockets`` used to return an diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 4274638c5e8625..b7c83aa04c09f1 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -207,8 +207,9 @@ their completion. Interact with process: 1. send data to *stdin* (if *input* is not ``None``); - 2. read data from *stdout* and *stderr*, until EOF is reached; - 3. wait for process to terminate. + 2. closes *stdin*; + 3. read data from *stdout* and *stderr*, until EOF is reached; + 4. wait for process to terminate. The optional *input* argument is the data (:class:`bytes` object) that will be sent to the child process. @@ -229,6 +230,10 @@ their completion. Note, that the data read is buffered in memory, so do not use this method if the data size is large or unlimited. + .. versionchanged:: 3.12 + + *stdin* gets closed when `input=None` too. + .. method:: send_signal(signal) Sends the signal *signal* to the child process. diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index a0900cd25a7731..f8727b98066990 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -256,8 +256,9 @@ Creating Tasks .. note:: - :meth:`asyncio.TaskGroup.create_task` is a newer alternative - that allows for convenient waiting for a group of related tasks. + :meth:`asyncio.TaskGroup.create_task` is a new alternative + leveraging structural concurrency; it allows for waiting + for a group of related tasks with strong safety guarantees. .. important:: @@ -300,13 +301,17 @@ in the task at the next opportunity. It is recommended that coroutines use ``try/finally`` blocks to robustly perform clean-up logic. In case :exc:`asyncio.CancelledError` is explicitly caught, it should generally be propagated when -clean-up is complete. Most code can safely ignore :exc:`asyncio.CancelledError`. +clean-up is complete. :exc:`asyncio.CancelledError` directly subclasses +:exc:`BaseException` so most code will not need to be aware of it. The asyncio components that enable structured concurrency, like :class:`asyncio.TaskGroup` and :func:`asyncio.timeout`, are implemented using cancellation internally and might misbehave if a coroutine swallows :exc:`asyncio.CancelledError`. Similarly, user code -should not call :meth:`uncancel <asyncio.Task.uncancel>`. +should not generally call :meth:`uncancel <asyncio.Task.uncancel>`. +However, in cases when suppressing :exc:`asyncio.CancelledError` is +truly desired, it is necessary to also call ``uncancel()`` to completely +remove the cancellation state. .. _taskgroups: @@ -336,7 +341,7 @@ Example:: async with asyncio.TaskGroup() as tg: task1 = tg.create_task(some_coro(...)) task2 = tg.create_task(another_coro(...)) - print("Both tasks have completed now.") + print(f"Both tasks have completed now: {task1.result()}, {task2.result()}") The ``async with`` statement will wait for all tasks in the group to finish. 
While waiting, new tasks may still be added to the group @@ -455,8 +460,12 @@ Running Tasks Concurrently Tasks/Futures to be cancelled. .. note:: - A more modern way to create and run tasks concurrently and - wait for their completion is :class:`asyncio.TaskGroup`. + A new alternative to create and run tasks concurrently and + wait for their completion is :class:`asyncio.TaskGroup`. *TaskGroup* + provides stronger safety guarantees than *gather* for scheduling a nesting of subtasks: + if a task (or a subtask, a task scheduled by a task) + raises an exception, *TaskGroup* will, while *gather* will not, + cancel the remaining scheduled tasks). .. _asyncio_example_gather: @@ -518,6 +527,42 @@ Running Tasks Concurrently and there is no running event loop. +Eager Task Factory +================== + +.. function:: eager_task_factory(loop, coro, *, name=None, context=None) + + A task factory for eager task execution. + + When using this factory (via :meth:`loop.set_task_factory(asyncio.eager_task_factory) <loop.set_task_factory>`), + coroutines begin execution synchronously during :class:`Task` construction. + Tasks are only scheduled on the event loop if they block. + This can be a performance improvement as the overhead of loop scheduling + is avoided for coroutines that complete synchronously. + + A common example where this is beneficial is coroutines which employ + caching or memoization to avoid actual I/O when possible. + + .. note:: + + Immediate execution of the coroutine is a semantic change. + If the coroutine returns or raises, the task is never scheduled + to the event loop. If the coroutine execution blocks, the task is + scheduled to the event loop. This change may introduce behavior + changes to existing applications. For example, + the application's task execution order is likely to change. + + .. versionadded:: 3.12 + +.. function:: create_eager_task_factory(custom_task_constructor) + + Create an eager task factory, similar to :func:`eager_task_factory`, + using the provided *custom_task_constructor* when creating a new task instead + of the default :class:`Task`. + + .. versionadded:: 3.12 + + Shielding From Cancellation =========================== @@ -620,32 +665,26 @@ Timeouts The context manager produced by :func:`asyncio.timeout` can be rescheduled to a different deadline and inspected. - .. class:: Timeout() + .. class:: Timeout(when) An :ref:`asynchronous context manager <async-context-managers>` - that limits time spent inside of it. + for cancelling overdue coroutines. - .. versionadded:: 3.11 + ``when`` should be an absolute time at which the context should time out, + as measured by the event loop's clock: + + - If ``when`` is ``None``, the timeout will never trigger. + - If ``when < loop.time()``, the timeout will trigger on the next + iteration of the event loop. .. method:: when() -> float | None Return the current deadline, or ``None`` if the current deadline is not set. - The deadline is a float, consistent with the time returned by - :meth:`loop.time`. - .. method:: reschedule(when: float | None) - Change the time the timeout will trigger. - - If *when* is ``None``, any current deadline will be removed, and the - context manager will wait indefinitely. - - If *when* is a float, it is set as the new deadline. - - if *when* is in the past, the timeout will trigger on the next - iteration of the event loop. + Reschedule the timeout. .. 
method:: expired() -> bool @@ -831,6 +870,9 @@ Waiting Primitives Deprecation warning is emitted if not all awaitable objects in the *aws* iterable are Future-like objects and there is no running event loop. + .. versionchanged:: 3.12 + Added support for generators yielding tasks. + Running in Threads ================== @@ -962,6 +1004,13 @@ Introspection .. versionadded:: 3.7 +.. function:: iscoroutine(obj) + + Return ``True`` if *obj* is a coroutine object. + + .. versionadded:: 3.4 + + Task Object =========== @@ -1148,7 +1197,9 @@ Task Object Therefore, unlike :meth:`Future.cancel`, :meth:`Task.cancel` does not guarantee that the Task will be cancelled, although suppressing cancellation completely is not common and is actively - discouraged. + discouraged. Should the coroutine nevertheless decide to suppress + the cancellation, it needs to call :meth:`Task.uncancel` in addition + to catching the exception. .. versionchanged:: 3.9 Added the *msg* parameter. @@ -1238,6 +1289,10 @@ Task Object with :meth:`uncancel`. :class:`TaskGroup` context managers use :func:`uncancel` in a similar fashion. + If end-user code is, for some reason, suppresing cancellation by + catching :exc:`CancelledError`, it needs to call this method to remove + the cancellation state. + .. method:: cancelling() Return the number of pending cancellation requests to this Task, i.e., diff --git a/Doc/library/bisect.rst b/Doc/library/bisect.rst index b85564f17866e0..e3c8c801904b61 100644 --- a/Doc/library/bisect.rst +++ b/Doc/library/bisect.rst @@ -210,10 +210,10 @@ records in a table:: >>> Movie = namedtuple('Movie', ('name', 'released', 'director')) >>> movies = [ - ... Movie('Jaws', 1975, 'Speilberg'), + ... Movie('Jaws', 1975, 'Spielberg'), ... Movie('Titanic', 1997, 'Cameron'), ... Movie('The Birds', 1963, 'Hitchcock'), - ... Movie('Aliens', 1986, 'Scott') + ... Movie('Aliens', 1986, 'Cameron') ... ] >>> # Find the first movie released after 1960 @@ -228,8 +228,8 @@ records in a table:: >>> pprint(movies) [Movie(name='The Birds', released=1963, director='Hitchcock'), Movie(name='Love Story', released=1970, director='Hiller'), - Movie(name='Jaws', released=1975, director='Speilberg'), - Movie(name='Aliens', released=1986, director='Scott'), + Movie(name='Jaws', released=1975, director='Spielberg'), + Movie(name='Aliens', released=1986, director='Cameron'), Movie(name='Titanic', released=1997, director='Cameron')] If the key function is expensive, it is possible to avoid repeated function diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst index 66f59f0e2ced27..07d04a1c7b582a 100644 --- a/Doc/library/calendar.rst +++ b/Doc/library/calendar.rst @@ -28,6 +28,58 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is 2 BC, and so on. +.. class:: Day + + Enumeration defining the days of the week as integer constants, from 0 to 6. + + .. attribute:: MONDAY + + .. attribute:: TUESDAY + + .. attribute:: WEDNESDAY + + .. attribute:: THURSDAY + + .. attribute:: FRIDAY + + .. attribute:: SATURDAY + + .. attribute:: SUNDAY + + .. versionadded:: 3.12 + + +.. class:: Month + + Enumeration defining months of the year as integer constants, from 1 to 12. + + .. attribute:: JANUARY + + .. attribute:: FEBRUARY + + .. attribute:: MARCH + + .. attribute:: APRIL + + .. attribute:: MAY + + .. attribute:: JUNE + + .. attribute:: JULY + + .. attribute:: AUGUST + + .. attribute:: SEPTEMBER + + .. attribute:: OCTOBER + + .. attribute:: NOVEMBER + + .. attribute:: DECEMBER + + .. versionadded:: 3.12 + + .. 
class:: Calendar(firstweekday=0) Creates a :class:`Calendar` object. *firstweekday* is an integer specifying the diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index c66b9d3ec0a26d..90df499f8207b7 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -19,10 +19,10 @@ module instead. There are two parts to this job: -#. Being able to tell if a line of input completes a Python statement: in +#. Being able to tell if a line of input completes a Python statement: in short, telling whether to print '``>>>``' or '``...``' next. -#. Remembering which future statements the user has entered, so subsequent +#. Remembering which future statements the user has entered, so subsequent input can be compiled with these in effect. The :mod:`codeop` module provides a way of doing each of these things, and a way @@ -33,9 +33,9 @@ To do just the former: .. function:: compile_command(source, filename="<input>", symbol="single") Tries to compile *source*, which should be a string of Python code and return a - code object if *source* is valid Python code. In that case, the filename + code object if *source* is valid Python code. In that case, the filename attribute of the code object will be *filename*, which defaults to - ``'<input>'``. Returns ``None`` if *source* is *not* valid Python code, but is a + ``'<input>'``. Returns ``None`` if *source* is *not* valid Python code, but is a prefix of valid Python code. If there is a problem with *source*, an exception will be raised. @@ -43,9 +43,9 @@ To do just the former: :exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal. The *symbol* argument determines whether *source* is compiled as a statement - (``'single'``, the default), as a sequence of statements (``'exec'``) or + (``'single'``, the default), as a sequence of :term:`statement` (``'exec'``) or as an :term:`expression` (``'eval'``). Any other value will - cause :exc:`ValueError` to be raised. + cause :exc:`ValueError` to be raised. .. note:: @@ -69,5 +69,5 @@ To do just the former: Instances of this class have :meth:`__call__` methods identical in signature to :func:`compile_command`; the difference is that if the instance compiles program - text containing a ``__future__`` statement, the instance 'remembers' and + text containing a :mod:`__future__` statement, the instance 'remembers' and compiles all subsequent program texts with the statement in force. diff --git a/Doc/library/contextlib.rst b/Doc/library/contextlib.rst index 1b55868c3aa62f..7cd081d1f54f43 100644 --- a/Doc/library/contextlib.rst +++ b/Doc/library/contextlib.rst @@ -304,8 +304,15 @@ Functions and classes provided: This context manager is :ref:`reentrant <reentrant-cms>`. + If the code within the :keyword:`!with` block raises an + :exc:`ExceptionGroup`, suppressed exceptions are removed from the + group. If any exceptions in the group are not suppressed, a group containing them is re-raised. + .. versionadded:: 3.4 + .. versionchanged:: 3.12 + ``suppress`` now supports suppressing exceptions raised as + part of an :exc:`ExceptionGroup`. .. function:: redirect_stdout(new_target) diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst index 866b180f4bc3b8..2107215c0c1967 100644 --- a/Doc/library/copyreg.rst +++ b/Doc/library/copyreg.rst @@ -28,8 +28,8 @@ Such constructors may be factory functions or class instances. .. function:: pickle(type, function, constructor_ob=None) Declares that *function* should be used as a "reduction" function for objects - of type *type*. 
*function* should return either a string or a tuple - containing two or three elements. See the :attr:`~pickle.Pickler.dispatch_table` + of type *type*. *function* must return either a string or a tuple + containing two or five elements. See the :attr:`~pickle.Pickler.dispatch_table` for more details on the interface of *function*. The *constructor_ob* parameter is a legacy feature and is now ignored, but if diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst index f1776554d8b9f2..64baa69be4af31 100644 --- a/Doc/library/csv.rst +++ b/Doc/library/csv.rst @@ -327,7 +327,7 @@ The :mod:`csv` module defines the following constants: Instructs :class:`writer` objects to quote all non-numeric fields. - Instructs the reader to convert all non-quoted fields to type *float*. + Instructs :class:`reader` objects to convert all non-quoted fields to type *float*. .. data:: QUOTE_NONE @@ -337,7 +337,25 @@ The :mod:`csv` module defines the following constants: character. If *escapechar* is not set, the writer will raise :exc:`Error` if any characters that require escaping are encountered. - Instructs :class:`reader` to perform no special processing of quote characters. + Instructs :class:`reader` objects to perform no special processing of quote characters. + +.. data:: QUOTE_NOTNULL + + Instructs :class:`writer` objects to quote all fields which are not + ``None``. This is similar to :data:`QUOTE_ALL`, except that if a + field value is ``None`` an empty (unquoted) string is written. + + Instructs :class:`reader` objects to interpret an empty (unquoted) field as None and + to otherwise behave as :data:`QUOTE_ALL`. + +.. data:: QUOTE_STRINGS + + Instructs :class:`writer` objects to always place quotes around fields + which are strings. This is similar to :data:`QUOTE_NONNUMERIC`, except that if a + field value is ``None`` an empty (unquoted) string is written. + + Instructs :class:`reader` objects to interpret an empty (unquoted) string as ``None`` and + to otherwise behave as :data:`QUOTE_NONNUMERIC`. The :mod:`csv` module defines the following exception: diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 8fd681286b812d..81509c0920bb6e 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -375,8 +375,8 @@ that they can be converted to the required C data type:: .. _ctypes-calling-variadic-functions: -Calling varadic functions -^^^^^^^^^^^^^^^^^^^^^^^^^ +Calling variadic functions +^^^^^^^^^^^^^^^^^^^^^^^^^^ On a lot of platforms calling variadic functions through ctypes is exactly the same as calling functions with a fixed number of parameters. On some platforms, and in @@ -390,7 +390,7 @@ regular, non-variadic, function arguments: libc.printf.argtypes = [ctypes.c_char_p] -Because specifying the attribute does inhibit portability it is advised to always +Because specifying the attribute does not inhibit portability it is advised to always specify ``argtypes`` for all variadic functions. @@ -508,7 +508,7 @@ a string pointer and a char, and returns a pointer to a string:: If you want to avoid the ``ord("x")`` calls above, you can set the :attr:`argtypes` attribute, and the second argument will be converted from a -single character Python bytes object into a C char:: +single character Python bytes object into a C char: .. 
doctest:: diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 5f4dc25bfd7877..a5b20149921042 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -12,8 +12,8 @@ -------------- This module provides a decorator and functions for automatically -adding generated :term:`special method`\s such as :meth:`__init__` and -:meth:`__repr__` to user-defined classes. It was originally described +adding generated :term:`special method`\s such as :meth:`~object.__init__` and +:meth:`~object.__repr__` to user-defined classes. It was originally described in :pep:`557`. The member variables to use in these generated methods are defined @@ -31,7 +31,7 @@ using :pep:`526` type annotations. For example, this code:: def total_cost(self) -> float: return self.unit_price * self.quantity_on_hand -will add, among other things, a :meth:`__init__` that looks like:: +will add, among other things, a :meth:`~object.__init__` that looks like:: def __init__(self, name: str, unit_price: float, quantity_on_hand: int = 0): self.name = name @@ -86,86 +86,86 @@ Module contents The parameters to :func:`dataclass` are: - - ``init``: If true (the default), a :meth:`__init__` method will be + - ``init``: If true (the default), a :meth:`~object.__init__` method will be generated. - If the class already defines :meth:`__init__`, this parameter is + If the class already defines :meth:`~object.__init__`, this parameter is ignored. - - ``repr``: If true (the default), a :meth:`__repr__` method will be + - ``repr``: If true (the default), a :meth:`~object.__repr__` method will be generated. The generated repr string will have the class name and the name and repr of each field, in the order they are defined in the class. Fields that are marked as being excluded from the repr are not included. For example: ``InventoryItem(name='widget', unit_price=3.0, quantity_on_hand=10)``. - If the class already defines :meth:`__repr__`, this parameter is + If the class already defines :meth:`~object.__repr__`, this parameter is ignored. - - ``eq``: If true (the default), an :meth:`__eq__` method will be + - ``eq``: If true (the default), an :meth:`~object.__eq__` method will be generated. This method compares the class as if it were a tuple of its fields, in order. Both instances in the comparison must be of the identical type. - If the class already defines :meth:`__eq__`, this parameter is + If the class already defines :meth:`~object.__eq__`, this parameter is ignored. - - ``order``: If true (the default is ``False``), :meth:`__lt__`, - :meth:`__le__`, :meth:`__gt__`, and :meth:`__ge__` methods will be + - ``order``: If true (the default is ``False``), :meth:`~object.__lt__`, + :meth:`~object.__le__`, :meth:`~object.__gt__`, and :meth:`~object.__ge__` methods will be generated. These compare the class as if it were a tuple of its fields, in order. Both instances in the comparison must be of the identical type. If ``order`` is true and ``eq`` is false, a :exc:`ValueError` is raised. - If the class already defines any of :meth:`__lt__`, - :meth:`__le__`, :meth:`__gt__`, or :meth:`__ge__`, then + If the class already defines any of :meth:`~object.__lt__`, + :meth:`~object.__le__`, :meth:`~object.__gt__`, or :meth:`~object.__ge__`, then :exc:`TypeError` is raised. - - ``unsafe_hash``: If ``False`` (the default), a :meth:`__hash__` method + - ``unsafe_hash``: If ``False`` (the default), a :meth:`~object.__hash__` method is generated according to how ``eq`` and ``frozen`` are set. 
- :meth:`__hash__` is used by built-in :meth:`hash()`, and when objects are + :meth:`~object.__hash__` is used by built-in :meth:`hash()`, and when objects are added to hashed collections such as dictionaries and sets. Having a - :meth:`__hash__` implies that instances of the class are immutable. + :meth:`~object.__hash__` implies that instances of the class are immutable. Mutability is a complicated property that depends on the programmer's - intent, the existence and behavior of :meth:`__eq__`, and the values of + intent, the existence and behavior of :meth:`~object.__eq__`, and the values of the ``eq`` and ``frozen`` flags in the :func:`dataclass` decorator. - By default, :func:`dataclass` will not implicitly add a :meth:`__hash__` + By default, :func:`dataclass` will not implicitly add a :meth:`~object.__hash__` method unless it is safe to do so. Neither will it add or change an - existing explicitly defined :meth:`__hash__` method. Setting the class + existing explicitly defined :meth:`~object.__hash__` method. Setting the class attribute ``__hash__ = None`` has a specific meaning to Python, as - described in the :meth:`__hash__` documentation. + described in the :meth:`~object.__hash__` documentation. - If :meth:`__hash__` is not explicitly defined, or if it is set to ``None``, - then :func:`dataclass` *may* add an implicit :meth:`__hash__` method. + If :meth:`~object.__hash__` is not explicitly defined, or if it is set to ``None``, + then :func:`dataclass` *may* add an implicit :meth:`~object.__hash__` method. Although not recommended, you can force :func:`dataclass` to create a - :meth:`__hash__` method with ``unsafe_hash=True``. This might be the case + :meth:`~object.__hash__` method with ``unsafe_hash=True``. This might be the case if your class is logically immutable but can nonetheless be mutated. This is a specialized use case and should be considered carefully. - Here are the rules governing implicit creation of a :meth:`__hash__` - method. Note that you cannot both have an explicit :meth:`__hash__` + Here are the rules governing implicit creation of a :meth:`~object.__hash__` + method. Note that you cannot both have an explicit :meth:`~object.__hash__` method in your dataclass and set ``unsafe_hash=True``; this will result in a :exc:`TypeError`. If ``eq`` and ``frozen`` are both true, by default :func:`dataclass` will - generate a :meth:`__hash__` method for you. If ``eq`` is true and - ``frozen`` is false, :meth:`__hash__` will be set to ``None``, marking it + generate a :meth:`~object.__hash__` method for you. If ``eq`` is true and + ``frozen`` is false, :meth:`~object.__hash__` will be set to ``None``, marking it unhashable (which it is, since it is mutable). If ``eq`` is false, - :meth:`__hash__` will be left untouched meaning the :meth:`__hash__` + :meth:`~object.__hash__` will be left untouched meaning the :meth:`~object.__hash__` method of the superclass will be used (if the superclass is :class:`object`, this means it will fall back to id-based hashing). - ``frozen``: If true (the default is ``False``), assigning to fields will generate an exception. This emulates read-only frozen instances. If - :meth:`__setattr__` or :meth:`__delattr__` is defined in the class, then + :meth:`~object.__setattr__` or :meth:`~object.__delattr__` is defined in the class, then :exc:`TypeError` is raised. See the discussion below. 
- ``match_args``: If true (the default is ``True``), the ``__match_args__`` tuple will be created from the list of - parameters to the generated :meth:`__init__` method (even if - :meth:`__init__` is not generated, see above). If false, or if + parameters to the generated :meth:`~object.__init__` method (even if + :meth:`~object.__init__` is not generated, see above). If false, or if ``__match_args__`` is already defined in the class, then ``__match_args__`` will not be generated. @@ -173,18 +173,18 @@ Module contents - ``kw_only``: If true (the default value is ``False``), then all fields will be marked as keyword-only. If a field is marked as - keyword-only, then the only effect is that the :meth:`__init__` + keyword-only, then the only effect is that the :meth:`~object.__init__` parameter generated from a keyword-only field must be specified - with a keyword when :meth:`__init__` is called. There is no + with a keyword when :meth:`~object.__init__` is called. There is no effect on any other aspect of dataclasses. See the :term:`parameter` glossary entry for details. Also see the :const:`KW_ONLY` section. .. versionadded:: 3.10 - - ``slots``: If true (the default is ``False``), :attr:`__slots__` attribute + - ``slots``: If true (the default is ``False``), :attr:`~object.__slots__` attribute will be generated and new class will be returned instead of the original one. - If :attr:`__slots__` is already defined in the class, then :exc:`TypeError` + If :attr:`~object.__slots__` is already defined in the class, then :exc:`TypeError` is raised. .. versionadded:: 3.10 @@ -215,7 +215,7 @@ Module contents b: int = 0 # assign a default value for 'b' In this example, both ``a`` and ``b`` will be included in the added - :meth:`__init__` method, which will be defined as:: + :meth:`~object.__init__` method, which will be defined as:: def __init__(self, a: int, b: int = 0): @@ -256,13 +256,13 @@ Module contents error to specify both ``default`` and ``default_factory``. - ``init``: If true (the default), this field is included as a - parameter to the generated :meth:`__init__` method. + parameter to the generated :meth:`~object.__init__` method. - ``repr``: If true (the default), this field is included in the - string returned by the generated :meth:`__repr__` method. + string returned by the generated :meth:`~object.__repr__` method. - ``hash``: This can be a bool or ``None``. If true, this field is - included in the generated :meth:`__hash__` method. If ``None`` (the + included in the generated :meth:`~object.__hash__` method. If ``None`` (the default), use the value of ``compare``: this would normally be the expected behavior. A field should be considered in the hash if it's used for comparisons. Setting this value to anything @@ -275,8 +275,8 @@ Module contents is excluded from the hash, it will still be used for comparisons. - ``compare``: If true (the default), this field is included in the - generated equality and comparison methods (:meth:`__eq__`, - :meth:`__gt__`, et al.). + generated equality and comparison methods (:meth:`~object.__eq__`, + :meth:`~object.__gt__`, et al.). - ``metadata``: This can be a mapping or None. None is treated as an empty dict. This value is wrapped in @@ -287,7 +287,7 @@ Module contents namespace in the metadata. - ``kw_only``: If true, this field will be marked as keyword-only. - This is used when the generated :meth:`__init__` method's + This is used when the generated :meth:`~object.__init__` method's parameters are computed. .. 
versionadded:: 3.10 @@ -435,13 +435,13 @@ Module contents Class, raises :exc:`TypeError`. If values in ``changes`` do not specify fields, raises :exc:`TypeError`. - The newly returned object is created by calling the :meth:`__init__` + The newly returned object is created by calling the :meth:`~object.__init__` method of the dataclass. This ensures that :meth:`__post_init__`, if present, is also called. Init-only variables without default values, if any exist, must be specified on the call to :func:`replace` so that they can be passed to - :meth:`__init__` and :meth:`__post_init__`. + :meth:`~object.__init__` and :meth:`__post_init__`. It is an error for ``changes`` to contain any fields that are defined as having ``init=False``. A :exc:`ValueError` will be raised @@ -480,7 +480,7 @@ Module contents :const:`KW_ONLY` is otherwise completely ignored. This includes the name of such a field. By convention, a name of ``_`` is used for a :const:`KW_ONLY` field. Keyword-only fields signify - :meth:`__init__` parameters that must be specified as keywords when + :meth:`~object.__init__` parameters that must be specified as keywords when the class is instantiated. In this example, the fields ``y`` and ``z`` will be marked as keyword-only fields:: @@ -501,35 +501,38 @@ Module contents .. exception:: FrozenInstanceError - Raised when an implicitly defined :meth:`__setattr__` or - :meth:`__delattr__` is called on a dataclass which was defined with + Raised when an implicitly defined :meth:`~object.__setattr__` or + :meth:`~object.__delattr__` is called on a dataclass which was defined with ``frozen=True``. It is a subclass of :exc:`AttributeError`. +.. _post-init-processing: + Post-init processing -------------------- -The generated :meth:`__init__` code will call a method named -:meth:`__post_init__`, if :meth:`__post_init__` is defined on the -class. It will normally be called as ``self.__post_init__()``. -However, if any ``InitVar`` fields are defined, they will also be -passed to :meth:`__post_init__` in the order they were defined in the -class. If no :meth:`__init__` method is generated, then -:meth:`__post_init__` will not automatically be called. +.. function:: __post_init__() -Among other uses, this allows for initializing field values that -depend on one or more other fields. For example:: + When defined on the class, it will be called by the generated + :meth:`~object.__init__`, normally as ``self.__post_init__()``. + However, if any ``InitVar`` fields are defined, they will also be + passed to :meth:`__post_init__` in the order they were defined in the + class. If no :meth:`~object.__init__` method is generated, then + :meth:`__post_init__` will not automatically be called. - @dataclass - class C: - a: float - b: float - c: float = field(init=False) + Among other uses, this allows for initializing field values that + depend on one or more other fields. For example:: - def __post_init__(self): - self.c = self.a + self.b + @dataclass + class C: + a: float + b: float + c: float = field(init=False) + + def __post_init__(self): + self.c = self.a + self.b -The :meth:`__init__` method generated by :func:`dataclass` does not call base -class :meth:`__init__` methods. If the base class has an :meth:`__init__` method +The :meth:`~object.__init__` method generated by :func:`dataclass` does not call base +class :meth:`~object.__init__` methods. 
If the base class has an :meth:`~object.__init__` method that has to be called, it is common to call this method in a :meth:`__post_init__` method:: @@ -545,7 +548,7 @@ that has to be called, it is common to call this method in a def __post_init__(self): super().__init__(self.side, self.side) -Note, however, that in general the dataclass-generated :meth:`__init__` methods +Note, however, that in general the dataclass-generated :meth:`~object.__init__` methods don't need to be called, since the derived dataclass will take care of initializing all fields of any base class that is a dataclass itself. @@ -573,7 +576,7 @@ if the type of a field is of type ``dataclasses.InitVar``. If a field is an ``InitVar``, it is considered a pseudo-field called an init-only field. As it is not a true field, it is not returned by the module-level :func:`fields` function. Init-only fields are added as -parameters to the generated :meth:`__init__` method, and are passed to +parameters to the generated :meth:`~object.__init__` method, and are passed to the optional :meth:`__post_init__` method. They are not otherwise used by dataclasses. @@ -601,12 +604,12 @@ Frozen instances It is not possible to create truly immutable Python objects. However, by passing ``frozen=True`` to the :meth:`dataclass` decorator you can emulate immutability. In that case, dataclasses will add -:meth:`__setattr__` and :meth:`__delattr__` methods to the class. These +:meth:`~object.__setattr__` and :meth:`~object.__delattr__` methods to the class. These methods will raise a :exc:`FrozenInstanceError` when invoked. There is a tiny performance penalty when using ``frozen=True``: -:meth:`__init__` cannot use simple assignment to initialize fields, and -must use :meth:`object.__setattr__`. +:meth:`~object.__init__` cannot use simple assignment to initialize fields, and +must use :meth:`~object.__setattr__`. Inheritance ----------- @@ -634,14 +637,14 @@ example:: The final list of fields is, in order, ``x``, ``y``, ``z``. The final type of ``x`` is ``int``, as specified in class ``C``. -The generated :meth:`__init__` method for ``C`` will look like:: +The generated :meth:`~object.__init__` method for ``C`` will look like:: def __init__(self, x: int = 15, y: int = 0, z: int = 10): -Re-ordering of keyword-only parameters in :meth:`__init__` ----------------------------------------------------------- +Re-ordering of keyword-only parameters in :meth:`~object.__init__` +------------------------------------------------------------------ -After the parameters needed for :meth:`__init__` are computed, any +After the parameters needed for :meth:`~object.__init__` are computed, any keyword-only parameters are moved to come after all regular (non-keyword-only) parameters. This is a requirement of how keyword-only parameters are implemented in Python: they must come @@ -662,7 +665,7 @@ fields, and ``Base.x`` and ``D.z`` are regular fields:: z: int = 10 t: int = field(kw_only=True, default=0) -The generated :meth:`__init__` method for ``D`` will look like:: +The generated :meth:`~object.__init__` method for ``D`` will look like:: def __init__(self, x: Any = 15.0, z: int = 10, *, y: int = 0, w: int = 1, t: int = 0): @@ -671,7 +674,7 @@ the list of fields: parameters derived from regular fields are followed by parameters derived from keyword-only fields. The relative ordering of keyword-only parameters is maintained in the -re-ordered :meth:`__init__` parameter list. +re-ordered :meth:`~object.__init__` parameter list. 
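A minimal sketch of the re-ordering described above, using simplified, hypothetical ``Base`` and ``D`` classes rather than the exact ones from the preceding example::

    from dataclasses import dataclass, field
    from typing import Any

    @dataclass
    class Base:
        x: Any = 15.0
        y: int = field(kw_only=True, default=0)

    @dataclass
    class D(Base):
        z: int = 10
        t: int = field(kw_only=True, default=0)

    # Keyword-only parameters are moved after the regular ones, so the
    # generated __init__ is effectively:
    #     def __init__(self, x=15.0, z=10, *, y=0, t=0)
    d = D(1, 2, y=3, t=4)
    print(d)   # D(x=1, y=3, z=2, t=4)

The relative order of ``y`` and ``t`` is preserved even though they come from different classes.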
Default factory functions @@ -683,10 +686,10 @@ example, to create a new instance of a list, use:: mylist: list = field(default_factory=list) -If a field is excluded from :meth:`__init__` (using ``init=False``) +If a field is excluded from :meth:`~object.__init__` (using ``init=False``) and the field also specifies ``default_factory``, then the default factory function will always be called from the generated -:meth:`__init__` function. This happens because there is no other +:meth:`~object.__init__` function. This happens because there is no other way to give the field an initial value. Mutable default values @@ -714,7 +717,7 @@ Using dataclasses, *if* this code was valid:: @dataclass class D: - x: List = [] + x: list = [] # This code raises ValueError def add(self, element): self.x += element diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 50827b27ebea04..bed19ad145a20c 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -737,18 +737,16 @@ Instance methods: .. method:: date.strftime(format) Return a string representing the date, controlled by an explicit format string. - Format codes referring to hours, minutes or seconds will see 0 values. For a - complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + Format codes referring to hours, minutes or seconds will see 0 values. + See also :ref:`strftime-strptime-behavior` and :meth:`date.isoformat`. .. method:: date.__format__(format) Same as :meth:`.date.strftime`. This makes it possible to specify a format string for a :class:`.date` object in :ref:`formatted string - literals <f-strings>` and when using :meth:`str.format`. For a - complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + literals <f-strings>` and when using :meth:`str.format`. + See also :ref:`strftime-strptime-behavior` and :meth:`date.isoformat`. Examples of Usage: :class:`date` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -898,6 +896,10 @@ Other constructors, all class methods: in UTC. As such, the recommended way to create an object representing the current time in UTC is by calling ``datetime.now(timezone.utc)``. + .. deprecated:: 3.12 + + Use :meth:`datetime.now` with :attr:`UTC` instead. + .. classmethod:: datetime.fromtimestamp(timestamp, tz=None) @@ -966,6 +968,10 @@ Other constructors, all class methods: :c:func:`gmtime` function. Raise :exc:`OSError` instead of :exc:`ValueError` on :c:func:`gmtime` failure. + .. deprecated:: 3.12 + + Use :meth:`datetime.fromtimestamp` with :attr:`UTC` instead. + .. classmethod:: datetime.fromordinal(ordinal) @@ -1045,14 +1051,14 @@ Other constructors, all class methods: Return a :class:`.datetime` corresponding to *date_string*, parsed according to *format*. - This is equivalent to:: + If *format* does not contain microseconds or timezone information, this is equivalent to:: datetime(*(time.strptime(date_string, format)[0:6])) :exc:`ValueError` is raised if the date_string and format can't be parsed by :func:`time.strptime` or if it returns a value which isn't a - time tuple. For a complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + time tuple. See also :ref:`strftime-strptime-behavior` and + :meth:`datetime.fromisoformat`. @@ -1510,20 +1516,21 @@ Instance methods: (which :func:`time.ctime` invokes, but which :meth:`datetime.ctime` does not invoke) conforms to the C standard. + .. method:: datetime.strftime(format) - Return a string representing the date and time, controlled by an explicit format - string. 
For a complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + Return a string representing the date and time, + controlled by an explicit format string. + See also :ref:`strftime-strptime-behavior` and :meth:`datetime.isoformat`. .. method:: datetime.__format__(format) Same as :meth:`.datetime.strftime`. This makes it possible to specify a format string for a :class:`.datetime` object in :ref:`formatted string - literals <f-strings>` and when using :meth:`str.format`. For a - complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + literals <f-strings>` and when using :meth:`str.format`. + See also :ref:`strftime-strptime-behavior` and :meth:`datetime.isoformat`. + Examples of Usage: :class:`.datetime` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1868,17 +1875,15 @@ Instance methods: .. method:: time.strftime(format) Return a string representing the time, controlled by an explicit format - string. For a complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + string. See also :ref:`strftime-strptime-behavior` and :meth:`time.isoformat`. .. method:: time.__format__(format) - Same as :meth:`.time.strftime`. This makes it possible to specify a format string - for a :class:`.time` object in :ref:`formatted string - literals <f-strings>` and when using :meth:`str.format`. For a - complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + Same as :meth:`.time.strftime`. This makes it possible to specify + a format string for a :class:`.time` object in :ref:`formatted string + literals <f-strings>` and when using :meth:`str.format`. + See also :ref:`strftime-strptime-behavior` and :meth:`time.isoformat`. .. method:: time.utcoffset() @@ -2320,6 +2325,14 @@ versus :meth:`strptime`: :meth:`strftime` and :meth:`strptime` Format Codes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +These methods accept format codes that can be used to parse and format dates:: + + >>> datetime.strptime('31/01/22 23:59:59.999999', + ... '%d/%m/%y %H:%M:%S.%f') + datetime.datetime(2022, 1, 31, 23, 59, 59, 999999) + >>> _.strftime('%a %d %b %Y, %I:%M%p') + 'Mon 31 Jan 2022, 11:59PM' + The following is a list of all the format codes that the 1989 C standard requires, and these work on all platforms with a standard C implementation. @@ -2505,10 +2518,7 @@ Notes: Because the format depends on the current locale, care should be taken when making assumptions about the output value. Field orderings will vary (for example, "month/day/year" versus "day/month/year"), and the output may - contain Unicode characters encoded using the locale's default encoding (for - example, if the current locale is ``ja_JP``, the default encoding could be - any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale` - to determine the current locale's encoding). + contain non-ASCII characters. (2) The :meth:`strptime` method can parse years in the full [1, 9999] range, but diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 6187098a752053..0b4a4973cb4da0 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -926,7 +926,7 @@ Each thread has its own current context which is accessed or changed using the You can also use the :keyword:`with` statement and the :func:`localcontext` function to temporarily change the active context. -.. function:: localcontext(ctx=None, \*\*kwargs) +.. 
function:: localcontext(ctx=None, **kwargs) Return a context manager that will set the current context for the active thread to a copy of *ctx* on entry to the with-statement and restore the previous context diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index e44481f1f13d44..7fae0fee7e724b 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -59,7 +59,7 @@ the following command can be used to display the disassembly of 3 2 LOAD_GLOBAL 1 (NULL + len) 12 LOAD_FAST 0 (alist) 14 CALL 1 - 24 RETURN_VALUE + 22 RETURN_VALUE (The "2" is a line number). @@ -402,7 +402,7 @@ The Python compiler currently generates the following bytecode instructions. **General instructions** -In the following, We will refer to the interpreter stack as STACK and describe +In the following, We will refer to the interpreter stack as ``STACK`` and describe operations on it as if it was a Python list. The top of the stack corresponds to ``STACK[-1]`` in this language. @@ -414,7 +414,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. opcode:: POP_TOP - Removes the top-of-stack item.:: + Removes the top-of-stack item:: STACK.pop() @@ -422,7 +422,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. opcode:: END_FOR Removes the top two values from the stack. - Equivalent to POP_TOP; POP_TOP. + Equivalent to ``POP_TOP``; ``POP_TOP``. Used to clean up at the end of loops, hence the name. .. versionadded:: 3.12 @@ -431,7 +431,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. opcode:: COPY (i) Push the i-th item to the top of the stack without removing it from its original - location.:: + location:: assert i > 0 STACK.append(STACK[-i]) @@ -441,7 +441,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. opcode:: SWAP (i) - Swap the top of the stack with the i-th element.:: + Swap the top of the stack with the i-th element:: STACK[-i], STACK[-1] = stack[-1], STACK[-i] @@ -513,7 +513,7 @@ not have to be) the original ``STACK[-2]``. .. opcode:: BINARY_OP (op) Implements the binary and in-place operators (depending on the value of - *op*).:: + *op*):: rhs = STACK.pop() lhs = STACK.pop() @@ -580,14 +580,14 @@ not have to be) the original ``STACK[-2]``. Implements ``STACK[-1] = get_awaitable(STACK[-1])``, where ``get_awaitable(o)`` returns ``o`` if ``o`` is a coroutine object or a generator object with - the CO_ITERABLE_COROUTINE flag, or resolves + the :data:`~inspect.CO_ITERABLE_COROUTINE` flag, or resolves ``o.__await__``. If the ``where`` operand is nonzero, it indicates where the instruction occurs: - * ``1`` After a call to ``__aenter__`` - * ``2`` After a call to ``__aexit__`` + * ``1``: After a call to ``__aenter__`` + * ``2``: After a call to ``__aexit__`` .. versionadded:: 3.5 @@ -652,6 +652,7 @@ not have to be) the original ``STACK[-2]``. .. opcode:: SET_ADD (i) Implements:: + item = STACK.pop() set.add(STACK[-i], item) @@ -705,11 +706,11 @@ iterations of the loop. Yields ``STACK.pop()`` from a :term:`generator`. - .. versionchanged:: 3.11 - oparg set to be the stack depth. + .. versionchanged:: 3.11 + oparg set to be the stack depth. - .. versionchanged:: 3.12 - oparg set to be the exception block depth, for efficient closing of generators. + .. versionchanged:: 3.12 + oparg set to be the exception block depth, for efficient closing of generators. .. opcode:: SETUP_ANNOTATIONS @@ -726,32 +727,32 @@ iterations of the loop. 
Pops a value from the stack, which is used to restore the exception state. - .. versionchanged:: 3.11 - Exception representation on the stack now consist of one, not three, items. + .. versionchanged:: 3.11 + Exception representation on the stack now consist of one, not three, items. .. opcode:: RERAISE - Re-raises the exception currently on top of the stack. If oparg is non-zero, - pops an additional value from the stack which is used to set ``f_lasti`` - of the current frame. + Re-raises the exception currently on top of the stack. If oparg is non-zero, + pops an additional value from the stack which is used to set ``f_lasti`` + of the current frame. - .. versionadded:: 3.9 + .. versionadded:: 3.9 - .. versionchanged:: 3.11 - Exception representation on the stack now consist of one, not three, items. + .. versionchanged:: 3.11 + Exception representation on the stack now consist of one, not three, items. .. opcode:: PUSH_EXC_INFO - Pops a value from the stack. Pushes the current exception to the top of the stack. - Pushes the value originally popped back to the stack. - Used in exception handlers. + Pops a value from the stack. Pushes the current exception to the top of the stack. + Pushes the value originally popped back to the stack. + Used in exception handlers. - .. versionadded:: 3.11 + .. versionadded:: 3.11 .. opcode:: CHECK_EXC_MATCH Performs exception matching for ``except``. Tests whether the ``STACK[-2]`` - is an exception matching ``STACK[-1]``. Pops STACK[-1] and pushes the boolean + is an exception matching ``STACK[-1]``. Pops ``STACK[-1]`` and pushes the boolean result of the test. .. versionadded:: 3.11 @@ -770,16 +771,16 @@ iterations of the loop. .. opcode:: WITH_EXCEPT_START - Calls the function in position 4 on the stack with arguments (type, val, tb) - representing the exception at the top of the stack. - Used to implement the call ``context_manager.__exit__(*exc_info())`` when an exception - has occurred in a :keyword:`with` statement. + Calls the function in position 4 on the stack with arguments (type, val, tb) + representing the exception at the top of the stack. + Used to implement the call ``context_manager.__exit__(*exc_info())`` when an exception + has occurred in a :keyword:`with` statement. - .. versionadded:: 3.9 + .. versionadded:: 3.9 - .. versionchanged:: 3.11 - The ``__exit__`` function is in position 4 of the stack rather than 7. - Exception representation on the stack now consist of one, not three, items. + .. versionchanged:: 3.11 + The ``__exit__`` function is in position 4 of the stack rather than 7. + Exception representation on the stack now consist of one, not three, items. .. opcode:: LOAD_ASSERTION_ERROR @@ -863,7 +864,7 @@ iterations of the loop. .. opcode:: UNPACK_SEQUENCE (count) Unpacks ``STACK[-1]`` into *count* individual values, which are put onto the stack - right-to-left.:: + right-to-left:: STACK.extend(STACK.pop()[:count:-1]) @@ -1028,7 +1029,7 @@ iterations of the loop. This bytecode distinguishes two cases: if ``STACK[-1]`` has a method with the correct name, the bytecode pushes the unbound method and ``STACK[-1]``. ``STACK[-1]`` will be used as the first argument (``self``) by :opcode:`CALL` - when calling the unbound method. Otherwise, ``NULL`` and the object return by + when calling the unbound method. Otherwise, ``NULL`` and the object returned by the attribute lookup are pushed. .. versionchanged:: 3.12 @@ -1036,21 +1037,30 @@ iterations of the loop. pushed to the stack before the attribute or unbound method respectively. -.. 
opcode:: COMPARE_OP (opname) - - Performs a Boolean operation. The operation name can be found in - ``cmp_op[opname]``. +.. opcode:: LOAD_SUPER_ATTR (namei) + This opcode implements :func:`super` (e.g. ``super().method()`` and + ``super().attr``). It works the same as :opcode:`LOAD_ATTR`, except that + ``namei`` is shifted left by 2 bits instead of 1, and instead of expecting a + single receiver on the stack, it expects three objects (from top of stack + down): ``self`` (the first argument to the current method), ``cls`` (the + class within which the current method was defined), and the global ``super``. -.. opcode:: COMPARE_AND_BRANCH (opname) + The low bit of ``namei`` signals to attempt a method load, as with + :opcode:`LOAD_ATTR`. - Compares the top two values on the stack, popping them, then branches. - The direction and offset of the jump is embedded as a ``POP_JUMP_IF_TRUE`` - or ``POP_JUMP_IF_FALSE`` instruction immediately following the cache. + The second-low bit of ``namei``, if set, means that this was a two-argument + call to :func:`super` (unset means zero-argument). .. versionadded:: 3.12 +.. opcode:: COMPARE_OP (opname) + + Performs a Boolean operation. The operation name can be found in + ``cmp_op[opname]``. + + .. opcode:: IS_OP (invert) Performs ``is`` comparison, or ``is not`` if ``invert`` is 1. @@ -1152,30 +1162,6 @@ iterations of the loop. .. versionchanged:: 3.12 This is no longer a pseudo-instruction. - -.. opcode:: JUMP_IF_TRUE_OR_POP (delta) - - If ``STACK[-1]`` is true, increments the bytecode counter by *delta* and leaves - ``STACK[-1]`` on the stack. Otherwise (``STACK[-1]`` is false), ``STACK[-1]`` - is popped. - - .. versionadded:: 3.1 - - .. versionchanged:: 3.11 - The oparg is now a relative delta rather than an absolute target. - -.. opcode:: JUMP_IF_FALSE_OR_POP (delta) - - If ``STACK[-1]`` is false, increments the bytecode counter by *delta* and leaves - ``STACK[-1]`` on the stack. Otherwise (``STACK[-1]`` is true), ``STACK[-1]`` is - popped. - - .. versionadded:: 3.1 - - .. versionchanged:: 3.11 - The oparg is now a relative delta rather than an absolute target. - - .. opcode:: FOR_ITER (delta) ``STACK[-1]`` is an :term:`iterator`. Call its :meth:`~iterator.__next__` method. @@ -1230,7 +1216,7 @@ iterations of the loop. .. opcode:: MAKE_CELL (i) - Creates a new cell in slot ``i``. If that slot is empty then + Creates a new cell in slot ``i``. If that slot is nonempty then that value is stored into the new cell. .. versionadded:: 3.11 @@ -1355,9 +1341,9 @@ iterations of the loop. .. opcode:: PUSH_NULL - Pushes a ``NULL`` to the stack. - Used in the call sequence to match the ``NULL`` pushed by - :opcode:`LOAD_METHOD` for non-method calls. + Pushes a ``NULL`` to the stack. + Used in the call sequence to match the ``NULL`` pushed by + :opcode:`LOAD_METHOD` for non-method calls. .. versionadded:: 3.11 @@ -1457,38 +1443,38 @@ iterations of the loop. .. opcode:: RESUME (where) - A no-op. Performs internal tracing, debugging and optimization checks. + A no-op. Performs internal tracing, debugging and optimization checks. 
- The ``where`` operand marks where the ``RESUME`` occurs: + The ``where`` operand marks where the ``RESUME`` occurs: - * ``0`` The start of a function, which is neither a generator, coroutine - nor an async generator - * ``1`` After a ``yield`` expression - * ``2`` After a ``yield from`` expression - * ``3`` After an ``await`` expression + * ``0`` The start of a function, which is neither a generator, coroutine + nor an async generator + * ``1`` After a ``yield`` expression + * ``2`` After a ``yield from`` expression + * ``3`` After an ``await`` expression .. versionadded:: 3.11 .. opcode:: RETURN_GENERATOR - Create a generator, coroutine, or async generator from the current frame. - Used as first opcode of in code object for the above mentioned callables. - Clear the current frame and return the newly created generator. + Create a generator, coroutine, or async generator from the current frame. + Used as first opcode of in code object for the above mentioned callables. + Clear the current frame and return the newly created generator. - .. versionadded:: 3.11 + .. versionadded:: 3.11 .. opcode:: SEND (delta) - Equivalent to ``STACK[-1] = STACK[-2].send(STACK[-1])``. Used in ``yield from`` - and ``await`` statements. + Equivalent to ``STACK[-1] = STACK[-2].send(STACK[-1])``. Used in ``yield from`` + and ``await`` statements. - If the call raises :exc:`StopIteration`, pop both items, push the - exception's ``value`` attribute, and increment the bytecode counter by - *delta*. + If the call raises :exc:`StopIteration`, pop both items, push the + exception's ``value`` attribute, and increment the bytecode counter by + *delta*. - .. versionadded:: 3.11 + .. versionadded:: 3.11 .. opcode:: HAVE_ARGUMENT @@ -1516,15 +1502,15 @@ iterations of the loop. argument and sets ``STACK[-1]`` to the result. Used to implement functionality that is necessary but not performance critical. - The operand determines which intrinsic function is called: + The operand determines which intrinsic function is called: - * ``0`` Not valid - * ``1`` Prints the argument to standard out. Used in the REPL. - * ``2`` Performs ``import *`` for the named module. - * ``3`` Extracts the return value from a ``StopIteration`` exception. - * ``4`` Wraps an aync generator value - * ``5`` Performs the unary ``+`` operation - * ``6`` Converts a list to a tuple + * ``0`` Not valid + * ``1`` Prints the argument to standard out. Used in the REPL. + * ``2`` Performs ``import *`` for the named module. + * ``3`` Extracts the return value from a ``StopIteration`` exception. + * ``4`` Wraps an aync generator value + * ``5`` Performs the unary ``+`` operation + * ``6`` Converts a list to a tuple .. versionadded:: 3.12 @@ -1534,17 +1520,17 @@ iterations of the loop. arguments and sets ``STACK[-1]`` to the result. Used to implement functionality that is necessary but not performance critical. - The operand determines which intrinsic function is called: + The operand determines which intrinsic function is called: - * ``0`` Not valid - * ``1`` Calculates the :exc:`ExceptionGroup` to raise from a ``try-except*``. + * ``0`` Not valid + * ``1`` Calculates the :exc:`ExceptionGroup` to raise from a ``try-except*``. .. versionadded:: 3.12 **Pseudo-instructions** -These opcodes do not appear in python bytecode, they are used by the compiler +These opcodes do not appear in Python bytecode. They are used by the compiler but are replaced by real opcodes or removed before bytecode is generated. .. 
opcode:: SETUP_FINALLY (target) @@ -1556,7 +1542,7 @@ but are replaced by real opcodes or removed before bytecode is generated. .. opcode:: SETUP_CLEANUP (target) - Like ``SETUP_FINALLY``, but in case of exception also pushes the last + Like ``SETUP_FINALLY``, but in case of an exception also pushes the last instruction (``lasti``) to the stack so that ``RERAISE`` can restore it. If an exception occurs, the value stack level and the last instruction on the frame are restored to their current state, and control is transferred @@ -1565,7 +1551,7 @@ but are replaced by real opcodes or removed before bytecode is generated. .. opcode:: SETUP_WITH (target) - Like ``SETUP_CLEANUP``, but in case of exception one more item is popped + Like ``SETUP_CLEANUP``, but in case of an exception one more item is popped from the stack before control is transferred to the exception handler at ``target``. @@ -1599,9 +1585,9 @@ Opcode collections These collections are provided for automatic introspection of bytecode instructions: - .. versionchanged:: 3.12 - The collections now contain pseudo instructions as well. These are - opcodes with values ``>= MIN_PSEUDO_OPCODE``. +.. versionchanged:: 3.12 + The collections now contain pseudo instructions as well. These are + opcodes with values ``>= MIN_PSEUDO_OPCODE``. .. data:: opname @@ -1622,7 +1608,7 @@ instructions: Sequence of bytecodes that use their argument. - .. versionadded:: 3.12 + .. versionadded:: 3.12 .. data:: hasconst @@ -1632,10 +1618,10 @@ instructions: .. data:: hasfree - Sequence of bytecodes that access a free variable (note that 'free' in this + Sequence of bytecodes that access a free variable. 'free' in this context refers to names in the current scope that are referenced by inner scopes or names in outer scopes that are referenced from this scope. It does - *not* include references to global or builtin scopes). + *not* include references to global or builtin scopes. .. data:: hasname @@ -1666,4 +1652,4 @@ instructions: Sequence of bytecodes that set an exception handler. - .. versionadded:: 3.12 + .. versionadded:: 3.12 diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst index 0e266b6a45782a..345b64001c1ace 100644 --- a/Doc/library/email.utils.rst +++ b/Doc/library/email.utils.rst @@ -13,19 +13,17 @@ module: .. function:: localtime(dt=None) - Return local time as an aware datetime object. If called without - arguments, return current time. Otherwise *dt* argument should be a - :class:`~datetime.datetime` instance, and it is converted to the local time - zone according to the system time zone database. If *dt* is naive (that - is, ``dt.tzinfo`` is ``None``), it is assumed to be in local time. In this - case, a positive or zero value for *isdst* causes ``localtime`` to presume - initially that summer time (for example, Daylight Saving Time) is or is not - (respectively) in effect for the specified time. A negative value for - *isdst* causes the ``localtime`` to attempt to divine whether summer time - is in effect for the specified time. - - .. versionadded:: 3.3 + Return local time as an aware datetime object. If called without + arguments, return current time. Otherwise *dt* argument should be a + :class:`~datetime.datetime` instance, and it is converted to the local time + zone according to the system time zone database. If *dt* is naive (that + is, ``dt.tzinfo`` is ``None``), it is assumed to be in local time. The + *isdst* parameter is ignored. + .. versionadded:: 3.3 + + .. 
deprecated-removed:: 3.12 3.14 + The *isdst* parameter. .. function:: make_msgid(idstring=None, domain=None) diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index 24b6dbfe37cd38..582e06261afd72 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -119,7 +119,8 @@ Module Contents :func:`~enum.property` Allows :class:`Enum` members to have attributes without conflicting with - member names. + member names. The ``value`` and ``name`` attributes are implemented this + way. :func:`unique` @@ -141,9 +142,8 @@ Module Contents :func:`global_enum` Modify the :class:`str() <str>` and :func:`repr` of an enum - to show its members as belonging to the module instead of its class. - Should only be used if the enum members will be exported to the - module global namespace. + to show its members as belonging to the module instead of its class, + and export the enum members to the global namespace. :func:`show_flag_values` @@ -170,6 +170,27 @@ Data Types final *enum*, as well as creating the enum members, properly handling duplicates, providing iteration over the enum class, etc. + .. method:: EnumType.__call__(cls, value, names=None, \*, module=None, qualname=None, type=None, start=1, boundary=None) + + This method is called in two different ways: + + * to look up an existing member: + + :cls: The enum class being called. + :value: The value to lookup. + + * to use the ``cls`` enum to create a new enum (only if the existing enum + does not have any members): + + :cls: The enum class being called. + :value: The name of the new Enum to create. + :names: The names/values of the members for the new Enum. + :module: The name of the module the new Enum is created in. + :qualname: The actual location in the module where this Enum can be found. + :type: A mix-in type for the new Enum. + :start: The first integer value for the Enum (used by :class:`auto`). + :boundary: How to handle out-of-range values from bit operations (:class:`Flag` only). + .. method:: EnumType.__contains__(cls, member) Returns ``True`` if member belongs to the ``cls``:: @@ -255,26 +276,6 @@ Data Types names will also be removed from the completed enumeration. See :ref:`TimePeriod <enum-time-period>` for an example. - .. method:: Enum.__call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None) - - This method is called in two different ways: - - * to look up an existing member: - - :cls: The enum class being called. - :value: The value to lookup. - - * to use the ``cls`` enum to create a new enum: - - :cls: The enum class being called. - :value: The name of the new Enum to create. - :names: The names/values of the members for the new Enum. - :module: The name of the module the new Enum is created in. - :qualname: The actual location in the module where this Enum can be found. - :type: A mix-in type for the new Enum. - :start: The first integer value for the Enum (used by :class:`auto`). - :boundary: How to handle out-of-range values from bit operations (:class:`Flag` only). - .. method:: Enum.__dir__(self) Returns ``['__class__', '__doc__', '__module__', 'name', 'value']`` and @@ -317,7 +318,7 @@ Data Types >>> PowersOfThree.SECOND.value 9 - .. method:: Enum.__init_subclass__(cls, **kwds) + .. method:: Enum.__init_subclass__(cls, \**kwds) A *classmethod* that is used to further configure subsequent subclasses. By default, does nothing. @@ -696,7 +697,8 @@ Data Types .. 
attribute:: STRICT - Out-of-range values cause a :exc:`ValueError` to be raised:: + Out-of-range values cause a :exc:`ValueError` to be raised. This is the + default for :class:`Flag`:: >>> from enum import Flag, STRICT, auto >>> class StrictFlag(Flag, boundary=STRICT): @@ -714,7 +716,7 @@ Data Types .. attribute:: CONFORM Out-of-range values have invalid values removed, leaving a valid *Flag* - value. This is the default for :class:`Flag`:: + value:: >>> from enum import Flag, CONFORM, auto >>> class ConformFlag(Flag, boundary=CONFORM): @@ -728,7 +730,6 @@ Data Types .. attribute:: EJECT Out-of-range values lose their *Flag* membership and revert to :class:`int`. - This is the default for :class:`IntFlag`:: >>> from enum import Flag, EJECT, auto >>> class EjectFlag(Flag, boundary=EJECT): @@ -741,8 +742,8 @@ Data Types .. attribute:: KEEP - Out-of-range values are kept, and the *Flag* membership is kept. This is - used for some stdlib flags:: + Out-of-range values are kept, and the *Flag* membership is kept. + This is the default for :class:`IntFlag`:: >>> from enum import Flag, KEEP, auto >>> class KeepFlag(Flag, boundary=KEEP): diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 4a57e9c8799336..18c3f47dddc079 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -948,8 +948,8 @@ their subgroups based on the types of the contained exceptions. these fields do not need to be updated by :meth:`derive`. :: >>> class MyGroup(ExceptionGroup): - ... def derive(self, exc): - ... return MyGroup(self.message, exc) + ... def derive(self, excs): + ... return MyGroup(self.message, excs) ... >>> e = MyGroup("eg", [ValueError(1), TypeError(2)]) >>> e.add_note("a note") diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index f0f374771b0cf1..a5e86ef0f9eb59 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1444,8 +1444,9 @@ are always available. They are listed here in alphabetical order. arguments are converted to text strings, :func:`print` cannot be used with binary mode file objects. For these, use ``file.write(...)`` instead. - Whether the output is buffered is usually determined by *file*, but if the - *flush* keyword argument is true, the stream is forcibly flushed. + Output buffering is usually determined by *file*. + However, if *flush* is true, the stream is forcibly flushed. + .. versionchanged:: 3.3 Added the *flush* keyword argument. @@ -1680,7 +1681,7 @@ are always available. They are listed here in alphabetical order. class C: @staticmethod - def f(arg1, arg2, ...): ... + def f(arg1, arg2, argN): ... The ``@staticmethod`` form is a function :term:`decorator` -- see :ref:`function` for details. @@ -1986,7 +1987,6 @@ are always available. They are listed here in alphabetical order. .. index:: statement: import - module: imp .. note:: diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index d467e50bc7a424..29cbc87bf66d12 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -49,8 +49,13 @@ The :mod:`functools` module defines the following functions: >>> factorial(12) # makes two new recursive calls, the other 10 are cached 479001600 - The cache is threadsafe so the wrapped function can be used in multiple - threads. + The cache is threadsafe so that the wrapped function can be used in + multiple threads. This means that the underlying data structure will + remain coherent during concurrent updates. 
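As a rough illustration of the guarantee described above (a minimal sketch with made-up names, not part of this patch), a ``@cache``-decorated function can be shared between threads without any extra locking::

    from functools import cache
    from threading import Thread

    @cache
    def square(n):
        return n * n

    # Hammer the same argument from several threads; the cache stays coherent.
    threads = [Thread(target=square, args=(7,)) for _ in range(8)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    # hits + misses == 8; misses is usually 1, but can be higher if threads
    # race before the first result lands in the cache.
    print(square.cache_info())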
+ + It is possible for the wrapped function to be called more than once if + another thread makes an additional call before the initial call has been + completed and cached. .. versionadded:: 3.9 @@ -118,6 +123,7 @@ The :mod:`functools` module defines the following functions: def stdev(self): return statistics.stdev(self._data) + .. versionadded:: 3.8 .. versionchanged:: 3.12 Prior to Python 3.12, ``cached_property`` included an undocumented lock to @@ -126,8 +132,6 @@ The :mod:`functools` module defines the following functions: per-instance, which could result in unacceptably high lock contention. In Python 3.12+ this locking is removed. - .. versionadded:: 3.8 - .. function:: cmp_to_key(func) @@ -159,8 +163,13 @@ The :mod:`functools` module defines the following functions: *maxsize* most recent calls. It can save time when an expensive or I/O bound function is periodically called with the same arguments. - The cache is threadsafe so the wrapped function can be used in multiple - threads. + The cache is threadsafe so that the wrapped function can be used in + multiple threads. This means that the underlying data structure will + remain coherent during concurrent updates. + + It is possible for the wrapped function to be called more than once if + another thread makes an additional call before the initial call has been + completed and cached. Since a dictionary is used to cache results, the positional and keyword arguments to the function must be :term:`hashable`. @@ -233,7 +242,7 @@ The :mod:`functools` module defines the following functions: @lru_cache(maxsize=32) def get_pep(num): 'Retrieve text of a Python Enhancement Proposal' - resource = 'https://peps.python.org/pep-%04d/' % num + resource = f'https://peps.python.org/pep-{num:04d}' try: with urllib.request.urlopen(resource) as s: return s.read() diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst index 69a1a8313b7593..6d5c64df1a1f3f 100644 --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -206,12 +206,17 @@ The :mod:`gc` module provides the following functions: .. function:: freeze() - Freeze all the objects tracked by gc - move them to a permanent generation - and ignore all the future collections. This can be used before a POSIX - fork() call to make the gc copy-on-write friendly or to speed up collection. - Also collection before a POSIX fork() call may free pages for future - allocation which can cause copy-on-write too so it's advised to disable gc - in parent process and freeze before fork and enable gc in child process. + Freeze all the objects tracked by the garbage collector; move them to a + permanent generation and ignore them in all the future collections. + + If a process will ``fork()`` without ``exec()``, avoiding unnecessary + copy-on-write in child processes will maximize memory sharing and reduce + overall memory usage. This requires both avoiding creation of freed "holes" + in memory pages in the parent process and ensuring that GC collections in + child processes won't touch the ``gc_refs`` counter of long-lived objects + originating in the parent process. To accomplish both, call ``gc.disable()`` + early in the parent process, ``gc.freeze()`` right before ``fork()``, and + ``gc.enable()`` early in child processes. .. versionadded:: 3.7 @@ -251,7 +256,7 @@ values but should not rebind them): are printed. .. 
versionchanged:: 3.4 - Following :pep:`442`, objects with a :meth:`__del__` method don't end + Following :pep:`442`, objects with a :meth:`~object.__del__` method don't end up in :attr:`gc.garbage` anymore. .. data:: callbacks diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst index 1a2582d6a904b2..06cbd2567a0bc6 100644 --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -143,6 +143,12 @@ The module defines the following items: :func:`time.time` and the :attr:`~os.stat_result.st_mtime` attribute of the object returned by :func:`os.stat`. + .. attribute:: name + + The path to the gzip file on disk, as a :class:`str` or :class:`bytes`. + Equivalent to the output of :func:`os.fspath` on the original input path, + with no other normalization, resolution or expansion. + .. versionchanged:: 3.1 Support for the :keyword:`with` statement was added, along with the *mtime* constructor argument and :attr:`mtime` attribute. diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst index ad3416135e307b..38821b32c91cf1 100644 --- a/Doc/library/http.client.rst +++ b/Doc/library/http.client.rst @@ -353,6 +353,13 @@ HTTPConnection Objects The *headers* argument should be a mapping of extra HTTP headers to send with the CONNECT request. + As HTTP/1.1 is used for HTTP CONNECT tunnelling requests, `as per the RFC + <https://tools.ietf.org/html/rfc7231#section-4.3.6>`_, an HTTP ``Host:`` + header must be provided, matching the authority-form of the request target + provided as the destination for the CONNECT request. If an HTTP ``Host:`` + header is not provided via the *headers* argument, one is generated and + transmitted automatically. + For example, to tunnel through a HTTPS proxy server running locally on port 8080, we would pass the address of the proxy to the :class:`HTTPSConnection` constructor, and the address of the host that we eventually want to reach to @@ -365,6 +372,11 @@ HTTPConnection Objects .. versionadded:: 3.2 + .. versionchanged:: 3.12 + HTTP CONNECT tunnelling requests use protocol HTTP/1.1, upgraded from + protocol HTTP/1.0. ``Host:`` HTTP headers are mandatory for HTTP/1.1, so + one will be automatically generated and transmitted if not provided in + the *headers* argument. .. method:: HTTPConnection.connect() diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst deleted file mode 100644 index 000793a7e66cae..00000000000000 --- a/Doc/library/imp.rst +++ /dev/null @@ -1,411 +0,0 @@ -:mod:`imp` --- Access the :ref:`import <importsystem>` internals -================================================================ - -.. module:: imp - :synopsis: Access the implementation of the import statement. - :deprecated: - -**Source code:** :source:`Lib/imp.py` - -.. deprecated-removed:: 3.4 3.12 - The :mod:`imp` module is deprecated in favor of :mod:`importlib`. - -.. index:: statement: import - --------------- - -This module provides an interface to the mechanisms used to implement the -:keyword:`import` statement. It defines the following constants and functions: - - -.. function:: get_magic() - - .. index:: pair: file; byte-code - - Return the magic string value used to recognize byte-compiled code files - (:file:`.pyc` files). (This value may be different for each Python version.) - - .. deprecated:: 3.4 - Use :attr:`importlib.util.MAGIC_NUMBER` instead. - - -.. function:: get_suffixes() - - Return a list of 3-element tuples, each describing a particular type of - module. 
Each triple has the form ``(suffix, mode, type)``, where *suffix* is - a string to be appended to the module name to form the filename to search - for, *mode* is the mode string to pass to the built-in :func:`open` function - to open the file (this can be ``'r'`` for text files or ``'rb'`` for binary - files), and *type* is the file type, which has one of the values - :const:`PY_SOURCE`, :const:`PY_COMPILED`, or :const:`C_EXTENSION`, described - below. - - .. deprecated:: 3.3 - Use the constants defined on :mod:`importlib.machinery` instead. - - -.. function:: find_module(name[, path]) - - Try to find the module *name*. If *path* is omitted or ``None``, the list of - directory names given by ``sys.path`` is searched, but first a few special - places are searched: the function tries to find a built-in module with the - given name (:const:`C_BUILTIN`), then a frozen module (:const:`PY_FROZEN`), - and on some systems some other places are looked in as well (on Windows, it - looks in the registry which may point to a specific file). - - Otherwise, *path* must be a list of directory names; each directory is - searched for files with any of the suffixes returned by :func:`get_suffixes` - above. Invalid names in the list are silently ignored (but all list items - must be strings). - - If search is successful, the return value is a 3-element tuple ``(file, - pathname, description)``: - - *file* is an open :term:`file object` positioned at the beginning, *pathname* - is the pathname of the file found, and *description* is a 3-element tuple as - contained in the list returned by :func:`get_suffixes` describing the kind of - module found. - - If the module is built-in or frozen then *file* and *pathname* are both ``None`` - and the *description* tuple contains empty strings for its suffix and mode; - the module type is indicated as given in parentheses above. If the search - is unsuccessful, :exc:`ImportError` is raised. Other exceptions indicate - problems with the arguments or environment. - - If the module is a package, *file* is ``None``, *pathname* is the package - path and the last item in the *description* tuple is :const:`PKG_DIRECTORY`. - - This function does not handle hierarchical module names (names containing - dots). In order to find *P.M*, that is, submodule *M* of package *P*, use - :func:`find_module` and :func:`load_module` to find and load package *P*, and - then use :func:`find_module` with the *path* argument set to ``P.__path__``. - When *P* itself has a dotted name, apply this recipe recursively. - - .. deprecated:: 3.3 - Use :func:`importlib.util.find_spec` instead unless Python 3.3 - compatibility is required, in which case use - :func:`importlib.find_loader`. For example usage of the former case, - see the :ref:`importlib-examples` section of the :mod:`importlib` - documentation. - - -.. function:: load_module(name, file, pathname, description) - - Load a module that was previously found by :func:`find_module` (or by an - otherwise conducted search yielding compatible results). This function does - more than importing the module: if the module was already imported, it will - reload the module! The *name* argument indicates the full - module name (including the package name, if this is a submodule of a - package). The *file* argument is an open file, and *pathname* is the - corresponding file name; these can be ``None`` and ``''``, respectively, when - the module is a package or not being loaded from a file. 
The *description* - argument is a tuple, as would be returned by :func:`get_suffixes`, describing - what kind of module must be loaded. - - If the load is successful, the return value is the module object; otherwise, - an exception (usually :exc:`ImportError`) is raised. - - **Important:** the caller is responsible for closing the *file* argument, if - it was not ``None``, even when an exception is raised. This is best done - using a :keyword:`try` ... :keyword:`finally` statement. - - .. deprecated:: 3.3 - If previously used in conjunction with :func:`imp.find_module` then - consider using :func:`importlib.import_module`, otherwise use the loader - returned by the replacement you chose for :func:`imp.find_module`. If you - called :func:`imp.load_module` and related functions directly with file - path arguments then use a combination of - :func:`importlib.util.spec_from_file_location` and - :func:`importlib.util.module_from_spec`. See the :ref:`importlib-examples` - section of the :mod:`importlib` documentation for details of the various - approaches. - - -.. function:: new_module(name) - - Return a new empty module object called *name*. This object is *not* inserted - in ``sys.modules``. - - .. deprecated:: 3.4 - Use :func:`importlib.util.module_from_spec` instead. - - -.. function:: reload(module) - - Reload a previously imported *module*. The argument must be a module object, so - it must have been successfully imported before. This is useful if you have - edited the module source file using an external editor and want to try out the - new version without leaving the Python interpreter. The return value is the - module object (the same as the *module* argument). - - When ``reload(module)`` is executed: - - * Python modules' code is recompiled and the module-level code reexecuted, - defining a new set of objects which are bound to names in the module's - dictionary. The ``init`` function of extension modules is not called a second - time. - - * As with all other objects in Python the old objects are only reclaimed after - their reference counts drop to zero. - - * The names in the module namespace are updated to point to any new or changed - objects. - - * Other references to the old objects (such as names external to the module) are - not rebound to refer to the new objects and must be updated in each namespace - where they occur if that is desired. - - There are a number of other caveats: - - When a module is reloaded, its dictionary (containing the module's global - variables) is retained. Redefinitions of names will override the old - definitions, so this is generally not a problem. If the new version of a module - does not define a name that was defined by the old version, the old definition - remains. This feature can be used to the module's advantage if it maintains a - global table or cache of objects --- with a :keyword:`try` statement it can test - for the table's presence and skip its initialization if desired:: - - try: - cache - except NameError: - cache = {} - - It is legal though generally not very useful to reload built-in or dynamically - loaded modules, except for :mod:`sys`, :mod:`__main__` and :mod:`builtins`. - In many cases, however, extension modules are not designed to be initialized - more than once, and may fail in arbitrary ways when reloaded. - - If a module imports objects from another module using :keyword:`from` ... 
- :keyword:`import` ..., calling :func:`reload` for the other module does not - redefine the objects imported from it --- one way around this is to re-execute - the :keyword:`!from` statement, another is to use :keyword:`!import` and qualified - names (*module*.*name*) instead. - - If a module instantiates instances of a class, reloading the module that defines - the class does not affect the method definitions of the instances --- they - continue to use the old class definition. The same is true for derived classes. - - .. versionchanged:: 3.3 - Relies on both ``__name__`` and ``__loader__`` being defined on the module - being reloaded instead of just ``__name__``. - - .. deprecated:: 3.4 - Use :func:`importlib.reload` instead. - - -The following functions are conveniences for handling :pep:`3147` byte-compiled -file paths. - -.. versionadded:: 3.2 - -.. function:: cache_from_source(path, debug_override=None) - - Return the :pep:`3147` path to the byte-compiled file associated with the - source *path*. For example, if *path* is ``/foo/bar/baz.py`` the return - value would be ``/foo/bar/__pycache__/baz.cpython-32.pyc`` for Python 3.2. - The ``cpython-32`` string comes from the current magic tag (see - :func:`get_tag`; if :attr:`sys.implementation.cache_tag` is not defined then - :exc:`NotImplementedError` will be raised). By passing in ``True`` or - ``False`` for *debug_override* you can override the system's value for - ``__debug__``, leading to optimized bytecode. - - *path* need not exist. - - .. versionchanged:: 3.3 - If :attr:`sys.implementation.cache_tag` is ``None``, then - :exc:`NotImplementedError` is raised. - - .. deprecated:: 3.4 - Use :func:`importlib.util.cache_from_source` instead. - - .. versionchanged:: 3.5 - The *debug_override* parameter no longer creates a ``.pyo`` file. - - -.. function:: source_from_cache(path) - - Given the *path* to a :pep:`3147` file name, return the associated source code - file path. For example, if *path* is - ``/foo/bar/__pycache__/baz.cpython-32.pyc`` the returned path would be - ``/foo/bar/baz.py``. *path* need not exist, however if it does not conform - to :pep:`3147` format, a :exc:`ValueError` is raised. If - :attr:`sys.implementation.cache_tag` is not defined, - :exc:`NotImplementedError` is raised. - - .. versionchanged:: 3.3 - Raise :exc:`NotImplementedError` when - :attr:`sys.implementation.cache_tag` is not defined. - - .. deprecated:: 3.4 - Use :func:`importlib.util.source_from_cache` instead. - - -.. function:: get_tag() - - Return the :pep:`3147` magic tag string matching this version of Python's - magic number, as returned by :func:`get_magic`. - - .. deprecated:: 3.4 - Use :attr:`sys.implementation.cache_tag` directly starting - in Python 3.3. - - -The following functions help interact with the import system's internal -locking mechanism. Locking semantics of imports are an implementation -detail which may vary from release to release. However, Python ensures -that circular imports work without any deadlocks. - - -.. function:: lock_held() - - Return ``True`` if the global import lock is currently held, else - ``False``. On platforms without threads, always return ``False``. - - On platforms with threads, a thread executing an import first holds a - global import lock, then sets up a per-module lock for the rest of the - import. This blocks other threads from importing the same module until - the original import completes, preventing other threads from seeing - incomplete module objects constructed by the original thread. 
An - exception is made for circular imports, which by construction have to - expose an incomplete module object at some point. - - .. versionchanged:: 3.3 - The locking scheme has changed to per-module locks for - the most part. A global import lock is kept for some critical tasks, - such as initializing the per-module locks. - - .. deprecated:: 3.4 - - -.. function:: acquire_lock() - - Acquire the interpreter's global import lock for the current thread. - This lock should be used by import hooks to ensure thread-safety when - importing modules. - - Once a thread has acquired the import lock, the same thread may acquire it - again without blocking; the thread must release it once for each time it has - acquired it. - - On platforms without threads, this function does nothing. - - .. versionchanged:: 3.3 - The locking scheme has changed to per-module locks for - the most part. A global import lock is kept for some critical tasks, - such as initializing the per-module locks. - - .. deprecated:: 3.4 - - -.. function:: release_lock() - - Release the interpreter's global import lock. On platforms without - threads, this function does nothing. - - .. versionchanged:: 3.3 - The locking scheme has changed to per-module locks for - the most part. A global import lock is kept for some critical tasks, - such as initializing the per-module locks. - - .. deprecated:: 3.4 - - -The following constants with integer values, defined in this module, are used -to indicate the search result of :func:`find_module`. - - -.. data:: PY_SOURCE - - The module was found as a source file. - - .. deprecated:: 3.3 - - -.. data:: PY_COMPILED - - The module was found as a compiled code object file. - - .. deprecated:: 3.3 - - -.. data:: C_EXTENSION - - The module was found as dynamically loadable shared library. - - .. deprecated:: 3.3 - - -.. data:: PKG_DIRECTORY - - The module was found as a package directory. - - .. deprecated:: 3.3 - - -.. data:: C_BUILTIN - - The module was found as a built-in module. - - .. deprecated:: 3.3 - - -.. data:: PY_FROZEN - - The module was found as a frozen module. - - .. deprecated:: 3.3 - - -.. class:: NullImporter(path_string) - - The :class:`NullImporter` type is a :pep:`302` import hook that handles - non-directory path strings by failing to find any modules. Calling this type - with an existing directory or empty string raises :exc:`ImportError`. - Otherwise, a :class:`NullImporter` instance is returned. - - Instances have only one method: - - .. method:: NullImporter.find_module(fullname [, path]) - - This method always returns ``None``, indicating that the requested module could - not be found. - - .. versionchanged:: 3.3 - ``None`` is inserted into ``sys.path_importer_cache`` instead of an - instance of :class:`NullImporter`. - - .. deprecated:: 3.4 - Insert ``None`` into ``sys.path_importer_cache`` instead. - - -.. _examples-imp: - -Examples --------- - -The following function emulates what was the standard import statement up to -Python 1.4 (no hierarchical module names). (This *implementation* wouldn't work -in that version, since :func:`find_module` has been extended and -:func:`load_module` has been added in 1.4.) :: - - import imp - import sys - - def __import__(name, globals=None, locals=None, fromlist=None): - # Fast path: see if the module has already been imported. - try: - return sys.modules[name] - except KeyError: - pass - - # If any of the following calls raises an exception, - # there's a problem we can't handle -- let the caller handle it. 
- - fp, pathname, description = imp.find_module(name) - - try: - return imp.load_module(name, fp, pathname, description) - finally: - # Since we may exit via an exception, close fp explicitly. - if fp: - fp.close() diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index 6e084101995e25..b306d5f55a714f 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -308,6 +308,10 @@ Python module or `Import Package <https://packaging.python.org/en/latest/glossar >>> packages_distributions() {'importlib_metadata': ['importlib-metadata'], 'yaml': ['PyYAML'], 'jaraco': ['jaraco.classes', 'jaraco.functools'], ...} +Some editable installs, `do not supply top-level names +<https://github.com/pypa/packaging-problems/issues/609>`_, and thus this +function is not reliable with such installs. + .. versionadded:: 3.10 .. _distributions: diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index ccf240193d36a9..88f843c03b1d5a 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -574,6 +574,8 @@ Retrieving source code object and the line number indicates where in the original source file the first line of code was found. An :exc:`OSError` is raised if the source code cannot be retrieved. + A :exc:`TypeError` is raised if the object is a built-in module, class, or + function. .. versionchanged:: 3.3 :exc:`OSError` is raised instead of :exc:`IOError`, now an alias of the @@ -586,6 +588,8 @@ Retrieving source code class, method, function, traceback, frame, or code object. The source code is returned as a single string. An :exc:`OSError` is raised if the source code cannot be retrieved. + A :exc:`TypeError` is raised if the object is a built-in module, class, or + function. .. versionchanged:: 3.3 :exc:`OSError` is raised instead of :exc:`IOError`, now an alias of the diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index d85a17effb04a2..a0d794017e2602 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -195,7 +195,7 @@ loops that truncate the stream. if n < 1: raise ValueError('n must be at least one') it = iter(iterable) - while (batch := tuple(islice(it, n))): + while batch := tuple(islice(it, n)): yield batch .. versionadded:: 3.12 @@ -769,8 +769,8 @@ well as with the built-in itertools such as ``map()``, ``filter()``, A secondary purpose of the recipes is to serve as an incubator. The ``accumulate()``, ``compress()``, and ``pairwise()`` itertools started out as -recipes. Currently, the ``iter_index()`` recipe is being tested to see -whether it proves its worth. +recipes. Currently, the ``sliding_window()`` and ``iter_index()`` recipes +are being tested to see whether they prove their worth. Substantially all of these recipes and many, many others can be installed from the `more-itertools project <https://pypi.org/project/more-itertools/>`_ found @@ -789,6 +789,7 @@ which incur interpreter overhead. .. testcode:: import collections + import functools import math import operator import random @@ -806,6 +807,23 @@ which incur interpreter overhead. "Return function(0), function(1), ..." return map(function, count(start)) + def repeatfunc(func, times=None, *args): + """Repeat calls to func with specified arguments. 
+ + Example: repeatfunc(random.random) + """ + if times is None: + return starmap(func, repeat(args)) + return starmap(func, repeat(args, times)) + + def flatten(list_of_lists): + "Flatten one level of nesting" + return chain.from_iterable(list_of_lists) + + def ncycles(iterable, n): + "Returns the sequence elements n times" + return chain.from_iterable(repeat(tuple(iterable), n)) + def tail(n, iterable): "Return an iterator over the last n items" # tail(3, 'ABCDEFG') --> E F G @@ -825,58 +843,27 @@ which incur interpreter overhead. "Returns the nth item or a default value" return next(islice(iterable, n, None), default) - def all_equal(iterable): - "Returns True if all the elements are equal to each other" - g = groupby(iterable) - return next(g, True) and not next(g, False) - def quantify(iterable, pred=bool): "Count how many times the predicate is True" return sum(map(pred, iterable)) - def ncycles(iterable, n): - "Returns the sequence elements n times" - return chain.from_iterable(repeat(tuple(iterable), n)) - - def sum_of_squares(it): - "Add up the squares of the input values." - # sum_of_squares([10, 20, 30]) -> 1400 - return math.sumprod(*tee(it)) - - def transpose(it): - "Swap the rows and columns of the input." - # transpose([(1, 2, 3), (11, 22, 33)]) --> (1, 11) (2, 22) (3, 33) - return zip(*it, strict=True) + def all_equal(iterable): + "Returns True if all the elements are equal to each other" + g = groupby(iterable) + return next(g, True) and not next(g, False) - def matmul(m1, m2): - "Multiply two matrices." - # matmul([(7, 5), (3, 5)], [[2, 5], [7, 9]]) --> (49, 80), (41, 60) - n = len(m2[0]) - return batched(starmap(math.sumprod, product(m1, transpose(m2))), n) + def first_true(iterable, default=False, pred=None): + """Returns the first true value in the iterable. - def convolve(signal, kernel): - # See: https://betterexplained.com/articles/intuitive-convolution/ - # convolve(data, [0.25, 0.25, 0.25, 0.25]) --> Moving average (blur) - # convolve(data, [1, -1]) --> 1st finite difference (1st derivative) - # convolve(data, [1, -2, 1]) --> 2nd finite difference (2nd derivative) - kernel = tuple(kernel)[::-1] - n = len(kernel) - window = collections.deque([0], maxlen=n) * n - for x in chain(signal, repeat(0, n-1)): - window.append(x) - yield math.sumprod(kernel, window) + If no true value is found, returns *default* - def polynomial_from_roots(roots): - """Compute a polynomial's coefficients from its roots. + If *pred* is not None, returns the first item + for which pred(item) is true. - (x - 5) (x + 4) (x - 3) expands to: x³ -4x² -17x + 60 """ - # polynomial_from_roots([5, -4, 3]) --> [1, -4, -17, 60] - roots = list(map(operator.neg, roots)) - return [ - sum(map(math.prod, combinations(roots, k))) - for k in range(len(roots) + 1) - ] + # first_true([a,b,c], x) --> a or b or c or x + # first_true([a,b], x, f) --> a if f(a) else b if f(b) else x + return next(filter(pred, iterable), default) def iter_index(iterable, value, start=0): "Return indices where a value occurs in a sequence or iterable." @@ -901,44 +888,28 @@ which incur interpreter overhead. except ValueError: pass - def sieve(n): - "Primes less than n" - # sieve(30) --> 2 3 5 7 11 13 17 19 23 29 - data = bytearray((0, 1)) * (n // 2) - data[:3] = 0, 0, 0 - limit = math.isqrt(n) + 1 - for p in compress(range(limit), data): - data[p*p : n : p+p] = bytes(len(range(p*p, n, p+p))) - data[2] = 1 - return iter_index(data, 1) if n > 2 else iter([]) - - def factor(n): - "Prime factors of n." 
- # factor(99) --> 3 3 11 - for prime in sieve(math.isqrt(n) + 1): - while True: - quotient, remainder = divmod(n, prime) - if remainder: - break - yield prime - n = quotient - if n == 1: - return - if n >= 2: - yield n + def iter_except(func, exception, first=None): + """ Call a function repeatedly until an exception is raised. - def flatten(list_of_lists): - "Flatten one level of nesting" - return chain.from_iterable(list_of_lists) + Converts a call-until-exception interface to an iterator interface. + Like builtins.iter(func, sentinel) but uses an exception instead + of a sentinel to end the loop. - def repeatfunc(func, times=None, *args): - """Repeat calls to func with specified arguments. + Examples: + iter_except(functools.partial(heappop, h), IndexError) # priority queue iterator + iter_except(d.popitem, KeyError) # non-blocking dict iterator + iter_except(d.popleft, IndexError) # non-blocking deque iterator + iter_except(q.get_nowait, Queue.Empty) # loop over a producer Queue + iter_except(s.pop, KeyError) # non-blocking set iterator - Example: repeatfunc(random.random) """ - if times is None: - return starmap(func, repeat(args)) - return starmap(func, repeat(args, times)) + try: + if first is not None: + yield first() # For database APIs needing an initial cast to db.first() + while True: + yield func() + except exception: + pass def grouper(iterable, n, *, incomplete='fill', fillvalue=None): "Collect data into non-overlapping fixed-length chunks or blocks" @@ -955,12 +926,6 @@ which incur interpreter overhead. else: raise ValueError('Expected fill, strict, or ignore') - def triplewise(iterable): - "Return overlapping triplets from an iterable" - # triplewise('ABCDEFG') --> ABC BCD CDE DEF EFG - for (a, _), (b, c) in pairwise(pairwise(iterable)): - yield a, b, c - def sliding_window(iterable, n): # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG it = iter(iterable) @@ -991,6 +956,12 @@ which incur interpreter overhead. t1, t2 = tee(iterable) return filterfalse(pred, t1), filter(pred, t2) + def subslices(seq): + "Return all contiguous non-empty subslices of a sequence" + # subslices('ABCD') --> A AB ABC ABCD B BC BCD C CD D + slices = starmap(slice, combinations(range(len(seq) + 1), 2)) + return map(operator.getitem, repeat(seq), slices) + def before_and_after(predicate, it): """ Variant of takewhile() that allows complete access to the remainder of the iterator. @@ -1020,17 +991,6 @@ which incur interpreter overhead. yield from it return true_iterator(), remainder_iterator() - def subslices(seq): - "Return all contiguous non-empty subslices of a sequence" - # subslices('ABCD') --> A AB ABC ABCD B BC BCD C CD D - slices = starmap(slice, combinations(range(len(seq) + 1), 2)) - return map(operator.getitem, repeat(seq), slices) - - def powerset(iterable): - "powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)" - s = list(iterable) - return chain.from_iterable(combinations(s, r) for r in range(len(s)+1)) - def unique_everseen(iterable, key=None): "List unique elements, preserving order. Remember all elements ever seen." # unique_everseen('AAAABBBCCDAABBB') --> A B C D @@ -1060,41 +1020,94 @@ which incur interpreter overhead. # unique_justseen('ABBcCAD', str.lower) --> A B c A D return map(next, map(operator.itemgetter(1), groupby(iterable, key))) - def iter_except(func, exception, first=None): - """ Call a function repeatedly until an exception is raised. - Converts a call-until-exception interface to an iterator interface. 
- Like builtins.iter(func, sentinel) but uses an exception instead - of a sentinel to end the loop. +The following recipes have a more mathematical flavor: - Examples: - iter_except(functools.partial(heappop, h), IndexError) # priority queue iterator - iter_except(d.popitem, KeyError) # non-blocking dict iterator - iter_except(d.popleft, IndexError) # non-blocking deque iterator - iter_except(q.get_nowait, Queue.Empty) # loop over a producer Queue - iter_except(s.pop, KeyError) # non-blocking set iterator +.. testcode:: - """ - try: - if first is not None: - yield first() # For database APIs needing an initial cast to db.first() + def powerset(iterable): + "powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)" + s = list(iterable) + return chain.from_iterable(combinations(s, r) for r in range(len(s)+1)) + + def sieve(n): + "Primes less than n" + # sieve(30) --> 2 3 5 7 11 13 17 19 23 29 + data = bytearray((0, 1)) * (n // 2) + data[:3] = 0, 0, 0 + limit = math.isqrt(n) + 1 + for p in compress(range(limit), data): + data[p*p : n : p+p] = bytes(len(range(p*p, n, p+p))) + data[2] = 1 + return iter_index(data, 1) if n > 2 else iter([]) + + def factor(n): + "Prime factors of n." + # factor(99) --> 3 3 11 + for prime in sieve(math.isqrt(n) + 1): while True: - yield func() - except exception: - pass + quotient, remainder = divmod(n, prime) + if remainder: + break + yield prime + n = quotient + if n == 1: + return + if n > 1: + yield n - def first_true(iterable, default=False, pred=None): - """Returns the first true value in the iterable. + def sum_of_squares(it): + "Add up the squares of the input values." + # sum_of_squares([10, 20, 30]) -> 1400 + return math.sumprod(*tee(it)) - If no true value is found, returns *default* + def transpose(it): + "Swap the rows and columns of the input." + # transpose([(1, 2, 3), (11, 22, 33)]) --> (1, 11) (2, 22) (3, 33) + return zip(*it, strict=True) - If *pred* is not None, returns the first item - for which pred(item) is true. + def matmul(m1, m2): + "Multiply two matrices." + # matmul([(7, 5), (3, 5)], [[2, 5], [7, 9]]) --> (49, 80), (41, 60) + n = len(m2[0]) + return batched(starmap(math.sumprod, product(m1, transpose(m2))), n) + + def convolve(signal, kernel): + """Linear convolution of two iterables. + Article: https://betterexplained.com/articles/intuitive-convolution/ + Video: https://www.youtube.com/watch?v=KuXjwB4LzSA """ - # first_true([a,b,c], x) --> a or b or c or x - # first_true([a,b], x, f) --> a if f(a) else b if f(b) else x - return next(filter(pred, iterable), default) + # convolve(data, [0.25, 0.25, 0.25, 0.25]) --> Moving average (blur) + # convolve(data, [1, -1]) --> 1st finite difference (1st derivative) + # convolve(data, [1, -2, 1]) --> 2nd finite difference (2nd derivative) + kernel = tuple(kernel)[::-1] + n = len(kernel) + padded_signal = chain(repeat(0, n-1), signal, repeat(0, n-1)) + for window in sliding_window(padded_signal, n): + yield math.sumprod(kernel, window) + + def polynomial_from_roots(roots): + """Compute a polynomial's coefficients from its roots. + + (x - 5) (x + 4) (x - 3) expands to: x³ -4x² -17x + 60 + """ + # polynomial_from_roots([5, -4, 3]) --> [1, -4, -17, 60] + factors = zip(repeat(1), map(operator.neg, roots)) + return list(functools.reduce(convolve, factors, [1])) + + def polynomial_eval(coefficients, x): + """Evaluate a polynomial at a specific value. + + Computes with better numeric stability than Horner's method. 
+ """ + # Evaluate x³ -4x² -17x + 60 at x = 2.5 + # polynomial_eval([1, -4, -17, 60], x=2.5) --> 8.125 + n = len(coefficients) + if n == 0: + return x * 0 # coerce zero to the type of x + powers = map(pow, repeat(x), reversed(range(n))) + return math.sumprod(coefficients, powers) def nth_combination(iterable, r, index): "Equivalent to list(combinations(iterable, r))[index]" @@ -1114,6 +1127,7 @@ which incur interpreter overhead. result.append(pool[-1-n]) return tuple(result) + .. doctest:: :hide: @@ -1245,6 +1259,37 @@ which incur interpreter overhead. >>> list(convolve(data, [1, -2, 1])) [20, 0, -36, 24, -20, 20, -20, -4, 16] + >>> from fractions import Fraction + >>> from decimal import Decimal + >>> polynomial_eval([1, -4, -17, 60], x=2) + 18 + >>> x = 2; x**3 - 4*x**2 -17*x + 60 + 18 + >>> polynomial_eval([1, -4, -17, 60], x=2.5) + 8.125 + >>> x = 2.5; x**3 - 4*x**2 -17*x + 60 + 8.125 + >>> polynomial_eval([1, -4, -17, 60], x=Fraction(2, 3)) + Fraction(1274, 27) + >>> x = Fraction(2, 3); x**3 - 4*x**2 -17*x + 60 + Fraction(1274, 27) + >>> polynomial_eval([1, -4, -17, 60], x=Decimal('1.75')) + Decimal('23.359375') + >>> x = Decimal('1.75'); x**3 - 4*x**2 -17*x + 60 + Decimal('23.359375') + >>> polynomial_eval([], 2) + 0 + >>> polynomial_eval([], 2.5) + 0.0 + >>> polynomial_eval([], Fraction(2, 3)) + Fraction(0, 1) + >>> polynomial_eval([], Decimal('1.75')) + Decimal('0.00') + >>> polynomial_eval([11], 7) == 11 + True + >>> polynomial_eval([11, 2], 7) == 11 * 7 + 2 + True + >>> polynomial_from_roots([5, -4, 3]) [1, -4, -17, 60] >>> factored = lambda x: (x - 5) * (x + 4) * (x - 3) @@ -1358,9 +1403,6 @@ which incur interpreter overhead. >>> list(grouper('abcdefg', n=3, incomplete='ignore')) [('a', 'b', 'c'), ('d', 'e', 'f')] - >>> list(triplewise('ABCDEFG')) - [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E'), ('D', 'E', 'F'), ('E', 'F', 'G')] - >>> list(sliding_window('ABCDEFG', 4)) [('A', 'B', 'C', 'D'), ('B', 'C', 'D', 'E'), ('C', 'D', 'E', 'F'), ('D', 'E', 'F', 'G')] @@ -1442,3 +1484,45 @@ which incur interpreter overhead. >>> combos = list(combinations(iterable, r)) >>> all(nth_combination(iterable, r, i) == comb for i, comb in enumerate(combos)) True + + +.. testcode:: + :hide: + + # Old recipes and their tests which are guaranteed to continue to work. + + def sumprod(vec1, vec2): + "Compute a sum of products." + return sum(starmap(operator.mul, zip(vec1, vec2, strict=True))) + + def dotproduct(vec1, vec2): + return sum(map(operator.mul, vec1, vec2)) + + def pad_none(iterable): + """Returns the sequence elements and then returns None indefinitely. + + Useful for emulating the behavior of the built-in map() function. + """ + return chain(iterable, repeat(None)) + + def triplewise(iterable): + "Return overlapping triplets from an iterable" + # triplewise('ABCDEFG') --> ABC BCD CDE DEF EFG + for (a, _), (b, c) in pairwise(pairwise(iterable)): + yield a, b, c + + +.. doctest:: + :hide: + + >>> dotproduct([1,2,3], [4,5,6]) + 32 + + >>> sumprod([1,2,3], [4,5,6]) + 32 + + >>> list(islice(pad_none('abc'), 0, 6)) + ['a', 'b', 'c', None, None, None] + + >>> list(triplewise('ABCDEFG')) + [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E'), ('D', 'E', 'F'), ('E', 'F', 'G')] diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst index 2daf2422ebd5b4..250246b5cd9adc 100644 --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -253,6 +253,7 @@ otherwise, the context is used to determine what to instantiate. 
* ``datefmt`` * ``style`` * ``validate`` (since version >=3.8) + * ``defaults`` (since version >=3.12) An optional ``class`` key indicates the name of the formatter's class (as a dotted module and class name). The instantiation @@ -953,16 +954,22 @@ Sections which specify formatter configuration are typified by the following. .. code-block:: ini [formatter_form01] - format=F1 %(asctime)s %(levelname)s %(message)s + format=F1 %(asctime)s %(levelname)s %(message)s %(customfield)s datefmt= style=% validate=True + defaults={'customfield': 'defaultvalue'} class=logging.Formatter The arguments for the formatter configuration are the same as the keys in the dictionary schema :ref:`formatters section <logging-config-dictschema-formatters>`. +The ``defaults`` entry, when :ref:`evaluated <func-eval>` in the context of +the ``logging`` package's namespace, is a dictionary of default values for +custom formatting fields. If not provided, it defaults to ``None``. + + .. note:: Due to the use of :func:`eval` as described above, there are diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index 34e98fc2577003..22412e1a2113bb 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -813,8 +813,9 @@ wire). :type lineno: int :param msg: The event description message, - which can be a %-format string with placeholders for variable data. - :type msg: str + which can be a %-format string with placeholders for variable data, + or an arbitrary object (see :ref:`arbitrary-object-messages`). + :type msg: typing.Any :param args: Variable data to merge into the *msg* argument to obtain the event description. diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 0ec47bb956a99e..8454296b815b41 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -460,16 +460,16 @@ process which created it. ... return x*x ... >>> with p: - ... p.map(f, [1,2,3]) + ... p.map(f, [1,2,3]) Process PoolWorker-1: Process PoolWorker-2: Process PoolWorker-3: Traceback (most recent call last): Traceback (most recent call last): Traceback (most recent call last): - AttributeError: 'module' object has no attribute 'f' - AttributeError: 'module' object has no attribute 'f' - AttributeError: 'module' object has no attribute 'f' + AttributeError: Can't get attribute 'f' on <module '__main__' (<class '_frozen_importlib.BuiltinImporter'>)> + AttributeError: Can't get attribute 'f' on <module '__main__' (<class '_frozen_importlib.BuiltinImporter'>)> + AttributeError: Can't get attribute 'f' on <module '__main__' (<class '_frozen_importlib.BuiltinImporter'>)> (If you try this it will actually output three full tracebacks interleaved in a semi-random fashion, and then you may have to diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst index 3e29fed0175e04..5c02d8bc8835bf 100644 --- a/Doc/library/optparse.rst +++ b/Doc/library/optparse.rst @@ -954,7 +954,16 @@ The canonical way to create an :class:`Option` instance is with the As you can see, most actions involve storing or updating a value somewhere. :mod:`optparse` always creates a special object for this, conventionally called -``options`` (it happens to be an instance of :class:`optparse.Values`). Option +``options``, which is an instance of :class:`optparse.Values`. + +.. class:: Values + + An object holding parsed argument names and values as attributes. 
Normally created when calling :meth:`OptionParser.parse_args`, + and can be overridden by a custom subclass passed to the *values* argument of + :meth:`OptionParser.parse_args` (as described in :ref:`optparse-parsing-arguments`). + +Option arguments (and various other values) are stored as attributes of this object, according to the :attr:`~Option.dest` (destination) option attribute. @@ -991,6 +1000,14 @@ one that makes sense for *all* options. Option attributes ^^^^^^^^^^^^^^^^^ +.. class:: Option + + A single command line argument, + with various attributes passed by keyword to the constructor. + Normally created with :meth:`OptionParser.add_option` rather than directly, + and can be overridden by a custom class via the *option_class* argument + to :class:`OptionParser`. + The following option attributes may be passed as keyword arguments to :meth:`OptionParser.add_option`. If you pass an option attribute that is not relevant to a particular option, or fail to pass a required option attribute, @@ -2027,7 +2044,7 @@ Features of note: values.ensure_value(attr, value) If the ``attr`` attribute of ``values`` doesn't exist or is ``None``, then - ensure_value() first sets it to ``value``, and then returns 'value. This is + ensure_value() first sets it to ``value``, and then returns ``value``. This is very handy for actions like ``"extend"``, ``"append"``, and ``"count"``, all of which accumulate data in a variable and expect that variable to be of a certain type (a list for the first two, an integer for the latter). Using @@ -2035,3 +2052,27 @@ Features of note: about setting a default value for the option destinations in question; they can just leave the default as ``None`` and :meth:`ensure_value` will take care of getting it right when it's needed. + +Exceptions +---------- + +.. exception:: OptionError + + Raised if an :class:`Option` instance is created with invalid or + inconsistent arguments. + +.. exception:: OptionConflictError + + Raised if conflicting options are added to an :class:`OptionParser`. + +.. exception:: OptionValueError + + Raised if an invalid option value is encountered on the command line. + +.. exception:: BadOptionError + + Raised if an invalid option is passed on the command line. + +.. exception:: AmbiguousOptionError + + Raised if an ambiguous option is passed on the command line. diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 3153f79e10ce1f..50e951c631fa88 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -3919,7 +3919,8 @@ to be ignored. the :envvar:`PATH` variable. The other variants, :func:`execl`, :func:`execle`, :func:`execv`, and :func:`execve`, will not use the :envvar:`PATH` variable to locate the executable; *path* must contain an appropriate absolute or relative - path. + path. Relative paths must include at least one slash, even on Windows, as + plain names will not be resolved. For :func:`execle`, :func:`execlpe`, :func:`execve`, and :func:`execvpe` (note that these all end in "e"), the *env* parameter must be a mapping which is @@ -3951,7 +3952,7 @@ to be ignored. .. note:: - The standard way to exit is ``sys.exit(n)``. :func:`_exit` should + The standard way to exit is :func:`sys.exit(n) <sys.exit>`. :func:`!_exit` should normally only be used in the child process after a :func:`fork`. 
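As a rough illustration of that note (a minimal, POSIX-only sketch with placeholder child work, not part of this patch), the child of a :func:`fork` typically finishes with :func:`_exit` while the parent exits through the normal machinery::

    import os
    import sys

    pid = os.fork()
    if pid == 0:
        # Child: do the work, then leave immediately, without running atexit
        # handlers or re-flushing stdio buffers inherited from the parent.
        print("child done", file=sys.stderr)
        os._exit(0)
    # Parent: reap the child, then exit normally.
    os.waitpid(pid, 0)
    sys.exit(0)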
The following exit codes are defined and can be used with :func:`_exit`, diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index 21c6ca8622dceb..5bc48a6d5f77fd 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -36,73 +36,91 @@ extension interface uses the modules :mod:`bdb` and :mod:`cmd`. Module :mod:`traceback` Standard interface to extract, format and print stack traces of Python programs. -The debugger's prompt is ``(Pdb)``. Typical usage to run a program under control -of the debugger is:: +The typical usage to break into the debugger is to insert:: - >>> import pdb - >>> import mymodule - >>> pdb.run('mymodule.test()') - > <string>(0)?() - (Pdb) continue - > <string>(1)?() + import pdb; pdb.set_trace() + +Or:: + + breakpoint() + +at the location you want to break into the debugger, and then run the program. +You can then step through the code following this statement, and continue +running without the debugger using the :pdbcmd:`continue` command. + +.. versionadded:: 3.7 + The built-in :func:`breakpoint()`, when called with defaults, can be used + instead of ``import pdb; pdb.set_trace()``. + +:: + + def double(x): + breakpoint() + return x * 2 + val = 3 + print(f"{val} * 2 is {double(val)}") + +The debugger's prompt is ``(Pdb)``, which is the indicator that you are in debug mode:: + + > ...(3)double() + -> return x * 2 + (Pdb) p x + 3 (Pdb) continue - NameError: 'spam' - > <string>(1)?() - (Pdb) + 3 * 2 is 6 .. versionchanged:: 3.3 Tab-completion via the :mod:`readline` module is available for commands and command arguments, e.g. the current global and local names are offered as arguments of the ``p`` command. -:file:`pdb.py` can also be invoked as a script to debug other scripts. For + +You can also invoke :mod:`pdb` from the command line to debug other scripts. For example:: python -m pdb myscript.py -When invoked as a script, pdb will automatically enter post-mortem debugging if +When invoked as a module, pdb will automatically enter post-mortem debugging if the program being debugged exits abnormally. After post-mortem debugging (or after normal exit of the program), pdb will restart the program. Automatic restarting preserves pdb's state (such as breakpoints) and in most cases is more useful than quitting the debugger upon program's exit. .. versionadded:: 3.2 - :file:`pdb.py` now accepts a ``-c`` option that executes commands as if given + ``-c`` option is introduced to execute commands as if given in a :file:`.pdbrc` file, see :ref:`debugger-commands`. .. versionadded:: 3.7 - :file:`pdb.py` now accepts a ``-m`` option that execute modules similar to the way + ``-m`` option is introduced to execute modules similar to the way ``python -m`` does. As with a script, the debugger will pause execution just before the first line of the module. +Typical usage to execute a statement under control of the debugger is:: -The typical usage to break into the debugger is to insert:: - - import pdb; pdb.set_trace() - -at the location you want to break into the debugger, and then run the program. -You can then step through the code following this statement, and continue -running without the debugger using the :pdbcmd:`continue` command. - -.. versionadded:: 3.7 - The built-in :func:`breakpoint()`, when called with defaults, can be used - instead of ``import pdb; pdb.set_trace()``. + >>> import pdb + >>> def f(x): + ... 
print(1 / x) + >>> pdb.run("f(2)") + > <string>(1)<module>() + (Pdb) continue + 0.5 + >>> The typical usage to inspect a crashed program is:: >>> import pdb - >>> import mymodule - >>> mymodule.test() + >>> def f(x): + ... print(1 / x) + ... + >>> f(0) Traceback (most recent call last): File "<stdin>", line 1, in <module> - File "./mymodule.py", line 4, in test - test2() - File "./mymodule.py", line 3, in test2 - print(spam) - NameError: spam + File "<stdin>", line 2, in f + ZeroDivisionError: division by zero >>> pdb.pm() - > ./mymodule.py(3)test2() - -> print(spam) + > <stdin>(2)f() + (Pdb) p x + 0 (Pdb) @@ -125,7 +143,7 @@ slightly different way: Evaluate the *expression* (given as a string or a code object) under debugger control. When :func:`runeval` returns, it returns the value of the - expression. Otherwise this function is similar to :func:`run`. + *expression*. Otherwise this function is similar to :func:`run`. .. function:: runcall(function, *args, **kwds) @@ -178,7 +196,7 @@ access further features, you have to do this yourself: that matches one of these patterns. [1]_ By default, Pdb sets a handler for the SIGINT signal (which is sent when the - user presses :kbd:`Ctrl-C` on the console) when you give a ``continue`` command. + user presses :kbd:`Ctrl-C` on the console) when you give a :pdbcmd:`continue` command. This allows you to break into the debugger again by pressing :kbd:`Ctrl-C`. If you want Pdb not to touch the SIGINT handler, set *nosigint* to true. @@ -275,7 +293,7 @@ can be overridden by the local file. .. pdbcommand:: w(here) - Print a stack trace, with the most recent frame at the bottom. An arrow + Print a stack trace, with the most recent frame at the bottom. An arrow (``>``) indicates the current frame, which determines the context of most commands. .. pdbcommand:: d(own) [count] @@ -315,22 +333,22 @@ can be overridden by the local file. With a space separated list of breakpoint numbers, clear those breakpoints. Without argument, clear all breaks (but first ask confirmation). -.. pdbcommand:: disable [bpnumber ...] +.. pdbcommand:: disable bpnumber [bpnumber ...] Disable the breakpoints given as a space separated list of breakpoint numbers. Disabling a breakpoint means it cannot cause the program to stop execution, but unlike clearing a breakpoint, it remains in the list of breakpoints and can be (re-)enabled. -.. pdbcommand:: enable [bpnumber ...] +.. pdbcommand:: enable bpnumber [bpnumber ...] Enable the breakpoints specified. .. pdbcommand:: ignore bpnumber [count] - Set the ignore count for the given breakpoint number. If count is omitted, + Set the ignore count for the given breakpoint number. If *count* is omitted, the ignore count is set to 0. A breakpoint becomes active when the ignore - count is zero. When non-zero, the count is decremented each time the + count is zero. When non-zero, the *count* is decremented each time the breakpoint is reached and the breakpoint is not disabled and any associated condition evaluates to true. @@ -369,7 +387,7 @@ can be overridden by the local file. breakpoint—which could have its own command list, leading to ambiguities about which list to execute. - If you use the 'silent' command in the command list, the usual message about + If you use the ``silent`` command in the command list, the usual message about stopping at a breakpoint is not printed. This may be desirable for breakpoints that are to print a specific message and then continue. 
If none of the other commands print anything, you see no sign that the breakpoint was reached. @@ -392,8 +410,8 @@ can be overridden by the local file. Without argument, continue execution until the line with a number greater than the current one is reached. - With a line number, continue execution until a line with a number greater or - equal to that is reached. In both cases, also stop when the current frame + With *lineno*, continue execution until a line with a number greater or + equal to *lineno* is reached. In both cases, also stop when the current frame returns. .. versionchanged:: 3.2 @@ -442,11 +460,11 @@ can be overridden by the local file. .. pdbcommand:: a(rgs) - Print the argument list of the current function. + Print the arguments of the current function and their current values. .. pdbcommand:: p expression - Evaluate the *expression* in the current context and print its value. + Evaluate *expression* in the current context and print its value. .. note:: @@ -456,32 +474,76 @@ can be overridden by the local file. .. pdbcommand:: pp expression - Like the :pdbcmd:`p` command, except the value of the expression is + Like the :pdbcmd:`p` command, except the value of *expression* is pretty-printed using the :mod:`pprint` module. .. pdbcommand:: whatis expression - Print the type of the *expression*. + Print the type of *expression*. .. pdbcommand:: source expression - Try to get source code for the given object and display it. + Try to get source code of *expression* and display it. .. versionadded:: 3.2 .. pdbcommand:: display [expression] - Display the value of the expression if it changed, each time execution stops + Display the value of *expression* if it changed, each time execution stops in the current frame. - Without expression, list all display expressions for the current frame. + Without *expression*, list all display expressions for the current frame. + + .. note:: + + Display evaluates *expression* and compares to the result of the previous + evaluation of *expression*, so when the result is mutable, display may not + be able to pick up the changes. + + Example:: + + lst = [] + breakpoint() + pass + lst.append(1) + print(lst) + + Display won't realize ``lst`` has been changed because the result of evaluation + is modified in place by ``lst.append(1)`` before being compared:: + + > example.py(3)<module>() + -> pass + (Pdb) display lst + display lst: [] + (Pdb) n + > example.py(4)<module>() + -> lst.append(1) + (Pdb) n + > example.py(5)<module>() + -> print(lst) + (Pdb) + + You can do some tricks with copy mechanism to make it work:: + + > example.py(3)<module>() + -> pass + (Pdb) display lst[:] + display lst[:]: [] + (Pdb) n + > example.py(4)<module>() + -> lst.append(1) + (Pdb) n + > example.py(5)<module>() + -> print(lst) + display lst[:]: [1] [old: []] + (Pdb) .. versionadded:: 3.2 .. pdbcommand:: undisplay [expression] - Do not display the expression any more in the current frame. Without - expression, clear all display expressions for the current frame. + Do not display *expression* anymore in the current frame. Without + *expression*, clear all display expressions for the current frame. .. versionadded:: 3.2 @@ -497,10 +559,10 @@ can be overridden by the local file. .. pdbcommand:: alias [name [command]] - Create an alias called *name* that executes *command*. The command must + Create an alias called *name* that executes *command*. The *command* must *not* be enclosed in quotes. 
Replaceable parameters can be indicated by ``%1``, ``%2``, and so on, while ``%*`` is replaced by all the parameters. - If no command is given, the current alias for *name* is shown. If no + If *command* is omitted, the current alias for *name* is shown. If no arguments are given, all aliases are listed. Aliases may be nested and can contain anything that can be legally typed at @@ -513,13 +575,13 @@ can be overridden by the local file. :file:`.pdbrc` file):: # Print instance variables (usage "pi classInst") - alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) + alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = {%1.__dict__[k]}") # Print instance variables in self alias ps pi self .. pdbcommand:: unalias name - Delete the specified alias. + Delete the specified alias *name*. .. pdbcommand:: ! statement @@ -535,7 +597,7 @@ can be overridden by the local file. .. pdbcommand:: run [args ...] restart [args ...] - Restart the debugged Python program. If an argument is supplied, it is split + Restart the debugged Python program. If *args* is supplied, it is split with :mod:`shlex` and the result is used as the new :data:`sys.argv`. History, breakpoints, actions and debugger options are preserved. :pdbcmd:`restart` is an alias for :pdbcmd:`run`. @@ -546,13 +608,13 @@ can be overridden by the local file. .. pdbcommand:: debug code - Enter a recursive debugger that steps through the code - argument (which is an arbitrary expression or statement to be + Enter a recursive debugger that steps through *code* + (which is an arbitrary expression or statement to be executed in the current environment). .. pdbcommand:: retval - Print the return value for the last return of a function. + Print the return value for the last return of the current function. .. rubric:: Footnotes diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst index 788a02dcb8922f..64e617b82b48bc 100644 --- a/Doc/library/pkgutil.rst +++ b/Doc/library/pkgutil.rst @@ -25,9 +25,9 @@ support. from pkgutil import extend_path __path__ = extend_path(__path__, __name__) - This will add to the package's ``__path__`` all subdirectories of directories - on :data:`sys.path` named after the package. This is useful if one wants to - distribute different parts of a single logical package as multiple + For each directory on :data:`sys.path` that has a subdirectory that matches the + package name, add the subdirectory to the package's :attr:`__path__`. This is useful + if one wants to distribute different parts of a single logical package as multiple directories. It also looks for :file:`\*.pkg` files beginning where ``*`` matches the @@ -82,7 +82,7 @@ support. This is a backwards compatibility wrapper around :func:`importlib.util.find_spec` that converts most failures to :exc:`ImportError` and only returns the loader rather than the full - :class:`ModuleSpec`. + :class:`importlib.machinery.ModuleSpec`. .. versionchanged:: 3.3 Updated to be based directly on :mod:`importlib` rather than relying diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst index c2189e02656c7a..723f927135a0f4 100644 --- a/Doc/library/profile.rst +++ b/Doc/library/profile.rst @@ -82,7 +82,7 @@ the following:: The first line indicates that 214 calls were monitored. Of those calls, 207 were :dfn:`primitive`, meaning that the call was not induced via recursion. 
The -next line: ``Ordered by: cumulative name``, indicates that the text string in the +next line: ``Ordered by: cumulative time``, indicates that the text string in the far right column was used to sort the output. The column headings include: ncalls diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 098684d7270ffa..c192919ac62e54 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -404,8 +404,8 @@ Alternative Generator Class that implements the default pseudo-random number generator used by the :mod:`random` module. - .. deprecated:: 3.9 - In the future, the *seed* must be one of the following types: + .. deprecated-removed:: 3.9 3.11 + Formerly the *seed* could be any hashable object. Now it is limited to: :class:`NoneType`, :class:`int`, :class:`float`, :class:`str`, :class:`bytes`, or :class:`bytearray`. @@ -423,7 +423,7 @@ Notes on Reproducibility ------------------------ Sometimes it is useful to be able to reproduce the sequences given by a -pseudo-random number generator. By re-using a seed value, the same sequence should be +pseudo-random number generator. By reusing a seed value, the same sequence should be reproducible from run to run as long as multiple threads are not running. Most of the random module's algorithms and seeding functions are subject to diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst index 4d485d25b54020..8fb0eca8df74d8 100644 --- a/Doc/library/readline.rst +++ b/Doc/library/readline.rst @@ -19,7 +19,7 @@ function. Readline keybindings may be configured via an initialization file, typically ``.inputrc`` in your home directory. See `Readline Init File -<https://tiswww.cwru.edu/php/chet/readline/rluserman.html#SEC9>`_ +<https://tiswww.cwru.edu/php/chet/readline/rluserman.html#Readline-Init-File>`_ in the GNU Readline manual for information about the format and allowable constructs of that file, and the capabilities of the Readline library in general. diff --git a/Doc/library/sched.rst b/Doc/library/sched.rst index a051c65b97b05e..04215d31ba10ca 100644 --- a/Doc/library/sched.rst +++ b/Doc/library/sched.rst @@ -36,7 +36,7 @@ scheduler: Example:: >>> import sched, time - >>> s = sched.scheduler(time.time, time.sleep) + >>> s = sched.scheduler(time.monotonic, time.sleep) >>> def print_time(a='default'): ... print("From print_time", time.time(), a) ... diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index b33dbe21b1fa19..7f408be2336824 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -292,15 +292,15 @@ Directory and files operations .. versionadded:: 3.8 The *dirs_exist_ok* parameter. -.. function:: rmtree(path, ignore_errors=False, onerror=None, *, dir_fd=None) +.. function:: rmtree(path, ignore_errors=False, onerror=None, *, onexc=None, dir_fd=None) .. index:: single: directory; deleting Delete an entire directory tree; *path* must point to a directory (but not a symbolic link to a directory). If *ignore_errors* is true, errors resulting from failed removals will be ignored; if false or omitted, such errors are - handled by calling a handler specified by *onerror* or, if that is omitted, - they raise an exception. + handled by calling a handler specified by *onexc* or *onerror* or, if both + are omitted, exceptions are propagated to the caller. This function can support :ref:`paths relative to directory descriptors <dir_fd>`. @@ -315,14 +315,17 @@ Directory and files operations otherwise. 
Applications can use the :data:`rmtree.avoids_symlink_attacks` function attribute to determine which case applies. - If *onerror* is provided, it must be a callable that accepts three - parameters: *function*, *path*, and *excinfo*. + If *onexc* is provided, it must be a callable that accepts three parameters: + *function*, *path*, and *excinfo*. The first parameter, *function*, is the function which raised the exception; it depends on the platform and implementation. The second parameter, *path*, will be the path name passed to *function*. The third parameter, - *excinfo*, will be the exception information returned by - :func:`sys.exc_info`. Exceptions raised by *onerror* will not be caught. + *excinfo*, is the exception that was raised. Exceptions raised by *onexc* + will not be caught. + + The deprecated *onerror* is similar to *onexc*, except that the third + parameter it receives is the tuple returned from :func:`sys.exc_info`. .. audit-event:: shutil.rmtree path,dir_fd shutil.rmtree @@ -337,6 +340,9 @@ Directory and files operations .. versionchanged:: 3.11 The *dir_fd* parameter. + .. versionchanged:: 3.12 + Added the *onexc* parameter, deprecated *onerror*. + .. attribute:: rmtree.avoids_symlink_attacks Indicates whether the current platform and implementation provides a @@ -427,23 +433,43 @@ Directory and files operations When no *path* is specified, the results of :func:`os.environ` are used, returning either the "PATH" value or a fallback of :attr:`os.defpath`. - On Windows, the current directory is always prepended to the *path* whether - or not you use the default or provide your own, which is the behavior the - command shell uses when finding executables. Additionally, when finding the - *cmd* in the *path*, the ``PATHEXT`` environment variable is checked. For - example, if you call ``shutil.which("python")``, :func:`which` will search - ``PATHEXT`` to know that it should look for ``python.exe`` within the *path* - directories. For example, on Windows:: + On Windows, the current directory is prepended to the *path* if *mode* does + not include ``os.X_OK``. When the *mode* does include ``os.X_OK``, the + Windows API ``NeedCurrentDirectoryForExePathW`` will be consulted to + determine if the current directory should be prepended to *path*. To avoid + consulting the current working directory for executables: set the environment + variable ``NoDefaultCurrentDirectoryInExePath``. + + Also on Windows, the ``PATHEXT`` variable is used to resolve commands + that may not already include an extension. For example, if you call + ``shutil.which("python")``, :func:`which` will search ``PATHEXT`` + to know that it should look for ``python.exe`` within the *path* + directories. For example, on Windows:: >>> shutil.which("python") 'C:\\Python33\\python.EXE' + This is also applied when *cmd* is a path that contains a directory + component:: + + >> shutil.which("C:\\Python33\\python") + 'C:\\Python33\\python.EXE' + .. versionadded:: 3.3 .. versionchanged:: 3.8 The :class:`bytes` type is now accepted. If *cmd* type is :class:`bytes`, the result type is also :class:`bytes`. + .. 
versionchanged:: 3.12 + On Windows, the current directory is no longer prepended to the search + path if *mode* includes ``os.X_OK`` and WinAPI + ``NeedCurrentDirectoryForExePathW(cmd)`` is false, else the current + directory is prepended even if it is already in the search path; + ``PATHEXT`` is used now even when *cmd* includes a directory component + or ends with an extension that is in ``PATHEXT``; and filenames that + have no extension can now be found. + .. exception:: Error This exception collects exceptions that are raised during a multi-file @@ -509,7 +535,7 @@ rmtree example ~~~~~~~~~~~~~~ This example shows how to remove a directory tree on Windows where some -of the files have their read-only bit set. It uses the onerror callback +of the files have their read-only bit set. It uses the onexc callback to clear the readonly bit and reattempt the remove. Any subsequent failure will propagate. :: @@ -521,7 +547,7 @@ will propagate. :: os.chmod(path, stat.S_IWRITE) func(path) - shutil.rmtree(directory, onerror=remove_readonly) + shutil.rmtree(directory, onexc=remove_readonly) .. _archiving-operations: @@ -636,7 +662,7 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. Remove the archive format *name* from the list of supported formats. -.. function:: unpack_archive(filename[, extract_dir[, format]]) +.. function:: unpack_archive(filename[, extract_dir[, format[, filter]]]) Unpack an archive. *filename* is the full path of the archive. @@ -650,6 +676,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. registered for that extension. In case none is found, a :exc:`ValueError` is raised. + The keyword-only *filter* argument is passed to the underlying unpacking + function. For zip files, *filter* is not accepted. + For tar files, it is recommended to set it to ``'data'``, + unless using features specific to tar and UNIX-like filesystems. + (See :ref:`tarfile-extraction-filter` for details.) + The ``'data'`` filter will become the default for tar files + in Python 3.14. + .. audit-event:: shutil.unpack_archive filename,extract_dir,format shutil.unpack_archive .. warning:: @@ -662,6 +696,9 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. .. versionchanged:: 3.7 Accepts a :term:`path-like object` for *filename* and *extract_dir*. + .. versionchanged:: 3.12 + Added the *filter* argument. + .. function:: register_unpack_format(name, extensions, function[, extra_args[, description]]) Registers an unpack format. *name* is the name of the format and @@ -669,11 +706,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. ``.zip`` for Zip files. *function* is the callable that will be used to unpack archives. The - callable will receive the path of the archive, followed by the directory - the archive must be extracted to. - - When provided, *extra_args* is a sequence of ``(name, value)`` tuples that - will be passed as keywords arguments to the callable. + callable will receive: + + - the path of the archive, as a positional argument; + - the directory the archive must be extracted to, as a positional argument; + - possibly a *filter* keyword argument, if it was given to + :func:`unpack_archive`; + - additional keyword arguments, specified by *extra_args* as a sequence + of ``(name, value)`` tuples. *description* can be provided to describe the format, and will be returned by the :func:`get_unpack_formats` function. 
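+
+   For example, a minimal sketch of a custom unpacker (the format name
+   ``mytar``, the ``.mytar`` extension, the ``unpack_mytar`` helper and the
+   file names are illustrative, not part of the module)::
+
+      import shutil
+      import tarfile
+
+      def unpack_mytar(filename, extract_dir, **kwargs):
+          # A *filter* given to unpack_archive(), and any *extra_args*
+          # entries, arrive here as keyword arguments.
+          with tarfile.open(filename) as tar:
+              tar.extractall(extract_dir, filter=kwargs.get('filter', 'data'))
+
+      shutil.register_unpack_format('mytar', ['.mytar'], unpack_mytar,
+                                    description='Tar files with a custom suffix')
+      shutil.unpack_archive('example.mytar', 'dest', filter='data')
+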
diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst index 2539c3d3883298..4686232b09ac47 100644 --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -25,7 +25,7 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions). An :class:`SMTP` instance encapsulates an SMTP connection. It has methods that support a full repertoire of SMTP and ESMTP operations. If the optional - host and port parameters are given, the SMTP :meth:`connect` method is + *host* and *port* parameters are given, the SMTP :meth:`connect` method is called with those parameters during initialization. If specified, *local_hostname* is used as the FQDN of the local host in the HELO/EHLO command. Otherwise, the local hostname is found using @@ -34,12 +34,12 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions). *timeout* parameter specifies a timeout in seconds for blocking operations like the connection attempt (if not specified, the global default timeout setting will be used). If the timeout expires, :exc:`TimeoutError` is - raised. The optional source_address parameter allows binding + raised. The optional *source_address* parameter allows binding to some specific source address in a machine with multiple network interfaces, and/or to some specific source TCP port. It takes a 2-tuple - (host, port), for the socket to bind to as its source address before - connecting. If omitted (or if host or port are ``''`` and/or 0 respectively) - the OS default behavior will be used. + ``(host, port)``, for the socket to bind to as its source address before + connecting. If omitted (or if *host* or *port* are ``''`` and/or ``0`` + respectively) the OS default behavior will be used. For normal use, you should only require the initialization/connect, :meth:`sendmail`, and :meth:`SMTP.quit` methods. diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index aec79da57f0576..c8ca555700a3c9 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -1775,7 +1775,7 @@ to sockets. much data, if any, was successfully sent. .. versionchanged:: 3.5 - The socket timeout is no more reset each time data is sent successfully. + The socket timeout is no longer reset each time data is sent successfully. The socket timeout is now the maximum total duration to send all data. .. versionchanged:: 3.5 @@ -1998,8 +1998,8 @@ can be changed by calling :func:`setdefaulttimeout`. * In *non-blocking mode*, operations fail (with an error that is unfortunately system-dependent) if they cannot be completed immediately: functions from the - :mod:`select` can be used to know when and whether a socket is available for - reading or writing. + :mod:`select` module can be used to know when and whether a socket is available + for reading or writing. * In *timeout mode*, operations fail if they cannot be completed within the timeout specified for the socket (they raise a :exc:`timeout` exception) @@ -2188,7 +2188,7 @@ manager protocol instead, open a socket with:: socket.socket(socket.AF_CAN, socket.SOCK_DGRAM, socket.CAN_BCM) After binding (:const:`CAN_RAW`) or connecting (:const:`CAN_BCM`) the socket, you -can use the :meth:`socket.send`, and the :meth:`socket.recv` operations (and +can use the :meth:`socket.send` and :meth:`socket.recv` operations (and their counterparts) on the socket object as usual. 
This last example might require special privileges:: diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst index ceb962e860042d..d65e9fe81acf8b 100644 --- a/Doc/library/socketserver.rst +++ b/Doc/library/socketserver.rst @@ -140,9 +140,16 @@ server is the address family. ForkingUDPServer ThreadingTCPServer ThreadingUDPServer + ForkingUnixStreamServer + ForkingUnixDatagramServer + ThreadingUnixStreamServer + ThreadingUnixDatagramServer These classes are pre-defined using the mix-in classes. +.. versionadded:: 3.12 + The ``ForkingUnixStreamServer`` and ``ForkingUnixDatagramServer`` classes + were added. To implement a service, you must derive a class from :class:`BaseRequestHandler` and redefine its :meth:`~BaseRequestHandler.handle` method. diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index ff036ad56acba8..89673b8006ae77 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -259,7 +259,7 @@ Module functions .. function:: connect(database, timeout=5.0, detect_types=0, \ isolation_level="DEFERRED", check_same_thread=True, \ factory=sqlite3.Connection, cached_statements=128, \ - uri=False, \*, \ + uri=False, *, \ autocommit=sqlite3.LEGACY_TRANSACTION_CONTROL) Open a connection to an SQLite database. @@ -272,9 +272,9 @@ Module functions :param float timeout: How many seconds the connection should wait before raising - an exception, if the database is locked by another connection. - If another connection opens a transaction to modify the database, - it will be locked until that transaction is committed. + an :exc:`OperationalError` when a table is locked. + If another connection opens a transaction to modify a table, + that table will be locked until the transaction is committed. Default five seconds. :param int detect_types: @@ -310,7 +310,7 @@ Module functions to avoid data corruption. See :attr:`threadsafety` for more information. - :param Connection factory: + :param ~sqlite3.Connection factory: A custom subclass of :class:`Connection` to create the connection with, if not the default :class:`Connection` class. @@ -337,7 +337,7 @@ Module functions The default will change to ``False`` in a future Python release. :type autocommit: bool - :rtype: Connection + :rtype: ~sqlite3.Connection .. audit-event:: sqlite3.connect database sqlite3.connect .. audit-event:: sqlite3.connect/handle connection_handle sqlite3.connect @@ -573,6 +573,38 @@ Module constants package, a third-party library which used to upstream changes to :mod:`!sqlite3`. Today, it carries no meaning or practical value. +.. _sqlite3-dbconfig-constants: + +.. data:: SQLITE_DBCONFIG_DEFENSIVE + SQLITE_DBCONFIG_DQS_DDL + SQLITE_DBCONFIG_DQS_DML + SQLITE_DBCONFIG_ENABLE_FKEY + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION + SQLITE_DBCONFIG_ENABLE_QPSG + SQLITE_DBCONFIG_ENABLE_TRIGGER + SQLITE_DBCONFIG_ENABLE_VIEW + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE + SQLITE_DBCONFIG_RESET_DATABASE + SQLITE_DBCONFIG_TRIGGER_EQP + SQLITE_DBCONFIG_TRUSTED_SCHEMA + SQLITE_DBCONFIG_WRITABLE_SCHEMA + + These constants are used for the :meth:`Connection.setconfig` + and :meth:`~Connection.getconfig` methods. + + The availability of these constants varies depending on the version of SQLite + Python was compiled with. + + .. versionadded:: 3.12 + + .. seealso:: + + https://www.sqlite.org/c3ref/c_dbconfig_defensive.html + SQLite docs: Database Connection Configuration Options + .. 
_sqlite3-connection-objects: @@ -911,7 +943,7 @@ Connection objects Call this method from a different thread to abort any queries that might be executing on the connection. - Aborted queries will raise an exception. + Aborted queries will raise an :exc:`OperationalError`. .. method:: set_authorizer(authorizer_callback) @@ -1041,12 +1073,25 @@ Connection objects (2, 'broccoli pie', 'broccoli cheese onions flour') (3, 'pumpkin pie', 'pumpkin sugar flour butter') - .. method:: load_extension(path, /) + .. method:: load_extension(path, /, *, entrypoint=None) - Load an SQLite extension from a shared library located at *path*. + Load an SQLite extension from a shared library. Enable extension loading with :meth:`enable_load_extension` before calling this method. + :param str path: + + The path to the SQLite extension. + + :param entrypoint: + + Entry point name. + If ``None`` (the default), + SQLite will come up with an entry point name of its own; + see the SQLite docs `Loading an Extension`_ for details. + + :type entrypoint: str | None + .. audit-event:: sqlite3.load_extension connection,path sqlite3.Connection.load_extension .. versionadded:: 3.2 @@ -1054,6 +1099,11 @@ Connection objects .. versionchanged:: 3.10 Added the ``sqlite3.load_extension`` auditing event. + .. versionadded:: 3.12 + The *entrypoint* parameter. + + .. _Loading an Extension: https://www.sqlite.org/loadext.html#loading_an_extension_ + .. method:: iterdump Return an :term:`iterator` to dump the database as SQL source code. @@ -1079,7 +1129,7 @@ Connection objects Works even if the database is being accessed by other clients or concurrently by the same connection. - :param Connection target: + :param ~sqlite3.Connection target: The database connection to save the backup to. :param int pages: @@ -1201,6 +1251,30 @@ Connection objects .. _SQLite limit category: https://www.sqlite.org/c3ref/c_limit_attached.html + .. method:: getconfig(op, /) + + Query a boolean connection configuration option. + + :param int op: + A :ref:`SQLITE_DBCONFIG code <sqlite3-dbconfig-constants>`. + + :rtype: bool + + .. versionadded:: 3.12 + + .. method:: setconfig(op, enable=True, /) + + Set a boolean connection configuration option. + + :param int op: + A :ref:`SQLITE_DBCONFIG code <sqlite3-dbconfig-constants>`. + + :param bool enable: + ``True`` if the configuration option should be enabled (default); + ``False`` if it should be disabled. + + .. versionadded:: 3.12 + .. method:: serialize(*, name="main") Serialize a database into a :class:`bytes` object. For an @@ -1456,12 +1530,12 @@ Cursor objects For every item in *parameters*, repeatedly execute the :ref:`parameterized <sqlite3-placeholders>` - SQL statement *sql*. + :abbr:`DML (Data Manipulation Language)` SQL statement *sql*. Uses the same implicit transaction handling as :meth:`~Cursor.execute`. :param str sql: - A single SQL :abbr:`DML (Data Manipulation Language)` statement. + A single SQL DML statement. :param parameters: An :term:`!iterable` of parameters to bind with @@ -1484,6 +1558,13 @@ Cursor objects # cur is an sqlite3.Cursor object cur.executemany("INSERT INTO data VALUES(?)", rows) + .. note:: + + Any resulting rows are discarded, + including DML statements with `RETURNING clauses`_. + + .. _RETURNING clauses: https://www.sqlite.org/lang_returning.html + .. 
deprecated-removed:: 3.12 3.14 :exc:`DeprecationWarning` is emitted if diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 30f2a0765cc955..4b60b7c643b62c 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -1218,7 +1218,7 @@ SSL sockets also have the following additional methods and attributes: .. method:: SSLSocket.shared_ciphers() - Return the list of ciphers shared by the client during the handshake. Each + Return the list of ciphers available in both the client and server. Each entry of the returned list is a three-value tuple containing the name of the cipher, the version of the SSL protocol that defines its use, and the number of secret bits the cipher uses. :meth:`~SSLSocket.shared_ciphers` returns diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index bcfc6e5cfce611..2360472b31f175 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1605,8 +1605,8 @@ expression support in the :mod:`re` module). converts it to ``"ss"``. The casefolding algorithm is - `described in section 3.13 of the Unicode Standard - <http://www.unicode.org/versions/Unicode15.0.0/ch03.pdf#G53253>`__. + `described in section 3.13 'Default Case Folding' of the Unicode Standard + <https://www.unicode.org/versions/Unicode15.0.0/ch03.pdf>`__. .. versionadded:: 3.3 @@ -1768,8 +1768,9 @@ expression support in the :mod:`re` module). one character, ``False`` otherwise. Alphabetic characters are those characters defined in the Unicode character database as "Letter", i.e., those with general category property being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is different - from the `Alphabetic property defined in the Unicode Standard - <https://www.unicode.org/versions/Unicode15.0.0/ch04.pdf#G91002>`_. + from the `Alphabetic property defined in the section 4.10 'Letters, Alphabetic, and + Ideographic' of the Unicode Standard + <https://www.unicode.org/versions/Unicode15.0.0/ch04.pdf>`_. .. method:: str.isascii() @@ -1904,8 +1905,8 @@ expression support in the :mod:`re` module). lowercase. The lowercasing algorithm used is - `described in section 3.13 of the Unicode Standard - <https://www.unicode.org/versions/Unicode15.0.0/ch03.pdf#G34078>`__. + `described in section 3.13 'Default Case Folding' of the Unicode Standard + <https://www.unicode.org/versions/Unicode15.0.0/ch03.pdf>`__. .. method:: str.lstrip([chars]) @@ -2250,8 +2251,8 @@ expression support in the :mod:`re` module). titlecase). The uppercasing algorithm used is - `described in section 3.13 of the Unicode Standard - <https://www.unicode.org/versions/Unicode15.0.0/ch03.pdf#G34078>`__. + `described in section 3.13 'Default Case Folding' of the Unicode Standard + <https://www.unicode.org/versions/Unicode15.0.0/ch03.pdf>`__. .. method:: str.zfill(width) @@ -3714,12 +3715,15 @@ copying. types such as :class:`bytes` and :class:`bytearray`, an element is a single byte, but other types such as :class:`array.array` may have bigger elements. - ``len(view)`` is equal to the length of :class:`~memoryview.tolist`. - If ``view.ndim = 0``, the length is 1. If ``view.ndim = 1``, the length - is equal to the number of elements in the view. For higher dimensions, - the length is equal to the length of the nested list representation of - the view. The :class:`~memoryview.itemsize` attribute will give you the - number of bytes in a single element. + ``len(view)`` is equal to the length of :class:`~memoryview.tolist`, which + is the nested list representation of the view. 
If ``view.ndim = 1``, + this is equal to the number of elements in the view. + + .. versionchanged:: 3.12 + If ``view.ndim == 0``, ``len(view)`` now raises :exc:`TypeError` instead of returning 1. + + The :class:`~memoryview.itemsize` attribute will give you the number of + bytes in a single element. A :class:`memoryview` supports slicing and indexing to expose its data. One-dimensional slicing will result in a subview:: diff --git a/Doc/library/string.rst b/Doc/library/string.rst index 3b96813e683864..26b3f5000634f5 100644 --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -235,7 +235,7 @@ dictionary keys (e.g., the strings ``'10'`` or ``':-]'``) within a format string The *arg_name* can be followed by any number of index or attribute expressions. An expression of the form ``'.name'`` selects the named attribute using :func:`getattr`, while an expression of the form ``'[index]'`` -does an index lookup using :func:`__getitem__`. +does an index lookup using :meth:`~object.__getitem__`. .. versionchanged:: 3.1 The positional argument specifiers can be omitted for :meth:`str.format`, @@ -254,10 +254,10 @@ Some simple format string examples:: "Units destroyed: {players[0]}" # First element of keyword argument 'players'. The *conversion* field causes a type coercion before formatting. Normally, the -job of formatting a value is done by the :meth:`__format__` method of the value +job of formatting a value is done by the :meth:`~object.__format__` method of the value itself. However, in some cases it is desirable to force a type to be formatted as a string, overriding its own definition of formatting. By converting the -value to a string before calling :meth:`__format__`, the normal formatting logic +value to a string before calling :meth:`~object.__format__`, the normal formatting logic is bypassed. Three conversion flags are currently supported: ``'!s'`` which calls :func:`str` @@ -310,7 +310,7 @@ non-empty format specification typically modifies the result. The general form of a *standard format specifier* is: .. productionlist:: format-spec - format_spec: [[`fill`]`align`][`sign`][z][#][0][`width`][`grouping_option`][.`precision`][`type`] + format_spec: [[`fill`]`align`][`sign`]["z"]["#"]["0"][`width`][`grouping_option`]["." `precision`][`type`] fill: <any character> align: "<" | ">" | "=" | "^" sign: "+" | "-" | " " diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 2b5a82e0107fb6..53dfbf827260c9 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -919,9 +919,12 @@ Reassigning them to new values is unsupported: .. attribute:: Popen.returncode - The child return code, set by :meth:`poll` and :meth:`wait` (and indirectly - by :meth:`communicate`). A ``None`` value indicates that the process - hasn't terminated yet. + The child return code. Initially ``None``, :attr:`returncode` is set by + a call to the :meth:`poll`, :meth:`wait`, or :meth:`communicate` methods + if they detect that the process has terminated. + + A ``None`` value indicates that the process hadn't yet terminated at the + time of the last method call. A negative value ``-N`` indicates that the child was terminated by signal ``N`` (POSIX only). diff --git a/Doc/library/superseded.rst b/Doc/library/superseded.rst index 8786e227be9182..aaf66ea121d39c 100644 --- a/Doc/library/superseded.rst +++ b/Doc/library/superseded.rst @@ -17,7 +17,6 @@ backwards compatibility. They have been superseded by other modules. 
chunk.rst crypt.rst imghdr.rst - imp.rst mailcap.rst msilib.rst nis.rst diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index a53d4908783e15..7c0e85142e7716 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -220,6 +220,10 @@ always available. .. audit-event:: sys._current_exceptions "" sys._current_exceptions + .. versionchanged:: 3.12 + Each value in the dictionary is now a single exception instance, rather + than a 3-tuple as returned from ``sys.exc_info()``. + .. function:: breakpointhook() This hook function is called by built-in :func:`breakpoint`. By default, @@ -666,6 +670,13 @@ always available. .. versionadded:: 3.4 +.. function:: getunicodeinternedsize() + + Return the number of unicode objects that have been interned. + + .. versionadded:: 3.12 + + .. function:: getandroidapilevel() Return the build time API version of Android as an integer. @@ -697,7 +708,7 @@ always available. the encoding used with the :term:`filesystem error handler <filesystem encoding and error handler>` to convert between Unicode filenames and bytes filenames. The filesystem error handler is returned from - :func:`getfilesystemencoding`. + :func:`getfilesystemencodeerrors`. For best compatibility, str should be used for filenames in all cases, although representing filenames as bytes is also supported. Functions @@ -1102,22 +1113,25 @@ always available. .. versionadded:: 3.5 +.. data:: last_exc + + This variable is not always defined; it is set to the exception instance + when an exception is not handled and the interpreter prints an error message + and a stack traceback. Its intended use is to allow an interactive user to + import a debugger module and engage in post-mortem debugging without having + to re-execute the command that caused the error. (Typical use is + ``import pdb; pdb.pm()`` to enter the post-mortem debugger; see :mod:`pdb` + module for more information.) + + .. versionadded:: 3.12 .. data:: last_type last_value last_traceback - These three variables are not always defined; they are set when an exception is - not handled and the interpreter prints an error message and a stack traceback. - Their intended use is to allow an interactive user to import a debugger module - and engage in post-mortem debugging without having to re-execute the command - that caused the error. (Typical use is ``import pdb; pdb.pm()`` to enter the - post-mortem debugger; see :mod:`pdb` module for - more information.) - - The meaning of the variables is the same as that of the return values from - :func:`exc_info` above. - + These three variables are deprecated; use :data:`sys.last_exc` instead. + They hold the legacy representation of ``sys.last_exc``, as returned + from :func:`exc_info` above. .. data:: maxsize @@ -1239,10 +1253,6 @@ always available. Originally specified in :pep:`302`. - .. versionchanged:: 3.3 - ``None`` is stored instead of :class:`imp.NullImporter` when no finder - is found. - .. data:: platform diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index 741d40da152101..891af1bcf7edff 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -36,6 +36,13 @@ Some facts and figures: .. versionchanged:: 3.3 Added support for :mod:`lzma` compression. +.. versionchanged:: 3.12 + Archives are extracted using a :ref:`filter <tarfile-extraction-filter>`, + which makes it possible to either limit surprising/dangerous features, + or to acknowledge that they are expected and the archive is fully trusted. 
+ By default, archives are fully trusted, but this default is deprecated + and slated to change in Python 3.14. + .. function:: open(name=None, mode='r', fileobj=None, bufsize=10240, **kwargs) @@ -209,6 +216,38 @@ The :mod:`tarfile` module defines the following exceptions: Is raised by :meth:`TarInfo.frombuf` if the buffer it gets is invalid. +.. exception:: FilterError + + Base class for members :ref:`refused <tarfile-extraction-refuse>` by + filters. + + .. attribute:: tarinfo + + Information about the member that the filter refused to extract, + as :ref:`TarInfo <tarinfo-objects>`. + +.. exception:: AbsolutePathError + + Raised to refuse extracting a member with an absolute path. + +.. exception:: OutsideDestinationError + + Raised to refuse extracting a member outside the destination directory. + +.. exception:: SpecialFileError + + Raised to refuse extracting a special file (e.g. a device or pipe). + +.. exception:: AbsoluteLinkError + + Raised to refuse extracting a symbolic link with an absolute path. + +.. exception:: LinkOutsideDestinationError + + Raised to refuse extracting a symbolic link pointing outside the destination + directory. + + The following constants are available at the module level: .. data:: ENCODING @@ -319,11 +358,8 @@ be finalized; only the internally used file object will be closed. See the *debug* can be set from ``0`` (no debug messages) up to ``3`` (all debug messages). The messages are written to ``sys.stderr``. - If *errorlevel* is ``0``, all errors are ignored when using :meth:`TarFile.extract`. - Nevertheless, they appear as error messages in the debug output, when debugging - is enabled. If ``1``, all *fatal* errors are raised as :exc:`OSError` - exceptions. If ``2``, all *non-fatal* errors are raised as :exc:`TarError` - exceptions as well. + *errorlevel* controls how extraction errors are handled, + see :attr:`the corresponding attribute <~TarFile.errorlevel>`. The *encoding* and *errors* arguments define the character encoding to be used for reading or writing the archive and how conversion errors are going @@ -390,7 +426,7 @@ be finalized; only the internally used file object will be closed. See the available. -.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False) +.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False, filter=None) Extract all members from the archive to the current working directory or directory *path*. If optional *members* is given, it must be a subset of the @@ -404,6 +440,12 @@ be finalized; only the internally used file object will be closed. See the are used to set the owner/group for the extracted files. Otherwise, the named values from the tarfile are used. + The *filter* argument specifies how ``members`` are modified or rejected + before extraction. + See :ref:`tarfile-extraction-filter` for details. + It is recommended to set this explicitly depending on which *tar* features + you need to support. + .. warning:: Never extract archives from untrusted sources without prior inspection. @@ -411,14 +453,20 @@ be finalized; only the internally used file object will be closed. See the that have absolute filenames starting with ``"/"`` or filenames with two dots ``".."``. + Set ``filter='data'`` to prevent the most dangerous security issues, + and read the :ref:`tarfile-extraction-filter` section for details. + .. versionchanged:: 3.5 Added the *numeric_owner* parameter. .. versionchanged:: 3.6 The *path* parameter accepts a :term:`path-like object`. + .. 
versionchanged:: 3.12 + Added the *filter* parameter. -.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False) + +.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False, filter=None) Extract a member from the archive to the current working directory, using its full name. Its file information is extracted as accurately as possible. *member* @@ -426,9 +474,8 @@ be finalized; only the internally used file object will be closed. See the directory using *path*. *path* may be a :term:`path-like object`. File attributes (owner, mtime, mode) are set unless *set_attrs* is false. - If *numeric_owner* is :const:`True`, the uid and gid numbers from the tarfile - are used to set the owner/group for the extracted files. Otherwise, the named - values from the tarfile are used. + The *numeric_owner* and *filter* arguments are the same as + for :meth:`extractall`. .. note:: @@ -439,6 +486,9 @@ be finalized; only the internally used file object will be closed. See the See the warning for :meth:`extractall`. + Set ``filter='data'`` to prevent the most dangerous security issues, + and read the :ref:`tarfile-extraction-filter` section for details. + .. versionchanged:: 3.2 Added the *set_attrs* parameter. @@ -448,6 +498,9 @@ be finalized; only the internally used file object will be closed. See the .. versionchanged:: 3.6 The *path* parameter accepts a :term:`path-like object`. + .. versionchanged:: 3.12 + Added the *filter* parameter. + .. method:: TarFile.extractfile(member) @@ -460,6 +513,55 @@ be finalized; only the internally used file object will be closed. See the .. versionchanged:: 3.3 Return an :class:`io.BufferedReader` object. +.. attribute:: TarFile.errorlevel + :type: int + + If *errorlevel* is ``0``, errors are ignored when using :meth:`TarFile.extract` + and :meth:`TarFile.extractall`. + Nevertheless, they appear as error messages in the debug output when + *debug* is greater than 0. + If ``1`` (the default), all *fatal* errors are raised as :exc:`OSError` or + :exc:`FilterError` exceptions. If ``2``, all *non-fatal* errors are raised + as :exc:`TarError` exceptions as well. + + Some exceptions, e.g. ones caused by wrong argument types or data + corruption, are always raised. + + Custom :ref:`extraction filters <tarfile-extraction-filter>` + should raise :exc:`FilterError` for *fatal* errors + and :exc:`ExtractError` for *non-fatal* ones. + + Note that when an exception is raised, the archive may be partially + extracted. It is the user’s responsibility to clean up. + +.. attribute:: TarFile.extraction_filter + + .. versionadded:: 3.12 + + The :ref:`extraction filter <tarfile-extraction-filter>` used + as a default for the *filter* argument of :meth:`~TarFile.extract` + and :meth:`~TarFile.extractall`. + + The attribute may be ``None`` or a callable. + String names are not allowed for this attribute, unlike the *filter* + argument to :meth:`~TarFile.extract`. + + If ``extraction_filter`` is ``None`` (the default), + calling an extraction method without a *filter* argument will raise a + ``DeprecationWarning``, + and fall back to the :func:`fully_trusted <fully_trusted_filter>` filter, + whose dangerous behavior matches previous versions of Python. + + In Python 3.14+, leaving ``extraction_filter=None`` will cause + extraction methods to use the :func:`data <data_filter>` filter by default. + + The attribute may be set on instances or overridden in subclasses. 
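+
+   For example, a short sketch of setting the attribute on a single instance
+   (the archive and destination names are illustrative)::
+
+      import tarfile
+
+      with tarfile.open('example.tar.gz') as tar:
+          # Every extraction on this instance now defaults to the 'data' filter.
+          tar.extraction_filter = tarfile.data_filter
+          tar.extractall('dest')
+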
+ It also is possible to set it on the ``TarFile`` class itself to set a + global default, although, since it affects all uses of *tarfile*, + it is best practice to only do so in top-level applications or + :mod:`site configuration <site>`. + To set a global default this way, a filter function needs to be wrapped in + :func:`staticmethod()` to prevent injection of a ``self`` argument. .. method:: TarFile.add(name, arcname=None, recursive=True, *, filter=None) @@ -535,8 +637,23 @@ permissions, owner etc.), it provides some useful methods to determine its type. It does *not* contain the file's data itself. :class:`TarInfo` objects are returned by :class:`TarFile`'s methods -:meth:`getmember`, :meth:`getmembers` and :meth:`gettarinfo`. +:meth:`~TarFile.getmember`, :meth:`~TarFile.getmembers` and +:meth:`~TarFile.gettarinfo`. +Modifying the objects returned by :meth:`~!TarFile.getmember` or +:meth:`~!TarFile.getmembers` will affect all subsequent +operations on the archive. +For cases where this is unwanted, you can use :mod:`copy.copy() <copy>` or +call the :meth:`~TarInfo.replace` method to create a modified copy in one step. + +Several attributes can be set to ``None`` to indicate that a piece of metadata +is unused or unknown. +Different :class:`TarInfo` methods handle ``None`` differently: + +- The :meth:`~TarFile.extract` or :meth:`~TarFile.extractall` methods will + ignore the corresponding metadata, leaving it set to a default. +- :meth:`~TarFile.addfile` will fail. +- :meth:`~TarFile.list` will print a placeholder string. .. class:: TarInfo(name="") @@ -569,24 +686,39 @@ A ``TarInfo`` object has the following public data attributes: .. attribute:: TarInfo.name + :type: str Name of the archive member. .. attribute:: TarInfo.size + :type: int Size in bytes. .. attribute:: TarInfo.mtime + :type: int | float - Time of last modification. + Time of last modification in seconds since the :ref:`epoch <epoch>`, + as in :attr:`os.stat_result.st_mtime`. + + .. versionchanged:: 3.12 + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.mode + :type: int - Permission bits. + Permission bits, as for :func:`os.chmod`. + .. versionchanged:: 3.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.type @@ -598,35 +730,76 @@ A ``TarInfo`` object has the following public data attributes: .. attribute:: TarInfo.linkname + :type: str Name of the target file name, which is only present in :class:`TarInfo` objects of type :const:`LNKTYPE` and :const:`SYMTYPE`. .. attribute:: TarInfo.uid + :type: int User ID of the user who originally stored this member. + .. versionchanged:: 3.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.gid + :type: int Group ID of the user who originally stored this member. + .. versionchanged:: 3.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.uname + :type: str User name. + .. versionchanged:: 3.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.gname + :type: str Group name. + .. 
versionchanged:: 3.12 + + Can be set to ``None`` for :meth:`~TarFile.extract` and + :meth:`~TarFile.extractall`, causing extraction to skip applying this + attribute. .. attribute:: TarInfo.pax_headers + :type: dict A dictionary containing key-value pairs of an associated pax extended header. +.. method:: TarInfo.replace(name=..., mtime=..., mode=..., linkname=..., + uid=..., gid=..., uname=..., gname=..., + deep=True) + + .. versionadded:: 3.12 + + Return a *new* copy of the :class:`!TarInfo` object with the given attributes + changed. For example, to return a ``TarInfo`` with the group name set to + ``'staff'``, use:: + + new_tarinfo = old_tarinfo.replace(gname='staff') + + By default, a deep copy is made. + If *deep* is false, the copy is shallow, i.e. ``pax_headers`` + and any custom attributes are shared with the original ``TarInfo`` object. A :class:`TarInfo` object also provides some convenient query methods: @@ -676,9 +849,258 @@ A :class:`TarInfo` object also provides some convenient query methods: Return :const:`True` if it is one of character device, block device or FIFO. +.. _tarfile-extraction-filter: + +Extraction filters +------------------ + +.. versionadded:: 3.12 + +The *tar* format is designed to capture all details of a UNIX-like filesystem, +which makes it very powerful. +Unfortunately, the features make it easy to create tar files that have +unintended -- and possibly malicious -- effects when extracted. +For example, extracting a tar file can overwrite arbitrary files in various +ways (e.g. by using absolute paths, ``..`` path components, or symlinks that +affect later members). + +In most cases, the full functionality is not needed. +Therefore, *tarfile* supports extraction filters: a mechanism to limit +functionality, and thus mitigate some of the security issues. + +.. seealso:: + + :pep:`706` + Contains further motivation and rationale behind the design. + +The *filter* argument to :meth:`TarFile.extract` or :meth:`~TarFile.extractall` +can be: + +* the string ``'fully_trusted'``: Honor all metadata as specified in the + archive. + Should be used if the user trusts the archive completely, or implements + their own complex verification. + +* the string ``'tar'``: Honor most *tar*-specific features (i.e. features of + UNIX-like filesystems), but block features that are very likely to be + surprising or malicious. See :func:`tar_filter` for details. + +* the string ``'data'``: Ignore or block most features specific to UNIX-like + filesystems. Intended for extracting cross-platform data archives. + See :func:`data_filter` for details. + +* ``None`` (default): Use :attr:`TarFile.extraction_filter`. + + If that is also ``None`` (the default), raise a ``DeprecationWarning``, + and fall back to the ``'fully_trusted'`` filter, whose dangerous behavior + matches previous versions of Python. + + In Python 3.14, the ``'data'`` filter will become the default instead. + It's possible to switch earlier; see :attr:`TarFile.extraction_filter`. + +* A callable which will be called for each extracted member with a + :ref:`TarInfo <tarinfo-objects>` describing the member and the destination + path to where the archive is extracted (i.e. the same path is used for all + members):: + + filter(/, member: TarInfo, path: str) -> TarInfo | None + + The callable is called just before each member is extracted, so it can + take the current state of the disk into account. 
+ It can: + + - return a :class:`TarInfo` object which will be used instead of the metadata + in the archive, or + - return ``None``, in which case the member will be skipped, or + - raise an exception to abort the operation or skip the member, + depending on :attr:`~TarFile.errorlevel`. + Note that when extraction is aborted, :meth:`~TarFile.extractall` may leave + the archive partially extracted. It does not attempt to clean up. + +Default named filters +~~~~~~~~~~~~~~~~~~~~~ + +The pre-defined, named filters are available as functions, so they can be +reused in custom filters: + +.. function:: fully_trusted_filter(/, member, path) + + Return *member* unchanged. + + This implements the ``'fully_trusted'`` filter. + +.. function:: tar_filter(/, member, path) + + Implements the ``'tar'`` filter. + + - Strip leading slashes (``/`` and :attr:`os.sep`) from filenames. + - :ref:`Refuse <tarfile-extraction-refuse>` to extract files with absolute + paths (in case the name is absolute + even after stripping slashes, e.g. ``C:/foo`` on Windows). + This raises :class:`~tarfile.AbsolutePathError`. + - :ref:`Refuse <tarfile-extraction-refuse>` to extract files whose absolute + path (after following symlinks) would end up outside the destination. + This raises :class:`~tarfile.OutsideDestinationError`. + - Clear high mode bits (setuid, setgid, sticky) and group/other write bits + (:attr:`~stat.S_IWGRP`|:attr:`~stat.S_IWOTH`). + + Return the modified ``TarInfo`` member. + +.. function:: data_filter(/, member, path) + + Implements the ``'data'`` filter. + In addition to what ``tar_filter`` does: + + - :ref:`Refuse <tarfile-extraction-refuse>` to extract links (hard or soft) + that link to absolute paths, or ones that link outside the destination. + + This raises :class:`~tarfile.AbsoluteLinkError` or + :class:`~tarfile.LinkOutsideDestinationError`. + + Note that such files are refused even on platforms that do not support + symbolic links. + + - :ref:`Refuse <tarfile-extraction-refuse>` to extract device files + (including pipes). + This raises :class:`~tarfile.SpecialFileError`. + + - For regular files, including hard links: + + - Set the owner read and write permissions + (:attr:`~stat.S_IRUSR`|:attr:`~stat.S_IWUSR`). + - Remove the group & other executable permission + (:attr:`~stat.S_IXGRP`|:attr:`~stat.S_IXOTH`) + if the owner doesn’t have it (:attr:`~stat.S_IXUSR`). + + - For other files (directories), set ``mode`` to ``None``, so + that extraction methods skip applying permission bits. + - Set user and group info (``uid``, ``gid``, ``uname``, ``gname``) + to ``None``, so that extraction methods skip setting it. + + Return the modified ``TarInfo`` member. + + +.. _tarfile-extraction-refuse: + +Filter errors +~~~~~~~~~~~~~ + +When a filter refuses to extract a file, it will raise an appropriate exception, +a subclass of :class:`~tarfile.FilterError`. +This will abort the extraction if :attr:`TarFile.errorlevel` is 1 or more. +With ``errorlevel=0`` the error will be logged and the member will be skipped, +but extraction will continue. + + +Hints for further verification +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Even with ``filter='data'``, *tarfile* is not suited for extracting untrusted +files without prior inspection. +Among other issues, the pre-defined filters do not prevent denial-of-service +attacks. Users should do additional checks. + +Here is an incomplete list of things to consider: + +* Extract to a :func:`new temporary directory <tempfile.mkdtemp>` + to prevent e.g. 
exploiting pre-existing links, and to make it easier to + clean up after a failed extraction. +* When working with untrusted data, use external (e.g. OS-level) limits on + disk, memory and CPU usage. +* Check filenames against an allow-list of characters + (to filter out control characters, confusables, foreign path separators, + etc.). +* Check that filenames have expected extensions (discouraging files that + execute when you “click on them”, or extension-less files like Windows special device names). +* Limit the number of extracted files, total size of extracted data, + filename length (including symlink length), and size of individual files. +* Check for files that would be shadowed on case-insensitive filesystems. + +Also note that: + +* Tar files may contain multiple versions of the same file. + Later ones are expected to overwrite any earlier ones. + This feature is crucial to allow updating tape archives, but can be abused + maliciously. +* *tarfile* does not protect against issues with “live” data, + e.g. an attacker tinkering with the destination (or source) directory while + extraction (or archiving) is in progress. + + +Supporting older Python versions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Extraction filters were added to Python 3.12, but may be backported to older +versions as security updates. +To check whether the feature is available, use e.g. +``hasattr(tarfile, 'data_filter')`` rather than checking the Python version. + +The following examples show how to support Python versions with and without +the feature. +Note that setting ``extraction_filter`` will affect any subsequent operations. + +* Fully trusted archive:: + + my_tarfile.extraction_filter = (lambda member, path: member) + my_tarfile.extractall() + +* Use the ``'data'`` filter if available, but revert to Python 3.11 behavior + (``'fully_trusted'``) if this feature is not available:: + + my_tarfile.extraction_filter = getattr(tarfile, 'data_filter', + (lambda member, path: member)) + my_tarfile.extractall() + +* Use the ``'data'`` filter; *fail* if it is not available:: + + my_tarfile.extractall(filter=tarfile.data_filter) + + or:: + + my_tarfile.extraction_filter = tarfile.data_filter + my_tarfile.extractall() + +* Use the ``'data'`` filter; *warn* if it is not available:: + + if hasattr(tarfile, 'data_filter'): + my_tarfile.extractall(filter='data') + else: + # remove this when no longer needed + warn_the_user('Extracting may be unsafe; consider updating Python') + my_tarfile.extractall() + + +Stateful extraction filter example +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +While *tarfile*'s extraction methods take a simple *filter* callable, +custom filters may be more complex objects with an internal state. +It may be useful to write these as context managers, to be used like this:: + + with StatefulFilter() as filter_func: + tar.extractall(path, filter=filter_func) + +Such a filter can be written as, for example:: + + class StatefulFilter: + def __init__(self): + self.file_count = 0 + + def __enter__(self): + return self + + def __call__(self, member, path): + self.file_count += 1 + return member + + def __exit__(self, *exc_info): + print(f'{self.file_count} files extracted') + + .. _tarfile-commandline: .. program:: tarfile + Command-Line Interface ---------------------- @@ -748,6 +1170,13 @@ Command-line options Verbose output. +.. cmdoption:: --filter <filtername> + + Specifies the *filter* for ``--extract``. + See :ref:`tarfile-extraction-filter` for details. 
+ Only string names are accepted (that is, ``fully_trusted``, ``tar``, + and ``data``). + .. _tar-examples: Examples @@ -757,7 +1186,7 @@ How to extract an entire tar archive to the current working directory:: import tarfile tar = tarfile.open("sample.tar.gz") - tar.extractall() + tar.extractall(filter='data') tar.close() How to extract a subset of a tar archive with :meth:`TarFile.extractall` using diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst index b6d4f5dd05bbfc..fd4c294613fd31 100644 --- a/Doc/library/tempfile.rst +++ b/Doc/library/tempfile.rst @@ -173,7 +173,7 @@ The module defines the following user-callable items: or text *mode* was specified). -.. class:: TemporaryDirectory(suffix=None, prefix=None, dir=None, ignore_cleanup_errors=False) +.. class:: TemporaryDirectory(suffix=None, prefix=None, dir=None, ignore_cleanup_errors=False, *, delete=True) This class securely creates a temporary directory using the same rules as :func:`mkdtemp`. The resulting object can be used as a context manager (see @@ -195,6 +195,12 @@ The module defines the following user-callable items: (the :func:`cleanup` call, exiting the context manager, when the object is garbage-collected or during interpreter shutdown). + The *delete* parameter can be used to disable cleanup of the directory tree + upon exiting the context. While it may seem unusual for a context manager + to disable the action taken when exiting the context, it can be useful during + debugging or when you need your cleanup behavior to be conditional based on + other logic. + .. audit-event:: tempfile.mkdtemp fullpath tempfile.TemporaryDirectory .. versionadded:: 3.2 @@ -202,6 +208,9 @@ The module defines the following user-callable items: .. versionchanged:: 3.10 Added *ignore_cleanup_errors* parameter. + .. versionchanged:: 3.12 + Added the *delete* parameter. + .. function:: mkstemp(suffix=None, prefix=None, dir=None, text=False) @@ -283,6 +292,9 @@ The module defines the following user-callable items: .. versionchanged:: 3.6 The *dir* parameter now accepts a :term:`path-like object`. + .. versionchanged:: 3.12 + :func:`mkdtemp` now always returns an absolute path, even if *dir* is relative. + .. function:: gettempdir() diff --git a/Doc/library/test.rst b/Doc/library/test.rst index c60b4da75d1acb..20f633b8f569be 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -1691,6 +1691,21 @@ The :mod:`test.support.warnings_helper` module provides support for warnings tes .. versionadded:: 3.10 +.. function:: ignore_warnings(*, category) + + Suppress warnings that are instances of *category*, + which must be :exc:`Warning` or a subclass. + Roughly equivalent to :func:`warnings.catch_warnings` + with :meth:`warnings.simplefilter('ignore', category=category) <warnings.simplefilter>`. + For example:: + + @warning_helper.ignore_warnings(category=DeprecationWarning) + def test_suppress_warning(): + # do something + + .. versionadded:: 3.8 + + .. function:: check_no_resource_warning(testcase) Context manager to check that no :exc:`ResourceWarning` was raised. You diff --git a/Doc/library/token-list.inc b/Doc/library/token-list.inc index 2739d5bfc1dfa2..3b345099bf54b5 100644 --- a/Doc/library/token-list.inc +++ b/Doc/library/token-list.inc @@ -201,6 +201,10 @@ Token value for ``":="``. +.. data:: EXCLAMATION + + Token value for ``"!"``. + .. data:: OP .. data:: AWAIT @@ -213,6 +217,12 @@ .. data:: SOFT_KEYWORD +.. data:: FSTRING_START + +.. data:: FSTRING_MIDDLE + +.. data:: FSTRING_END + .. data:: ERRORTOKEN .. 
data:: N_TOKENS diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst index 05392d04e52263..10138f4f406f85 100644 --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -107,6 +107,7 @@ Turtle motion | :func:`right` | :func:`rt` | :func:`left` | :func:`lt` | :func:`goto` | :func:`setpos` | :func:`setposition` + | :func:`teleport` | :func:`setx` | :func:`sety` | :func:`setheading` | :func:`seth` @@ -372,6 +373,44 @@ Turtle motion (0.00,0.00) +.. function:: teleport(x, y=None, *, fill_gap=False) + + :param x: a number or ``None`` + :param y: a number or ``None`` + :param fill_gap: a boolean + + Move turtle to an absolute position. Unlike goto(x, y), a line will not + be drawn. The turtle's orientation does not change. If currently + filling, the polygon(s) teleported from will be filled after leaving, + and filling will begin again after teleporting. This can be disabled + with fill_gap=True, which makes the imaginary line traveled during + teleporting act as a fill barrier like in goto(x, y). + + .. doctest:: + :skipif: _tkinter is None + :hide: + + >>> turtle.goto(0, 0) + + .. doctest:: + :skipif: _tkinter is None + + >>> tp = turtle.pos() + >>> tp + (0.00,0.00) + >>> turtle.teleport(60) + >>> turtle.pos() + (60.00,0.00) + >>> turtle.teleport(y=10) + >>> turtle.pos() + (60.00,10.00) + >>> turtle.teleport(20, 30) + >>> turtle.pos() + (20.00,30.00) + + .. versionadded: 3.12 + + .. function:: setx(x) :param x: a number (integer or float) @@ -537,8 +576,7 @@ Turtle motion :skipif: _tkinter is None >>> turtle.color("blue") - >>> turtle.stamp() - 11 + >>> stamp_id = turtle.stamp() >>> turtle.fd(50) @@ -575,15 +613,8 @@ Turtle motion .. doctest:: >>> for i in range(8): - ... turtle.stamp(); turtle.fd(30) - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 + ... unused_stamp_id = turtle.stamp() + ... turtle.fd(30) >>> turtle.clearstamps(2) >>> turtle.clearstamps(-2) >>> turtle.clearstamps() diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 747ba58bb225d4..a15fb5cfa49473 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -75,13 +75,53 @@ Dynamic Type Creation This function looks for items in *bases* that are not instances of :class:`type`, and returns a tuple where each such object that has - an ``__mro_entries__`` method is replaced with an unpacked result of + an :meth:`~object.__mro_entries__` method is replaced with an unpacked result of calling this method. If a *bases* item is an instance of :class:`type`, - or it doesn't have an ``__mro_entries__`` method, then it is included in + or it doesn't have an :meth:`!__mro_entries__` method, then it is included in the return tuple unchanged. .. versionadded:: 3.7 +.. function:: get_original_bases(cls, /) + + Return the tuple of objects originally given as the bases of *cls* before + the :meth:`~object.__mro_entries__` method has been called on any bases + (following the mechanisms laid out in :pep:`560`). This is useful for + introspecting :ref:`Generics <user-defined-generics>`. + + For classes that have an ``__orig_bases__`` attribute, this + function returns the value of ``cls.__orig_bases__``. + For classes without the ``__orig_bases__`` attribute, ``cls.__bases__`` is + returned. + + Examples:: + + from typing import TypeVar, Generic, NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... 
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert Bar.__bases__ == (Foo, float) + assert get_original_bases(Bar) == (Foo[int], float) + + assert Baz.__bases__ == (list,) + assert get_original_bases(Baz) == (list[str],) + + assert Eggs.__bases__ == (tuple,) + assert get_original_bases(Eggs) == (NamedTuple,) + + assert Spam.__bases__ == (dict,) + assert get_original_bases(Spam) == (TypedDict,) + + assert int.__bases__ == (object,) + assert get_original_bases(int) == (object,) + + .. versionadded:: 3.12 + .. seealso:: :pep:`560` - Core support for typing module and generic types @@ -311,6 +351,13 @@ Standard names are defined for the following types: .. versionchanged:: 3.9.2 This type can now be subclassed. + .. seealso:: + + :ref:`Generic Alias Types<types-genericalias>` + In-depth documentation on instances of :class:`!types.GenericAlias` + + :pep:`585` - Type Hinting Generics In Standard Collections + Introducing the :class:`!types.GenericAlias` class .. class:: UnionType diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 80a969e6335abe..409a95d528b5d3 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -41,10 +41,17 @@ For a summary of deprecated features and a deprecation timeline, please see .. seealso:: + For a quick overview of type hints, refer to + `this cheat sheet <https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html>`_. + + The "Type System Reference" section of https://mypy.readthedocs.io/ -- since + the Python typing system is standardised via PEPs, this reference should + broadly apply to most Python type checkers, although some parts may still be + specific to mypy. + The documentation at https://typing.readthedocs.io/ serves as useful reference for type system features, useful typing related tools and typing best practices. - .. _relevant-peps: Relevant PEPs @@ -91,6 +98,9 @@ annotations. These include: *Introducing* :data:`LiteralString` * :pep:`681`: Data Class Transforms *Introducing* the :func:`@dataclass_transform<dataclass_transform>` decorator +* :pep:`692`: Using ``TypedDict`` for more precise ``**kwargs`` typing + *Introducing* a new way of typing ``**kwargs`` with :data:`Unpack` and + :data:`TypedDict` * :pep:`698`: Adding an override decorator to typing *Introducing* the :func:`@override<override>` decorator @@ -416,7 +426,7 @@ to this is that a list of types can be used to substitute a :class:`ParamSpec`:: >>> class Z(Generic[T, P]): ... ... >>> Z[int, [dict, float]] - __main__.Z[int, (<class 'dict'>, <class 'float'>)] + __main__.Z[int, [dict, float]] Furthermore, a generic with only one parameter specification variable will accept @@ -427,9 +437,9 @@ to the former, so the following are equivalent:: >>> class X(Generic[P]): ... ... >>> X[int, str] - __main__.X[(<class 'int'>, <class 'str'>)] + __main__.X[[int, str]] >>> X[[int, str]] - __main__.X[(<class 'int'>, <class 'str'>)] + __main__.X[[int, str]] Do note that generics with :class:`ParamSpec` may not have correct ``__parameters__`` after substitution in some cases because they @@ -1410,8 +1420,10 @@ These are not used in annotations. They are building blocks for creating generic tup: tuple[Unpack[Ts]] In fact, ``Unpack`` can be used interchangeably with ``*`` in the context - of types. You might see ``Unpack`` being used explicitly in older versions - of Python, where ``*`` couldn't be used in certain places:: + of :class:`typing.TypeVarTuple <TypeVarTuple>` and + :class:`builtins.tuple <tuple>` types. 
You might see ``Unpack`` being used + explicitly in older versions of Python, where ``*`` couldn't be used in + certain places:: # In older versions of Python, TypeVarTuple and Unpack # are located in the `typing_extensions` backports package. @@ -1421,6 +1433,21 @@ These are not used in annotations. They are building blocks for creating generic tup: tuple[*Ts] # Syntax error on Python <= 3.10! tup: tuple[Unpack[Ts]] # Semantically equivalent, and backwards-compatible + ``Unpack`` can also be used along with :class:`typing.TypedDict` for typing + ``**kwargs`` in a function signature:: + + from typing import TypedDict, Unpack + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - `name` of type `str` + # and `year` of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + + See :pep:`692` for more details on using ``Unpack`` for ``**kwargs`` typing. + .. versionadded:: 3.11 .. class:: ParamSpec(name, *, bound=None, covariant=False, contravariant=False) @@ -1584,18 +1611,52 @@ These are not used in annotations. They are building blocks for creating generic assert isinstance(open('/some/file'), Closable) + @runtime_checkable + class Named(Protocol): + name: str + + import threading + assert isinstance(threading.Thread(name='Bob'), Named) + .. note:: - :func:`runtime_checkable` will check only the presence of the required - methods, not their type signatures. For example, :class:`ssl.SSLObject` + :func:`!runtime_checkable` will check only the presence of the required + methods or attributes, not their type signatures or types. + For example, :class:`ssl.SSLObject` is a class, therefore it passes an :func:`issubclass` check against :data:`Callable`. However, the ``ssl.SSLObject.__init__`` method exists only to raise a :exc:`TypeError` with a more informative message, therefore making it impossible to call (instantiate) :class:`ssl.SSLObject`. + .. note:: + + An :func:`isinstance` check against a runtime-checkable protocol can be + surprisingly slow compared to an ``isinstance()`` check against + a non-protocol class. Consider using alternative idioms such as + :func:`hasattr` calls for structural checks in performance-sensitive + code. + .. versionadded:: 3.8 + .. versionchanged:: 3.12 + The internal implementation of :func:`isinstance` checks against + runtime-checkable protocols now uses :func:`inspect.getattr_static` + to look up attributes (previously, :func:`hasattr` was used). + As a result, some objects which used to be considered instances + of a runtime-checkable protocol may no longer be considered instances + of that protocol on Python 3.12+, and vice versa. + Most users are unlikely to be affected by this change. + + .. versionchanged:: 3.12 + The members of a runtime-checkable protocol are now considered "frozen" + at runtime as soon as the class has been created. Monkey-patching + attributes onto a runtime-checkable protocol will still work, but will + have no impact on :func:`isinstance` checks comparing objects to the + protocol. See :ref:`"What's new in Python 3.12" <whatsnew-typing-py312>` + for more details. + + Other special directives """""""""""""""""""""""" diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index 1577149e976474..c70153dfcd69e1 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -244,6 +244,10 @@ Command-line options Show local variables in tracebacks. +.. cmdoption:: --durations N + + Show the N slowest test cases (N=0 for all). + .. 
versionadded:: 3.2 The command-line options ``-b``, ``-c`` and ``-f`` were added. @@ -253,10 +257,12 @@ Command-line options .. versionadded:: 3.7 The command-line option ``-k``. +.. versionadded:: 3.12 + The command-line option ``--durations``. + The command line can also be used for test discovery, for running all of the tests in a project or just a subset. - .. _unittest-test-discovery: Test Discovery @@ -2009,6 +2015,13 @@ Loading and running tests A list containing :class:`TestCase` instances that were marked as expected failures, but succeeded. + .. attribute:: collectedDurations + + A list containing 2-tuples of :class:`TestCase` instances and floats + representing the elapsed time of each test which was run. + + .. versionadded:: 3.12 + .. attribute:: shouldStop Set to ``True`` when the execution of tests should stop by :meth:`stop`. @@ -2160,14 +2173,23 @@ Loading and running tests .. versionadded:: 3.4 + .. method:: addDuration(test, elapsed) + + Called when the test case finishes. *elapsed* is the time represented in + seconds, and it includes the execution of cleanup functions. -.. class:: TextTestResult(stream, descriptions, verbosity) + .. versionadded:: 3.12 + +.. class:: TextTestResult(stream, descriptions, verbosity, *, durations=None) A concrete implementation of :class:`TestResult` used by the - :class:`TextTestRunner`. + :class:`TextTestRunner`. Subclasses should accept ``**kwargs`` to ensure + compatibility as the interface changes. .. versionadded:: 3.2 + .. versionadded:: 3.12 + Added *durations* keyword argument. .. data:: defaultTestLoader @@ -2177,7 +2199,8 @@ Loading and running tests .. class:: TextTestRunner(stream=None, descriptions=True, verbosity=1, failfast=False, \ - buffer=False, resultclass=None, warnings=None, *, tb_locals=False) + buffer=False, resultclass=None, warnings=None, *, \ + tb_locals=False, durations=None) A basic test runner implementation that outputs results to a stream. If *stream* is ``None``, the default, :data:`sys.stderr` is used as the output stream. This class @@ -2195,14 +2218,17 @@ Loading and running tests *warnings* to ``None``. .. versionchanged:: 3.2 - Added the ``warnings`` argument. + Added the *warnings* parameter. .. versionchanged:: 3.2 The default stream is set to :data:`sys.stderr` at instantiation time rather than import time. .. versionchanged:: 3.5 - Added the tb_locals parameter. + Added the *tb_locals* parameter. + + .. versionchanged:: 3.12 + Added the *durations* parameter. .. method:: _makeResult() @@ -2255,7 +2281,8 @@ Loading and running tests The *testRunner* argument can either be a test runner class or an already created instance of it. By default ``main`` calls :func:`sys.exit` with - an exit code indicating success or failure of the tests run. + an exit code indicating success (0) or failure (1) of the tests run. + An exit code of 5 indicates that no tests were run. The *testLoader* argument has to be a :class:`TestLoader` instance, and defaults to :data:`defaultTestLoader`. diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 64cc9c388ec30d..1b05458280d896 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -28,8 +28,8 @@ The :mod:`urllib.request` module defines the following functions: .. function:: urlopen(url, data=None[, timeout], *, cafile=None, capath=None, cadefault=False, context=None) - Open the URL *url*, which can be either a string or a - :class:`Request` object. 
+ Open *url*, which can be either a string containing a valid, properly + encoded URL, or a :class:`Request` object. *data* must be an object specifying additional data to be sent to the server, or ``None`` if no such data is needed. See :class:`Request` @@ -192,7 +192,7 @@ The following classes are provided: This class is an abstraction of a URL request. - *url* should be a string containing a valid URL. + *url* should be a string containing a valid, properly encoded URL. *data* must be an object specifying additional data to send to the server, or ``None`` if no such data is needed. Currently HTTP diff --git a/Doc/library/uuid.rst b/Doc/library/uuid.rst index 38b6434f467fd6..94b9a432372195 100644 --- a/Doc/library/uuid.rst +++ b/Doc/library/uuid.rst @@ -186,7 +186,8 @@ The :mod:`uuid` module defines the following functions: .. function:: uuid3(namespace, name) Generate a UUID based on the MD5 hash of a namespace identifier (which is a - UUID) and a name (which is a string). + UUID) and a name (which is a :class:`bytes` object or a string + that will be encoded using UTF-8). .. index:: single: uuid3 @@ -201,7 +202,8 @@ The :mod:`uuid` module defines the following functions: .. function:: uuid5(namespace, name) Generate a UUID based on the SHA-1 hash of a namespace identifier (which is a - UUID) and a name (which is a string). + UUID) and a name (which is a :class:`bytes` object or a string + that will be encoded using UTF-8). .. index:: single: uuid5 diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index 240ab139838db9..9e5672545dea35 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -55,7 +55,7 @@ point to the directories of the virtual environment, whereas :data:`sys.base_prefix` and :data:`sys.base_exec_prefix` point to those of the base Python used to create the environment. It is sufficient to check -``sys.prefix == sys.base_prefix`` to determine if the current interpreter is +``sys.prefix != sys.base_prefix`` to determine if the current interpreter is running from a virtual environment. A virtual environment may be "activated" using a script in its binary directory @@ -284,11 +284,14 @@ creation according to their needs, the :class:`EnvBuilder` class. .. method:: upgrade_dependencies(context) - Upgrades the core venv dependency packages (currently ``pip`` and - ``setuptools``) in the environment. This is done by shelling out to the + Upgrades the core venv dependency packages (currently ``pip``) + in the environment. This is done by shelling out to the ``pip`` executable in the environment. .. versionadded:: 3.9 + .. versionchanged:: 3.12 + + ``setuptools`` is no longer a core venv dependency. .. method:: post_setup(context) diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst index 734b6321e5a7e7..61db8042093627 100644 --- a/Doc/library/webbrowser.rst +++ b/Doc/library/webbrowser.rst @@ -115,13 +115,7 @@ for the controller classes, all defined in this module. 
+------------------------+-----------------------------------------+-------+ | ``'firefox'`` | :class:`Mozilla('mozilla')` | | +------------------------+-----------------------------------------+-------+ -| ``'netscape'`` | :class:`Mozilla('netscape')` | | -+------------------------+-----------------------------------------+-------+ -| ``'galeon'`` | :class:`Galeon('galeon')` | | -+------------------------+-----------------------------------------+-------+ -| ``'epiphany'`` | :class:`Galeon('epiphany')` | | -+------------------------+-----------------------------------------+-------+ -| ``'skipstone'`` | :class:`BackgroundBrowser('skipstone')` | | +| ``'epiphany'`` | :class:`Epiphany('epiphany')` | | +------------------------+-----------------------------------------+-------+ | ``'kfmclient'`` | :class:`Konqueror()` | \(1) | +------------------------+-----------------------------------------+-------+ @@ -129,12 +123,8 @@ for the controller classes, all defined in this module. +------------------------+-----------------------------------------+-------+ | ``'kfm'`` | :class:`Konqueror()` | \(1) | +------------------------+-----------------------------------------+-------+ -| ``'mosaic'`` | :class:`BackgroundBrowser('mosaic')` | | -+------------------------+-----------------------------------------+-------+ | ``'opera'`` | :class:`Opera()` | | +------------------------+-----------------------------------------+-------+ -| ``'grail'`` | :class:`Grail()` | | -+------------------------+-----------------------------------------+-------+ | ``'links'`` | :class:`GenericBrowser('links')` | | +------------------------+-----------------------------------------+-------+ | ``'elinks'`` | :class:`Elinks('elinks')` | | @@ -176,6 +166,11 @@ Notes: .. versionadded:: 3.3 Support for Chrome/Chromium has been added. +.. versionchanged:: 3.12 + Support for several obsolete browsers has been removed. + Removed browsers include Grail, Mosaic, Netscape, Galeon, + Skipstone, Iceape, and Firefox versions 35 and below. + .. deprecated-removed:: 3.11 3.13 :class:`MacOSX` is deprecated, use :class:`MacOSXOSAScript` instead. diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst index e2a085d6e98e67..6f4826cb065c64 100644 --- a/Doc/library/zipfile.rst +++ b/Doc/library/zipfile.rst @@ -7,7 +7,7 @@ .. moduleauthor:: James C. Ahlstrom <jim@interet.com> .. sectionauthor:: James C. Ahlstrom <jim@interet.com> -**Source code:** :source:`Lib/zipfile.py` +**Source code:** :source:`Lib/zipfile/` -------------- diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 1865d09fcaa127..55431f1951e50d 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -991,7 +991,8 @@ Internal types the filename from which the code was compiled; :attr:`co_firstlineno` is the first line number of the function; :attr:`co_lnotab` is a string encoding the mapping from bytecode offsets to line numbers (for details - see the source code of the interpreter); :attr:`co_stacksize` is the + see the source code of the interpreter, is deprecated since 3.12 + and may be removed in 3.14); :attr:`co_stacksize` is the required stack size; :attr:`co_flags` is an integer encoding a number of flags for the interpreter. @@ -2085,15 +2086,28 @@ When a class definition is executed, the following steps occur: Resolving MRO entries ^^^^^^^^^^^^^^^^^^^^^ -If a base that appears in class definition is not an instance of :class:`type`, -then an ``__mro_entries__`` method is searched on it. 
If found, it is called -with the original bases tuple. This method must return a tuple of classes that -will be used instead of this base. The tuple may be empty, in such case -the original base is ignored. +.. method:: object.__mro_entries__(self, bases) + + If a base that appears in a class definition is not an instance of + :class:`type`, then an :meth:`!__mro_entries__` method is searched on the base. + If an :meth:`!__mro_entries__` method is found, the base is substituted with the + result of a call to :meth:`!__mro_entries__` when creating the class. + The method is called with the original bases tuple + passed to the *bases* parameter, and must return a tuple + of classes that will be used instead of the base. The returned tuple may be + empty: in these cases, the original base is ignored. .. seealso:: - :pep:`560` - Core support for typing module and generic types + :func:`types.resolve_bases` + Dynamically resolve bases that are not instances of :class:`type`. + + :func:`types.get_original_bases` + Retrieve a class's "original bases" prior to modifications by + :meth:`~object.__mro_entries__`. + + :pep:`560` + Core support for typing module and generic types. Determining the appropriate metaclass diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index b22b5251f1de46..57eb5403243eef 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -1077,4 +1077,5 @@ methods to finders and loaders. .. [#fnpic] In legacy code, it is possible to find instances of :class:`imp.NullImporter` in the :data:`sys.path_importer_cache`. It is recommended that code be changed to use ``None`` instead. See - :ref:`portingpythoncode` for more details. + :ref:`portingpythoncode` for more details. Note that the ``imp`` module + was removed in Python 3.12. diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt new file mode 100644 index 00000000000000..d0390a04ea6dd8 --- /dev/null +++ b/Doc/requirements-oldest-sphinx.txt @@ -0,0 +1,38 @@ +# Requirements to build the Python documentation, for the oldest supported +# Sphinx version. +# +# We pin Sphinx and all of its dependencies to ensure a consistent environment. + +blurb +python-docs-theme>=2022.1 + +# Generated from: +# pip install "Sphinx~=3.2.0" "docutils<0.17" "Jinja2<3" "MarkupSafe<2" +# pip freeze +# +# Sphinx 3.2 comes from ``needs_sphinx = '3.2'`` in ``Doc/conf.py``. +# Docutils<0.17, Jinja2<3, and MarkupSafe<2 are additionally specified as +# Sphinx 3.2 is incompatible with newer releases of these packages. + +Sphinx==3.2.1 +alabaster==0.7.13 +Babel==2.12.1 +certifi==2022.12.7 +charset-normalizer==3.1.0 +colorama==0.4.6 +docutils==0.16 +idna==3.4 +imagesize==1.4.1 +Jinja2==2.11.3 +MarkupSafe==1.1.1 +packaging==23.1 +Pygments==2.15.1 +requests==2.29.0 +snowballstemmer==2.2.0 +sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.5 +urllib3==1.26.15 diff --git a/Doc/requirements.txt b/Doc/requirements.txt index 71d3cd61e53877..9cbd15c2209dc6 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -1,4 +1,7 @@ # Requirements to build the Python documentation +# +# Note that when updating this file, you will likely also have to update +# the Doc/constraints.txt file. # Sphinx version is pinned so that new versions that introduce new warnings # won't suddenly cause build failures. 
Updating the version is fine as long @@ -13,3 +16,5 @@ sphinxext-opengraph==0.7.5 # The theme used by the documentation is stored separately, so we need # to install that as well. python-docs-theme>=2022.1 + +-c constraints.txt diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore new file mode 100644 index 00000000000000..1d3503bf06f085 --- /dev/null +++ b/Doc/tools/.nitignore @@ -0,0 +1,299 @@ +# All RST files under Doc/ -- except these -- must pass Sphinx nit-picky mode, +# as tested on the CI via touch-clean-files.py in doc.yml. +# Add blank lines between files and keep them sorted lexicographically +# to help avoid merge conflicts. + +Doc/c-api/allocation.rst +Doc/c-api/apiabiversion.rst +Doc/c-api/arg.rst +Doc/c-api/bool.rst +Doc/c-api/buffer.rst +Doc/c-api/bytes.rst +Doc/c-api/call.rst +Doc/c-api/capsule.rst +Doc/c-api/cell.rst +Doc/c-api/code.rst +Doc/c-api/codec.rst +Doc/c-api/complex.rst +Doc/c-api/conversion.rst +Doc/c-api/datetime.rst +Doc/c-api/descriptor.rst +Doc/c-api/dict.rst +Doc/c-api/exceptions.rst +Doc/c-api/file.rst +Doc/c-api/float.rst +Doc/c-api/gcsupport.rst +Doc/c-api/import.rst +Doc/c-api/init.rst +Doc/c-api/init_config.rst +Doc/c-api/intro.rst +Doc/c-api/iterator.rst +Doc/c-api/long.rst +Doc/c-api/mapping.rst +Doc/c-api/marshal.rst +Doc/c-api/memory.rst +Doc/c-api/memoryview.rst +Doc/c-api/module.rst +Doc/c-api/none.rst +Doc/c-api/object.rst +Doc/c-api/refcounting.rst +Doc/c-api/sequence.rst +Doc/c-api/set.rst +Doc/c-api/stable.rst +Doc/c-api/structures.rst +Doc/c-api/sys.rst +Doc/c-api/tuple.rst +Doc/c-api/type.rst +Doc/c-api/typehints.rst +Doc/c-api/typeobj.rst +Doc/c-api/unicode.rst +Doc/c-api/veryhigh.rst +Doc/c-api/weakref.rst +Doc/extending/embedding.rst +Doc/extending/extending.rst +Doc/extending/newtypes.rst +Doc/extending/newtypes_tutorial.rst +Doc/faq/design.rst +Doc/faq/extending.rst +Doc/faq/gui.rst +Doc/faq/library.rst +Doc/faq/programming.rst +Doc/glossary.rst +Doc/howto/curses.rst +Doc/howto/descriptor.rst +Doc/howto/enum.rst +Doc/howto/functional.rst +Doc/howto/instrumentation.rst +Doc/howto/isolating-extensions.rst +Doc/howto/logging-cookbook.rst +Doc/howto/logging.rst +Doc/howto/regex.rst +Doc/howto/sorting.rst +Doc/howto/unicode.rst +Doc/howto/urllib2.rst +Doc/install/index.rst +Doc/library/2to3.rst +Doc/library/__future__.rst +Doc/library/_thread.rst +Doc/library/abc.rst +Doc/library/aifc.rst +Doc/library/ast.rst +Doc/library/asyncio-dev.rst +Doc/library/asyncio-eventloop.rst +Doc/library/asyncio-extending.rst +Doc/library/asyncio-future.rst +Doc/library/asyncio-policy.rst +Doc/library/asyncio-stream.rst +Doc/library/asyncio-subprocess.rst +Doc/library/asyncio-task.rst +Doc/library/audioop.rst +Doc/library/bdb.rst +Doc/library/bisect.rst +Doc/library/bz2.rst +Doc/library/calendar.rst +Doc/library/cgi.rst +Doc/library/chunk.rst +Doc/library/cmath.rst +Doc/library/cmd.rst +Doc/library/code.rst +Doc/library/codecs.rst +Doc/library/codeop.rst +Doc/library/collections.abc.rst +Doc/library/collections.rst +Doc/library/compileall.rst +Doc/library/concurrent.futures.rst +Doc/library/concurrent.rst +Doc/library/configparser.rst +Doc/library/constants.rst +Doc/library/contextlib.rst +Doc/library/copy.rst +Doc/library/csv.rst +Doc/library/ctypes.rst +Doc/library/curses.ascii.rst +Doc/library/curses.rst +Doc/library/datetime.rst +Doc/library/dbm.rst +Doc/library/decimal.rst +Doc/library/devmode.rst +Doc/library/difflib.rst +Doc/library/dis.rst +Doc/library/doctest.rst +Doc/library/email.charset.rst +Doc/library/email.compat32-message.rst 
+Doc/library/email.encoders.rst +Doc/library/email.errors.rst +Doc/library/email.generator.rst +Doc/library/email.headerregistry.rst +Doc/library/email.message.rst +Doc/library/email.mime.rst +Doc/library/email.parser.rst +Doc/library/email.policy.rst +Doc/library/enum.rst +Doc/library/exceptions.rst +Doc/library/faulthandler.rst +Doc/library/fcntl.rst +Doc/library/filecmp.rst +Doc/library/fileinput.rst +Doc/library/fractions.rst +Doc/library/ftplib.rst +Doc/library/functions.rst +Doc/library/functools.rst +Doc/library/getopt.rst +Doc/library/getpass.rst +Doc/library/gettext.rst +Doc/library/graphlib.rst +Doc/library/gzip.rst +Doc/library/hashlib.rst +Doc/library/http.client.rst +Doc/library/http.cookiejar.rst +Doc/library/http.cookies.rst +Doc/library/http.server.rst +Doc/library/idle.rst +Doc/library/importlib.resources.abc.rst +Doc/library/importlib.resources.rst +Doc/library/importlib.rst +Doc/library/inspect.rst +Doc/library/io.rst +Doc/library/json.rst +Doc/library/locale.rst +Doc/library/logging.config.rst +Doc/library/logging.handlers.rst +Doc/library/logging.rst +Doc/library/lzma.rst +Doc/library/mailbox.rst +Doc/library/mmap.rst +Doc/library/msilib.rst +Doc/library/msvcrt.rst +Doc/library/multiprocessing.rst +Doc/library/multiprocessing.shared_memory.rst +Doc/library/netrc.rst +Doc/library/nntplib.rst +Doc/library/numbers.rst +Doc/library/operator.rst +Doc/library/optparse.rst +Doc/library/os.path.rst +Doc/library/os.rst +Doc/library/ossaudiodev.rst +Doc/library/pickle.rst +Doc/library/pickletools.rst +Doc/library/platform.rst +Doc/library/plistlib.rst +Doc/library/poplib.rst +Doc/library/posix.rst +Doc/library/pprint.rst +Doc/library/profile.rst +Doc/library/pty.rst +Doc/library/py_compile.rst +Doc/library/pyclbr.rst +Doc/library/pydoc.rst +Doc/library/pyexpat.rst +Doc/library/random.rst +Doc/library/re.rst +Doc/library/readline.rst +Doc/library/reprlib.rst +Doc/library/resource.rst +Doc/library/rlcompleter.rst +Doc/library/sched.rst +Doc/library/select.rst +Doc/library/selectors.rst +Doc/library/shelve.rst +Doc/library/shutil.rst +Doc/library/signal.rst +Doc/library/site.rst +Doc/library/smtplib.rst +Doc/library/socket.rst +Doc/library/socketserver.rst +Doc/library/ssl.rst +Doc/library/stat.rst +Doc/library/stdtypes.rst +Doc/library/string.rst +Doc/library/struct.rst +Doc/library/subprocess.rst +Doc/library/sunau.rst +Doc/library/sys.rst +Doc/library/sys_path_init.rst +Doc/library/sysconfig.rst +Doc/library/syslog.rst +Doc/library/tarfile.rst +Doc/library/telnetlib.rst +Doc/library/tempfile.rst +Doc/library/termios.rst +Doc/library/test.rst +Doc/library/textwrap.rst +Doc/library/threading.rst +Doc/library/time.rst +Doc/library/tkinter.rst +Doc/library/tkinter.scrolledtext.rst +Doc/library/tkinter.tix.rst +Doc/library/tkinter.ttk.rst +Doc/library/traceback.rst +Doc/library/tty.rst +Doc/library/turtle.rst +Doc/library/unittest.mock-examples.rst +Doc/library/unittest.mock.rst +Doc/library/unittest.rst +Doc/library/urllib.error.rst +Doc/library/urllib.parse.rst +Doc/library/urllib.request.rst +Doc/library/uuid.rst +Doc/library/wave.rst +Doc/library/weakref.rst +Doc/library/webbrowser.rst +Doc/library/winreg.rst +Doc/library/winsound.rst +Doc/library/wsgiref.rst +Doc/library/xdrlib.rst +Doc/library/xml.dom.minidom.rst +Doc/library/xml.dom.pulldom.rst +Doc/library/xml.dom.rst +Doc/library/xml.etree.elementtree.rst +Doc/library/xml.rst +Doc/library/xml.sax.handler.rst +Doc/library/xml.sax.reader.rst +Doc/library/xml.sax.rst +Doc/library/xml.sax.utils.rst +Doc/library/xmlrpc.client.rst 
+Doc/library/xmlrpc.rst +Doc/library/xmlrpc.server.rst +Doc/library/zlib.rst +Doc/license.rst +Doc/reference/compound_stmts.rst +Doc/reference/datamodel.rst +Doc/reference/expressions.rst +Doc/reference/import.rst +Doc/reference/lexical_analysis.rst +Doc/reference/simple_stmts.rst +Doc/tutorial/appendix.rst +Doc/tutorial/classes.rst +Doc/tutorial/controlflow.rst +Doc/tutorial/datastructures.rst +Doc/tutorial/errors.rst +Doc/tutorial/inputoutput.rst +Doc/tutorial/interactive.rst +Doc/tutorial/introduction.rst +Doc/tutorial/modules.rst +Doc/tutorial/stdlib2.rst +Doc/using/cmdline.rst +Doc/using/configure.rst +Doc/using/unix.rst +Doc/using/windows.rst +Doc/whatsnew/2.0.rst +Doc/whatsnew/2.1.rst +Doc/whatsnew/2.2.rst +Doc/whatsnew/2.3.rst +Doc/whatsnew/2.4.rst +Doc/whatsnew/2.5.rst +Doc/whatsnew/2.6.rst +Doc/whatsnew/2.7.rst +Doc/whatsnew/3.0.rst +Doc/whatsnew/3.1.rst +Doc/whatsnew/3.2.rst +Doc/whatsnew/3.3.rst +Doc/whatsnew/3.4.rst +Doc/whatsnew/3.5.rst +Doc/whatsnew/3.6.rst +Doc/whatsnew/3.7.rst +Doc/whatsnew/3.8.rst +Doc/whatsnew/3.9.rst +Doc/whatsnew/3.10.rst +Doc/whatsnew/3.11.rst diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py index 5af56433f41573..3551bfa4c0f133 100644 --- a/Doc/tools/extensions/c_annotations.py +++ b/Doc/tools/extensions/c_annotations.py @@ -20,6 +20,7 @@ """ from os import path +import docutils from docutils import nodes from docutils.parsers.rst import directives from docutils.parsers.rst import Directive @@ -41,6 +42,16 @@ } +# Monkeypatch nodes.Node.findall for forwards compatability +# This patch can be dropped when the minimum Sphinx version is 4.4.0 +# or the minimum Docutils version is 0.18.1. +if docutils.__version_info__ < (0, 18, 1): + def findall(self, *args, **kwargs): + return iter(self.traverse(*args, **kwargs)) + + nodes.Node.findall = findall + + class RCEntry: def __init__(self, name): self.name = name @@ -87,7 +98,7 @@ def __init__(self, refcount_filename, stable_abi_file): self.stable_abi_data[name] = record def add_annotations(self, app, doctree): - for node in doctree.traverse(addnodes.desc_content): + for node in doctree.findall(addnodes.desc_content): par = node.parent if par['domain'] != 'c': continue diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html index 460161cd320223..b91f8138553e62 100644 --- a/Doc/tools/templates/layout.html +++ b/Doc/tools/templates/layout.html @@ -11,11 +11,6 @@ {%- if is_deployment_preview %} <div id="deployment-preview-warning" style="padding: .5em; text-align: center; background-color: #fff2ba; color: #6a580e;"> - <div style="float: right; margin-top: -10px; margin-left: 10px;"> - <a href="https://www.netlify.com"> - <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" /> - </a> - </div> {% trans %}This is a deploy preview created from a <a href="{{ repository_url }}/pull/{{ pr_id }}">pull request</a>. For authoritative documentation, see the {% endtrans %} <a href="https://docs.python.org/3/{{ pagename }}{{ file_suffix }}">{% trans %} the current stable release{% endtrans %}</a>. diff --git a/Doc/tools/touch-clean-files.py b/Doc/tools/touch-clean-files.py new file mode 100644 index 00000000000000..19bc1be31deb26 --- /dev/null +++ b/Doc/tools/touch-clean-files.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +""" +Touch files that must pass Sphinx nit-picky mode +so they are rebuilt and we can catch regressions. 
+""" + +from pathlib import Path + +wrong_directory_msg = "Must run this script from the repo root" +assert Path("Doc").exists() and Path("Doc").is_dir(), wrong_directory_msg + +# Exclude these whether they're dirty or clean, +# because they trigger a rebuild of dirty files. +EXCLUDE_FILES = { + Path("Doc/whatsnew/changelog.rst"), +} + +# Subdirectories of Doc/ to exclude. +EXCLUDE_SUBDIRS = { + ".env", + ".venv", + "env", + "includes", + "venv", +} + +ALL_RST = { + rst for rst in Path("Doc/").rglob("*.rst") if rst.parts[1] not in EXCLUDE_SUBDIRS +} + +with Path("Doc/tools/.nitignore").open() as clean_files: + DIRTY = { + Path(filename.strip()) + for filename in clean_files + if filename.strip() and not filename.startswith("#") + } + +CLEAN = ALL_RST - DIRTY - EXCLUDE_FILES + +print("Touching:") +for filename in sorted(CLEAN): + print(filename) + filename.touch() +print(f"Touched {len(CLEAN)} files") diff --git a/Doc/tools/warnings-to-gh-actions.py b/Doc/tools/warnings-to-gh-actions.py new file mode 100644 index 00000000000000..da33a4ede07abc --- /dev/null +++ b/Doc/tools/warnings-to-gh-actions.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 + +""" +Convert Sphinx warning messages to GitHub Actions. + +Converts lines like: + .../Doc/library/cgi.rst:98: WARNING: reference target not found +to: + ::warning file=.../Doc/library/cgi.rst,line=98::reference target not found + +Non-matching lines are echoed unchanged. + +see: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-warning-message +""" + +import re +import sys + +pattern = re.compile(r'(?P<file>[^:]+):(?P<line>\d+): WARNING: (?P<msg>.+)') + +for line in sys.stdin: + if match := pattern.fullmatch(line.strip()): + print('::warning file={file},line={line}::{msg}'.format_map(match)) + else: + print(line) diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst index e09c829b8e9721..ca5dc3314c63b6 100644 --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -160,7 +160,7 @@ accessing ``.args``. :: >>> try: ... raise Exception('spam', 'eggs') ... except Exception as inst: - ... print(type(inst)) # the exception instance + ... print(type(inst)) # the exception type ... print(inst.args) # arguments stored in .args ... print(inst) # __str__ allows args to be printed directly, ... # but may be overridden in exception subclasses diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 2a4d070ec057df..b35e8454fa2a1a 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -495,7 +495,8 @@ Miscellaneous options Reserved for various implementation-specific options. CPython currently defines the following possible values: - * ``-X faulthandler`` to enable :mod:`faulthandler`; + * ``-X faulthandler`` to enable :mod:`faulthandler`. + See also :envvar:`PYTHONFAULTHANDLER`. * ``-X showrefcount`` to output the total reference count and number of used memory blocks when the program finishes or after each statement in the interactive interpreter. This only works on :ref:`debug builds @@ -503,8 +504,9 @@ Miscellaneous options * ``-X tracemalloc`` to start tracing Python memory allocations using the :mod:`tracemalloc` module. By default, only the most recent frame is stored in a traceback of a trace. Use ``-X tracemalloc=NFRAME`` to start - tracing with a traceback limit of *NFRAME* frames. See the - :func:`tracemalloc.start` for more information. + tracing with a traceback limit of *NFRAME* frames. + See :func:`tracemalloc.start` and :envvar:`PYTHONTRACEMALLOC` + for more information. 
* ``-X int_max_str_digits`` configures the :ref:`integer string conversion length limitation <int_max_str_digits>`. See also :envvar:`PYTHONINTMAXSTRDIGITS`. @@ -519,6 +521,7 @@ Miscellaneous options * ``-X utf8`` enables the :ref:`Python UTF-8 Mode <utf8-mode>`. ``-X utf8=0`` explicitly disables :ref:`Python UTF-8 Mode <utf8-mode>` (even when it would otherwise activate automatically). + See also :envvar:`PYTHONUTF8`. * ``-X pycache_prefix=PATH`` enables writing ``.pyc`` files to a parallel tree rooted at the given directory instead of to the code tree. See also :envvar:`PYTHONPYCACHEPREFIX`. @@ -861,7 +864,9 @@ conflict. Python memory allocations using the :mod:`tracemalloc` module. The value of the variable is the maximum number of frames stored in a traceback of a trace. For example, ``PYTHONTRACEMALLOC=1`` stores only the most recent - frame. See the :func:`tracemalloc.start` for more information. + frame. + See the :func:`tracemalloc.start` function for more information. + This is equivalent to setting the :option:`-X` ``tracemalloc`` option. .. versionadded:: 3.4 @@ -869,8 +874,8 @@ conflict. .. envvar:: PYTHONPROFILEIMPORTTIME If this environment variable is set to a non-empty string, Python will - show how long each import takes. This is exactly equivalent to setting - ``-X importtime`` on the command line. + show how long each import takes. + This is equivalent to setting the :option:`-X` ``importtime`` option. .. versionadded:: 3.7 @@ -1012,6 +1017,7 @@ conflict. If this environment variable is set to a non-empty string, enable :ref:`Python Development Mode <devmode>`, introducing additional runtime checks that are too expensive to be enabled by default. + This is equivalent to setting the :option:`-X` ``dev`` option. .. versionadded:: 3.7 diff --git a/Doc/using/unix.rst b/Doc/using/unix.rst index 067ff4cce5e48d..0044eb07f56eec 100644 --- a/Doc/using/unix.rst +++ b/Doc/using/unix.rst @@ -54,13 +54,6 @@ On FreeBSD and OpenBSD pkg_add ftp://ftp.openbsd.org/pub/OpenBSD/4.2/packages/i386/python-2.5.1p2.tgz -On OpenSolaris --------------- - -You can get Python from `OpenCSW <https://www.opencsw.org/>`_. Various versions -of Python are available and can be installed with e.g. ``pkgutil -i python27``. - - .. _building-python-on-unix: Building Python diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc index 43ee6b7807d57e..2fc90126482268 100644 --- a/Doc/using/venv-create.inc +++ b/Doc/using/venv-create.inc @@ -61,12 +61,16 @@ The command, if run with ``-h``, will show the available options:: environment (pip is bootstrapped by default) --prompt PROMPT Provides an alternative prompt prefix for this environment. - --upgrade-deps Upgrade core dependencies: pip setuptools to the + --upgrade-deps Upgrade core dependencies (pip) to the latest version in PyPI Once an environment has been created, you may wish to activate it, e.g. by sourcing an activate script in its bin directory. +.. versionchanged:: 3.12 + + ``setuptools`` is no longer a core venv dependency. + .. versionchanged:: 3.9 Add ``--upgrade-deps`` option to upgrade pip + setuptools to the latest on PyPI @@ -104,4 +108,3 @@ invoked to bootstrap ``pip`` into the virtual environment. Multiple paths can be given to ``venv``, in which case an identical virtual environment will be created, according to the given options, at each provided path. 
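As a rough sketch, the same multi-path creation can be done programmatically
through :class:`venv.EnvBuilder`; the target directory names here are
hypothetical::

   import venv

   # Equivalent in spirit to:  python -m venv --upgrade-deps env-a env-b
   builder = venv.EnvBuilder(with_pip=True, upgrade_deps=True)
   for target in ("env-a", "env-b"):   # hypothetical paths
       builder.create(target)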
- diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 1c4e41c0e0e239..380950eb507ffb 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -470,7 +470,7 @@ user's system, including environment variables, system registry settings, and installed packages. The standard library is included as pre-compiled and optimized ``.pyc`` files in a ZIP, and ``python3.dll``, ``python37.dll``, ``python.exe`` and ``pythonw.exe`` are all provided. Tcl/tk (including all -dependants, such as Idle), pip and the Python documentation are not included. +dependents, such as Idle), pip and the Python documentation are not included. .. note:: diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst index 34f2656f765c7d..4ee2aacb108a36 100644 --- a/Doc/whatsnew/2.6.rst +++ b/Doc/whatsnew/2.6.rst @@ -172,7 +172,7 @@ this edition of "What's New in Python" links to the bug/patch item for each change. Hosting of the Python bug tracker is kindly provided by -`Upfront Systems <http://www.upfrontsoftware.co.za>`__ +`Upfront Systems <https://upfrontsoftware.co.za>`__ of Stellenbosch, South Africa. Martin von Löwis put a lot of effort into importing existing bugs and patches from SourceForge; his scripts for this import operation are at diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index 810a2cd2537c34..36afcb163f1afc 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -2104,7 +2104,7 @@ Changes to Python's build process and to the C API include: * The latest release of the GNU Debugger, GDB 7, can be `scripted using Python - <https://sourceware.org/gdb/current/onlinedocs/gdb/Python.html>`__. + <https://web.archive.org/web/20110715084810/http://sourceware.org/gdb/current/onlinedocs/gdb/Python.html>`__. When you begin debugging an executable program P, GDB will look for a file named ``P-gdb.py`` and automatically read it. Dave Malcolm contributed a :file:`python-gdb.py` that adds a number of diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst index 10fcfb6a0b5639..687719a260a61c 100644 --- a/Doc/whatsnew/3.11.rst +++ b/Doc/whatsnew/3.11.rst @@ -666,19 +666,11 @@ enum for :meth:`~object.__str__` and :meth:`~object.__format__` (used by :func:`str`, :func:`format` and :term:`f-string`\s). -* Changed :class:`~enum.IntEnum`, :class:`~enum.IntFlag` and :class:`~enum.StrEnum` - to now inherit from :class:`~enum.ReprEnum`, - so their :func:`str` output now matches :func:`format` - (both ``str(AnIntEnum.ONE)`` and ``format(AnIntEnum.ONE)`` return ``'1'``, - whereas before ``str(AnIntEnum.ONE)`` returned ``'AnIntEnum.ONE'``. - -* Changed :meth:`Enum.__format__() <enum.Enum.__format__>` - (the default for :func:`format`, :meth:`str.format` and :term:`f-string`\s) - of enums with mixed-in types (e.g. :class:`int`, :class:`str`) - to also include the class name in the output, not just the member's key. - This matches the existing behavior of :meth:`enum.Enum.__str__`, - returning e.g. ``'AnEnum.MEMBER'`` for an enum ``AnEnum(str, Enum)`` - instead of just ``'MEMBER'``. +* Changed :meth:`Enum.__format__() <enum.Enum.__format__>` (the default for + :func:`format`, :meth:`str.format` and :term:`f-string`\s) to always produce + the same result as :meth:`Enum.__str__()`: for enums inheriting from + :class:`~enum.ReprEnum` it will be the member's value; for all other enums + it will be the enum and member name (e.g. ``Color.RED``). 
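A small sketch of the behaviour described above; the ``Color`` and ``Answer``
enums are made up for illustration::

   from enum import Enum, IntEnum

   class Color(Enum):
       RED = 1

   class Answer(IntEnum):   # IntEnum inherits from ReprEnum in 3.11+
       YES = 1

   # format() now matches str() for both kinds of enum:
   print(f"{Color.RED}", str(Color.RED))      # Color.RED Color.RED
   print(f"{Answer.YES}", str(Answer.YES))    # 1 1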
* Added a new *boundary* class parameter to :class:`~enum.Flag` enums and the :class:`~enum.FlagBoundary` enum with its options, diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index b55b9619fac226..a3fce7ccacf7c1 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -66,6 +66,10 @@ Summary -- Release highlights .. PEP-sized items next. +New typing features: + +* :ref:`whatsnew312-pep692` + Important deprecations, removals or restrictions: * :pep:`623`, Remove wstr from Unicode @@ -137,6 +141,43 @@ New Features (Design by Pablo Galindo. Contributed by Pablo Galindo and Christian Heimes with contributions from Gregory P. Smith [Google] and Mark Shannon in :gh:`96123`.) +* The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`, + have a new a *filter* argument that allows limiting tar features than may be + surprising or dangerous, such as creating files outside the destination + directory. + See :ref:`tarfile-extraction-filter` for details. + In Python 3.14, the default will switch to ``'data'``. + (Contributed by Petr Viktorin in :pep:`706`.) + +New Features Related to Type Hints +================================== + +This section covers major changes affecting :pep:`484` type hints and +the :mod:`typing` module. + +.. _whatsnew312-pep692: + +PEP 692: Using ``TypedDict`` for more precise ``**kwargs`` typing +----------------------------------------------------------------- + +Typing ``**kwargs`` in a function signature as introduced by :pep:`484` allowed +for valid annotations only in cases where all of the ``**kwargs`` were of the +same type. + +This PEP specifies a more precise way of typing ``**kwargs`` by relying on +typed dictionaries:: + + from typing import TypedDict, Unpack + + class Movie(TypedDict): + name: str + year: int + + def foo(**kwargs: Unpack[Movie]): ... + +See :pep:`692` for more details. + +(PEP written by Franek Magiera) Other Language Changes @@ -190,7 +231,16 @@ Other Language Changes (Contributed by Nikita Sobolev in :gh:`100581`.) * :class:`slice` objects are now hashable, allowing them to be used as dict keys and - set items. (Contributed by Furkan Onder in :gh:`101264`.) + set items. (Contributed by Will Bradshaw and Furkan Onder in :gh:`101264`.) + +* Exceptions raised in a typeobject's ``__set_name__`` method are no longer + wrapped by a :exc:`RuntimeError`. Context information is added to the + exception as a :pep:`678` note. (Contributed by Irit Katriel in :gh:`77757`.) + +* When a ``try-except*`` construct handles the entire :exc:`ExceptionGroup` + and raises one other exception, that exception is no longer wrapped in an + :exc:`ExceptionGroup`. (Contributed by Irit Katriel in :gh:`103590`.) + New Modules =========== @@ -210,6 +260,11 @@ array asyncio ------- +* The performance of writing to sockets in :mod:`asyncio` has been + significantly improved. ``asyncio`` now avoids unnecessary copying when + writing to sockets and uses :meth:`~socket.socket.sendmsg` if the platform + supports it. (Contributed by Kumar Aditya in :gh:`91166`.) + * On Linux, :mod:`asyncio` uses :class:`~asyncio.PidfdChildWatcher` by default if :func:`os.pidfd_open` is available and functional instead of :class:`~asyncio.ThreadedChildWatcher`. @@ -241,20 +296,38 @@ asyncio :mod:`asyncio` does not support legacy generator-based coroutines. (Contributed by Kumar Aditya in :gh:`102748`.) -* :func:`asyncio.wait` now accepts generators yielding tasks. 
+* :func:`asyncio.wait` and :func:`asyncio.as_completed` now accepts generators + yielding tasks. (Contributed by Kumar Aditya in :gh:`78530`.) +calendar +-------- + +* Add enums :data:`~calendar.Month` and :data:`~calendar.Day`. + (Contributed by Prince Roshan in :gh:`103636`.) + +csv +--- + +* Add :data:`~csv.QUOTE_NOTNULL` and :data:`~csv.QUOTE_STRINGS` flags to + provide finer grained control of ``None`` and empty strings by + :class:`~csv.writer` objects. + inspect ------- * Add :func:`inspect.markcoroutinefunction` to mark sync functions that return - a :term:`coroutine` for use with :func:`iscoroutinefunction`. + a :term:`coroutine` for use with :func:`inspect.iscoroutinefunction`. (Contributed Carlton Gibson in :gh:`99247`.) * Add :func:`inspect.getasyncgenstate` and :func:`inspect.getasyncgenlocals` for determining the current state of asynchronous generators. (Contributed by Thomas Krennwallner in :issue:`35759`.) +* The performance of :func:`inspect.getattr_static` has been considerably + improved. Most calls to the function should be around 2x faster than they + were in Python 3.11. (Contributed by Alex Waygood in :gh:`103193`.) + pathlib ------- @@ -277,7 +350,7 @@ dis * Pseudo instruction opcodes (which are used by the compiler but do not appear in executable bytecode) are now exposed in the :mod:`dis` module. - :data:`~dis.HAVE_ARGUMENT` is still relevant to real opcodes, + :opcode:`HAVE_ARGUMENT` is still relevant to real opcodes, but it is not useful for pseudo instructions. Use the new :data:`~dis.hasarg` collection instead. (Contributed by Irit Katriel in :gh:`94216`.) @@ -288,6 +361,13 @@ fractions * Objects of type :class:`fractions.Fraction` now support float-style formatting. (Contributed by Mark Dickinson in :gh:`100161`.) +itertools +--------- + +* Added :class:`itertools.batched()` for collecting into even-sized + tuples where the last batch may be shorter than the rest. + (Contributed by Raymond Hettinger in :gh:`98363`.) + math ---- @@ -337,6 +417,26 @@ shutil of the process to *root_dir* to perform archiving. (Contributed by Serhiy Storchaka in :gh:`74696`.) +* :func:`shutil.rmtree` now accepts a new argument *onexc* which is an + error handler like *onerror* but which expects an exception instance + rather than a *(typ, val, tb)* triplet. *onerror* is deprecated and + will be removed in Python 3.14. + (Contributed by Irit Katriel in :gh:`102828`.) + +* :func:`shutil.which` now consults the *PATHEXT* environment variable to + find matches within *PATH* on Windows even when the given *cmd* includes + a directory component. + (Contributed by Charles Machalow in :gh:`103179`.) + + :func:`shutil.which` will call ``NeedCurrentDirectoryForExePathW`` when + querying for executables on Windows to determine if the current working + directory should be prepended to the search path. + (Contributed by Charles Machalow in :gh:`103179`.) + + :func:`shutil.which` will return a path matching the *cmd* with a component + from ``PATHEXT`` prior to a direct match elsewhere in the search path on + Windows. + (Contributed by Charles Machalow in :gh:`103179`.) sqlite3 ------- @@ -351,6 +451,16 @@ sqlite3 :ref:`transaction handling <sqlite3-transaction-control-autocommit>`. (Contributed by Erlend E. Aasland in :gh:`83638`.) +* Add *entrypoint* keyword-only parameter to + :meth:`~sqlite3.Connection.load_extension`, + for overriding the SQLite extension entry point. + (Contributed by Erlend E. Aasland in :gh:`103015`.) 
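A hedged sketch of the new keyword-only parameter; the extension path
``./vector0`` and the entry point name ``sqlite3_vector_init`` are
placeholders, and the interpreter must be built with extension loading
enabled::

   import sqlite3

   con = sqlite3.connect(":memory:")
   con.enable_load_extension(True)
   # Both the shared library and the entry point below are hypothetical names;
   # substitute a real SQLite extension and its init function.
   con.load_extension("./vector0", entrypoint="sqlite3_vector_init")
   con.enable_load_extension(False)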
+ +* Add :meth:`~sqlite3.Connection.getconfig` and + :meth:`~sqlite3.Connection.setconfig` to :class:`~sqlite3.Connection` + to make configuration changes to a database connection. + (Contributed by Erlend E. Aasland in :gh:`103489`.) + threading --------- @@ -359,12 +469,40 @@ threading profiling functions in all running threads in addition to the calling one. (Contributed by Pablo Galindo in :gh:`93503`.) +types +----- + +* Add :func:`types.get_original_bases` to allow for further introspection of + :ref:`user-defined-generics` when subclassed. (Contributed by + James Hilton-Balfe and Alex Waygood in :gh:`101827`.) + unicodedata ----------- * The Unicode database has been updated to version 15.0.0. (Contributed by Benjamin Peterson in :gh:`96734`). +unittest +-------- + +Added ``--durations`` command line option, showing the N slowest test cases:: + + python3 -m unittest --durations=3 lib.tests.test_threading + ..... + Slowest test durations + ---------------------------------------------------------------------- + 1.210s test_timeout (Lib.test.test_threading.BarrierTests) + 1.003s test_default_timeout (Lib.test.test_threading.BarrierTests) + 0.518s test_timeout (Lib.test.test_threading.EventTests) + + (0.000 durations hidden. Use -v to show these durations.) + ---------------------------------------------------------------------- + Ran 158 tests in 9.869s + + OK (skipped=3) + +(Contributed by Giampaolo Rodola in :issue:`4080`) + uuid ---- @@ -374,8 +512,12 @@ uuid tempfile -------- -The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter -*delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.) +* The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter + *delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.) +* :func:`tempfile.mkdtemp` now always returns an absolute path, even if the + argument provided to the *dir* parameter is a relative path. + +.. _whatsnew-typing-py312: typing ------ @@ -385,6 +527,50 @@ typing same name on a base class, as per :pep:`698`. (Contributed by Steven Troxler in :gh:`101564`.) +* :func:`isinstance` checks against + :func:`runtime-checkable protocols <typing.runtime_checkable>` now use + :func:`inspect.getattr_static` rather than :func:`hasattr` to lookup whether + attributes exist. This means that descriptors and :meth:`~object.__getattr__` + methods are no longer unexpectedly evaluated during ``isinstance()`` checks + against runtime-checkable protocols. However, it may also mean that some + objects which used to be considered instances of a runtime-checkable protocol + may no longer be considered instances of that protocol on Python 3.12+, and + vice versa. Most users are unlikely to be affected by this change. + (Contributed by Alex Waygood in :gh:`102433`.) + +* The members of a runtime-checkable protocol are now considered "frozen" at + runtime as soon as the class has been created. Monkey-patching attributes + onto a runtime-checkable protocol will still work, but will have no impact on + :func:`isinstance` checks comparing objects to the protocol. For example:: + + >>> from typing import Protocol, runtime_checkable + >>> @runtime_checkable + ... class HasX(Protocol): + ... x = 1 + ... + >>> class Foo: ... + ... 
+ >>> f = Foo() + >>> isinstance(f, HasX) + False + >>> f.x = 1 + >>> isinstance(f, HasX) + True + >>> HasX.y = 2 + >>> isinstance(f, HasX) # unchanged, even though HasX now also has a "y" attribute + True + + This change was made in order to speed up ``isinstance()`` checks against + runtime-checkable protocols. + +* The performance profile of :func:`isinstance` checks against + :func:`runtime-checkable protocols <typing.runtime_checkable>` has changed + significantly. Most ``isinstance()`` checks against protocols with only a few + members should be at least 2x faster than in 3.11, and some may be 20x + faster or more. However, ``isinstance()`` checks against protocols with seven + or more members may be slower than in Python 3.11. (Contributed by Alex + Waygood in :gh:`74690` and :gh:`103193`.) + sys --- @@ -397,6 +583,16 @@ sys with contributions from Gregory P. Smith [Google] and Mark Shannon in :gh:`96123`.) +* Add :data:`sys.last_exc` which holds the last unhandled exception that + was raised (for post-mortem debugging use cases). Deprecate the + three fields that have the same information in its legacy form: + :data:`sys.last_type`, :data:`sys.last_value` and :data:`sys.last_traceback`. + (Contributed by Irit Katriel in :gh:`102778`.) + +* :func:`sys._current_exceptions` now returns a mapping from thread-id to an + exception instance, rather than to a ``(typ, exc, tb)`` tuple. + (Contributed by Irit Katriel in :gh:`103176`.) + Optimizations ============= @@ -407,13 +603,21 @@ Optimizations * Added experimental support for using the BOLT binary optimizer in the build process, which improves performance by 1-5%. - (Contributed by Kevin Modzelewski in :gh:`90536`.) + (Contributed by Kevin Modzelewski in :gh:`90536` and tuned by Dong-hee Na in :gh:`101525`) * Speed up the regular expression substitution (functions :func:`re.sub` and - :func:`re.subn` and corresponding :class:`re.Pattern` methods) for + :func:`re.subn` and corresponding :class:`!re.Pattern` methods) for replacement strings containing group references by 2--3 times. (Contributed by Serhiy Storchaka in :gh:`91524`.) +* Speed up :class:`asyncio.Task` creation by deferring expensive string formatting. + (Contributed by Itamar O in :gh:`103793`.) + +* Added :func:`asyncio.eager_task_factory` and :func:`asyncio.create_eager_task_factory` + functions to allow opting an event loop in to eager task execution, + speeding up some use-cases by up to 50%. + (Contributed by Jacob Bower & Itamar O in :gh:`102853`) + CPython bytecode changes ======================== @@ -423,6 +627,9 @@ CPython bytecode changes :opcode:`LOAD_METHOD` instruction if the low bit of its oparg is set. (Contributed by Ken Jin in :gh:`93429`.) +* Removed the :opcode:`!JUMP_IF_FALSE_OR_POP` and :opcode:`!JUMP_IF_TRUE_OR_POP` + instructions. (Contributed by Irit Katriel in :gh:`102859`.) + Demos and Tools =============== @@ -467,7 +674,7 @@ Deprecated :exc:`ImportWarning`). (Contributed by Brett Cannon in :gh:`65961`.) -* The :meth:`~asyncio.DefaultEventLoopPolicy.get_event_loop` method of the +* The :meth:`~asyncio.get_event_loop` method of the default event loop policy now emits a :exc:`DeprecationWarning` if there is no current event loop set and it decides to create one. (Contributed by Serhiy Storchaka and Guido van Rossum in :gh:`100160`.) @@ -488,6 +695,21 @@ Deprecated contain the creation time, which is also available in the new ``st_birthtime`` field. (Contributed by Steve Dower in :gh:`99726`.) 
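A rough sketch of code that prefers the new field where it exists; the file
name is hypothetical::

   import os

   st = os.stat("example.txt")   # hypothetical path
   # Prefer the creation time where the platform provides it (Windows on
   # 3.12+, and macOS/BSD generally); fall back to st_ctime otherwise.
   created = getattr(st, "st_birthtime", st.st_ctime)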
+* The :data:`sys.last_type`, :data:`sys.last_value` and :data:`sys.last_traceback` + fields are deprecated. Use :data:`sys.last_exc` instead. + (Contributed by Irit Katriel in :gh:`102778`.) + +* The *onerror* argument of :func:`shutil.rmtree` is deprecated as will be removed + in Python 3.14. Use *onexc* instead. (Contributed by Irit Katriel in :gh:`102828`.) + +* Extracting tar archives without specifying *filter* is deprecated until + Python 3.14, when ``'data'`` filter will become the default. + See :ref:`tarfile-extraction-filter` for details. + +* ``calendar.January`` and ``calendar.February`` constants are deprecated and + replaced by :data:`calendar.Month.JANUARY` and :data:`calendar.Month.FEBRUARY`. + (Contributed by Prince Roshan in :gh:`103636`.) + Pending Removal in Python 3.13 ------------------------------ @@ -518,13 +740,13 @@ Modules (see :pep:`594`): APIs: -* :class:`configparser.LegacyInterpolation` (:gh:`90765`) +* :class:`!configparser.LegacyInterpolation` (:gh:`90765`) * :func:`locale.getdefaultlocale` (:gh:`90817`) -* :meth:`turtle.RawTurtle.settiltangle` (:gh:`50096`) -* :func:`unittest.findTestCases` (:gh:`50096`) -* :func:`unittest.makeSuite` (:gh:`50096`) -* :func:`unittest.getTestCaseNames` (:gh:`50096`) -* :class:`webbrowser.MacOSX` (:gh:`86421`) +* :meth:`!turtle.RawTurtle.settiltangle` (:gh:`50096`) +* :func:`!unittest.findTestCases` (:gh:`50096`) +* :func:`!unittest.makeSuite` (:gh:`50096`) +* :func:`!unittest.getTestCaseNames` (:gh:`50096`) +* :class:`!webbrowser.MacOSX` (:gh:`86421`) Pending Removal in Python 3.14 ------------------------------ @@ -532,9 +754,9 @@ Pending Removal in Python 3.14 * Deprecated the following :mod:`importlib.abc` classes, scheduled for removal in Python 3.14: - * :class:`importlib.abc.ResourceReader` - * :class:`importlib.abc.Traversable` - * :class:`importlib.abc.TraversableResources` + * :class:`!importlib.abc.ResourceReader` + * :class:`!importlib.abc.Traversable` + * :class:`!importlib.abc.TraversableResources` Use :mod:`importlib.resources.abc` classes instead: @@ -543,9 +765,12 @@ Pending Removal in Python 3.14 (Contributed by Jason R. Coombs and Hugo van Kemenade in :gh:`93963`.) -* Creating :c:data:`immutable types <Py_TPFLAGS_IMMUTABLETYPE>` with mutable +* Creating immutable types (:data:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable bases using the C API. +* Deprecated the *isdst* parameter in :func:`email.utils.localtime`. + (Contributed by Alan Williams in :gh:`72346`.) + * ``__package__`` and ``__cached__`` will cease to be set or taken into consideration by the import system (:gh:`97879`). @@ -564,6 +789,15 @@ Pending Removal in Python 3.14 functions that have been deprecated since Python 2 but only gained a proper :exc:`DeprecationWarning` in 3.12. Remove them in 3.14. +* Accessing ``co_lnotab`` was deprecated in :pep:`626` since 3.10 + and was planned to be removed in 3.12 + but it only got a proper :exc:`DeprecationWarning` in 3.12. + May be removed in 3.14. + (Contributed by Nikita Sobolev in :gh:`101866`.) + +* The *onerror* argument of :func:`shutil.rmtree` is deprecated in 3.12, + and will be removed in 3.14. + Pending Removal in Future Versions ---------------------------------- @@ -591,6 +825,24 @@ Removed project can be installed: it still provides ``distutils``. (Contributed by Victor Stinner in :gh:`92584`.) +* Remove the bundled setuptools wheel from :mod:`ensurepip`, + and stop installing setuptools in environments created by :mod:`venv`. 
+ + ``pip (>= 22.1)`` does not require setuptools to be installed in the + environment. ``setuptools``-based (and ``distutils``-based) packages + can still be used with ``pip install``, since pip will provide + ``setuptools`` in the build environment it uses for building a + package. + + ``easy_install``, ``pkg_resources``, ``setuptools`` and ``distutils`` + are no longer provided by default in environments created with + ``venv`` or bootstrapped with ``ensurepip``, since they are part of + the ``setuptools`` package. For projects relying on these at runtime, + the ``setuptools`` project should be declared as a dependency and + installed separately (typically, using pip). + + (Contributed by Pradyun Gedam in :gh:`95299`.) + * Removed many old deprecated :mod:`unittest` features: - A number of :class:`~unittest.TestCase` method aliases: @@ -672,11 +924,11 @@ Removed * Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) - function is a built-in function. Since Python 3.10, :func:`_pyio.open` is + function is a built-in function. Since Python 3.10, :func:`!_pyio.open` is also a static method. (Contributed by Victor Stinner in :gh:`94169`.) -* Remove the :func:`ssl.RAND_pseudo_bytes` function, deprecated in Python 3.6: +* Remove the :func:`!ssl.RAND_pseudo_bytes` function, deprecated in Python 3.6: use :func:`os.urandom` or :func:`ssl.RAND_bytes` instead. (Contributed by Victor Stinner in :gh:`94199`.) @@ -686,13 +938,13 @@ Removed extension if it was not present. (Contributed by Victor Stinner in :gh:`94196`.) -* Remove the :func:`ssl.match_hostname` function. The - :func:`ssl.match_hostname` was deprecated in Python 3.7. OpenSSL performs +* Remove the :func:`!ssl.match_hostname` function. + It was deprecated in Python 3.7. OpenSSL performs hostname matching since Python 3.7, Python no longer uses the - :func:`ssl.match_hostname` function. + :func:`!ssl.match_hostname` function. (Contributed by Victor Stinner in :gh:`94199`.) -* Remove the :func:`locale.format` function, deprecated in Python 3.7: +* Remove the :func:`!locale.format` function, deprecated in Python 3.7: use :func:`locale.format_string` instead. (Contributed by Victor Stinner in :gh:`94226`.) @@ -702,9 +954,9 @@ Removed a C implementation of :func:`~hashlib.pbkdf2_hmac()` which is faster. (Contributed by Victor Stinner in :gh:`94199`.) -* :mod:`xml.etree`: Remove the ``ElementTree.Element.copy()`` method of the +* :mod:`xml.etree.ElementTree`: Remove the ``ElementTree.Element.copy()`` method of the pure Python implementation, deprecated in Python 3.10, use the - :func:`copy.copy` function instead. The C implementation of :mod:`xml.etree` + :func:`copy.copy` function instead. The C implementation of :mod:`xml.etree.ElementTree` has no ``copy()`` method, only a ``__copy__()`` method. (Contributed by Victor Stinner in :gh:`94383`.) @@ -713,10 +965,10 @@ Removed :pep:`451` for the rationale. (Contributed by Victor Stinner in :gh:`94379`.) -* Remove the :func:`ssl.wrap_socket` function, deprecated in Python 3.7: +* Remove the :func:`!ssl.wrap_socket` function, deprecated in Python 3.7: instead, create a :class:`ssl.SSLContext` object and call its :class:`ssl.SSLContext.wrap_socket` method. Any package that still uses - :func:`ssl.wrap_socket` is broken and insecure. The function neither sends a + :func:`!ssl.wrap_socket` is broken and insecure. The function neither sends a SNI TLS extension nor validates server hostname. 
Code is subject to `CWE-295 <https://cwe.mitre.org/data/definitions/295.html>`_: Improper Certificate Validation. @@ -726,11 +978,14 @@ Removed completed: * References to, and support for ``module_repr()`` has been eradicated. - + (Contributed by Barry Warsaw in :gh:`97850`.) * ``importlib.util.set_package`` has been removed. (Contributed by Brett Cannon in :gh:`65961`.) +* The ``imp`` module has been removed. (Contributed by Barry Warsaw in + :gh:`98040`.) + * Removed the ``suspicious`` rule from the documentation Makefile, and removed ``Doc/tools/rstlint.py``, both in favor of `sphinx-lint <https://github.com/sphinx-contrib/sphinx-lint>`_. @@ -748,6 +1003,10 @@ Removed *context* parameter instead. (Contributed by Victor Stinner in :gh:`94172`.) +* Remove support for obsolete browsers from :mod:`webbrowser`. + Removed browsers include: Grail, Mosaic, Netscape, Galeon, Skipstone, + Iceape, Firebird, and Firefox versions 35 and below (:gh:`102871`). + Porting to Python 3.12 ====================== @@ -811,6 +1070,14 @@ Changes in the Python API synchronization is needed, implement locking within the cached property getter function or around multi-threaded access points. +* :func:`sys._current_exceptions` now returns a mapping from thread-id to an + exception instance, rather than to a ``(typ, exc, tb)`` tuple. + (Contributed by Irit Katriel in :gh:`103176`.) + +* When extracting tar files using :mod:`tarfile` or + :func:`shutil.unpack_archive`, pass the *filter* argument to limit features + that may be surprising or dangerous. + See :ref:`tarfile-extraction-filter` for details. Build Changes ============= @@ -883,7 +1150,7 @@ New Features The :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class when the class's :py:meth:`~object.__call__` method is reassigned. This makes vectorcall safe to use with mutable types (i.e. heap types - without the :const:`immutable <Py_TPFLAGS_IMMUTABLETYPE>` flag). + without the immutable flag, :const:`Py_TPFLAGS_IMMUTABLETYPE`). Mutable types that do not override :c:member:`~PyTypeObject.tp_call` now inherit the ``Py_TPFLAGS_HAVE_VECTORCALL`` flag. (Contributed by Petr Viktorin in :gh:`93274`.) @@ -916,7 +1183,7 @@ New Features (Contributed by Andrew Frost in :gh:`92257`.) * The C API now permits registering callbacks via :c:func:`PyDict_AddWatcher`, - :c:func:`PyDict_AddWatch` and related APIs to be called whenever a dictionary + :c:func:`PyDict_Watch` and related APIs to be called whenever a dictionary is modified. This is intended for use by optimizing interpreters, JIT compilers, or debuggers. (Contributed by Carl Meyer in :gh:`91052`.) @@ -942,15 +1209,37 @@ New Features This is less error prone and a bit more efficient. (Contributed by Mark Shannon in :gh:`101578`.) +* Add ``_PyErr_ChainExceptions1``, which takes an exception instance, + to replace the legacy-API ``_PyErr_ChainExceptions``, which is now + deprecated. (Contributed by Mark Shannon in :gh:`101578`.) + * Add :c:func:`PyException_GetArgs` and :c:func:`PyException_SetArgs` as convenience functions for retrieving and modifying the :attr:`~BaseException.args` passed to the exception's constructor. (Contributed by Mark Shannon in :gh:`101578`.) * Add :c:func:`PyErr_DisplayException`, which takes an exception instance, - to replace the legacy-api :c:func:`PyErr_Display`. (Contributed by + to replace the legacy-api :c:func:`!PyErr_Display`. (Contributed by Irit Katriel in :gh:`102755`). 
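
  A minimal sketch of how embedding code might combine
  :c:func:`PyErr_GetRaisedException`, :c:func:`PyException_GetArgs` and
  :c:func:`PyErr_DisplayException`, assuming an exception is pending after a
  failed call (the helper name is illustrative, not part of the patch)::

      #include <Python.h>

      static void
      report_pending_exception(void)
      {
          /* Steals the pending exception and clears the error indicator;
             returns NULL if nothing is set. */
          PyObject *exc = PyErr_GetRaisedException();
          if (exc == NULL) {
              return;
          }
          PyObject *args = PyException_GetArgs(exc);  /* new reference */
          if (args != NULL) {
              fprintf(stderr, "exception created with %zd argument(s)\n",
                      PyTuple_GET_SIZE(args));
              Py_DECREF(args);
          }
          /* Render the exception and its traceback, as the interpreter does
             for an unhandled exception. */
          PyErr_DisplayException(exc);
          Py_DECREF(exc);
      }
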
+* :pep:`683`: Introduced Immortal Objects to Python which allows objects + to bypass reference counts and introduced changes to the C-API: + + - ``_Py_IMMORTAL_REFCNT``: The reference count that defines an object + as immortal. + - ``_Py_IsImmortal`` Checks if an object has the immortal reference count. + - ``PyObject_HEAD_INIT`` This will now initialize reference count to + ``_Py_IMMORTAL_REFCNT`` when used with ``Py_BUILD_CORE``. + - ``SSTATE_INTERNED_IMMORTAL`` An identifier for interned unicode objects + that are immortal. + - ``SSTATE_INTERNED_IMMORTAL_STATIC`` An identifier for interned unicode + objects that are immortal and static + - ``sys.getunicodeinternedsize`` This returns the total number of unicode + objects that have been interned. This is now needed for refleak.py to + correctly track reference counts and allocated blocks + + (Contributed by Eddie Elizondo in :gh:`84436`.) + Porting to Python 3.12 ---------------------- @@ -995,7 +1284,7 @@ Porting to Python 3.12 supported, but does not fully support multiple inheritance (:gh:`95589`), and performance may be worse. Classes declaring :const:`Py_TPFLAGS_MANAGED_DICT` should call - :c:func:`_PyObject_VisitManagedDict` and :c:func:`_PyObject_ClearManagedDict` + :c:func:`!_PyObject_VisitManagedDict` and :c:func:`!_PyObject_ClearManagedDict` to traverse and clear their instance's dictionaries. To clear weakrefs, call :c:func:`PyObject_ClearWeakRefs`, as before. @@ -1014,6 +1303,11 @@ Porting to Python 3.12 functions that set the error indicator now normalize the exception before storing it. (Contributed by Mark Shannon in :gh:`101578`.) +* ``_Py_RefTotal`` is no longer authoritative and only kept around + for ABI compabitility. Note that it is an internal global and only + available on debug builds. If you happen to be using it then you'll + need to start using ``_Py_GetGlobalRefTotal()``. + Deprecated ---------- @@ -1035,17 +1329,17 @@ Deprecated * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` and :c:member:`PyConfig.hash_seed` * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` - * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyConfig.legacy_windows_fs_encoding` + * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` - * :c:var:`Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` - * :c:var:`Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`) + * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` + * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` + * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`) The :c:func:`Py_InitializeFromConfig` API should be used with :c:type:`PyConfig` instead. (Contributed by Victor Stinner in :gh:`77782`.) -* Creating :c:data:`immutable types <Py_TPFLAGS_IMMUTABLETYPE>` with mutable +* Creating immutable types (:const:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable bases is deprecated and will be disabled in Python 3.14. * The ``structmember.h`` header is deprecated, though it continues to be @@ -1084,9 +1378,11 @@ Deprecated :c:func:`PyErr_SetRaisedException` instead. (Contributed by Mark Shannon in :gh:`101578`.) -* :c:func:`PyErr_Display` is deprecated. 
Use :c:func:`PyErr_DisplayException` +* :c:func:`!PyErr_Display` is deprecated. Use :c:func:`PyErr_DisplayException` instead. (Contributed by Irit Katriel in :gh:`102755`). +* ``_PyErr_ChainExceptions`` is deprecated. Use ``_PyErr_ChainExceptions1`` + instead. (Contributed by Irit Katriel in :gh:`102192`.) Removed ------- @@ -1098,18 +1394,17 @@ Removed * Legacy Unicode APIs have been removed. See :pep:`623` for detail. - * :c:macro:`PyUnicode_WCHAR_KIND` - * :c:func:`PyUnicode_AS_UNICODE` - * :c:func:`PyUnicode_AsUnicode` - * :c:func:`PyUnicode_AsUnicodeAndSize` - * :c:func:`PyUnicode_AS_DATA` - * :c:func:`PyUnicode_FromUnicode` - * :c:func:`PyUnicode_GET_SIZE` - * :c:func:`PyUnicode_GetSize` - * :c:func:`PyUnicode_GET_DATA_SIZE` - -* Remove the ``PyUnicode_InternImmortal()`` function and the - ``SSTATE_INTERNED_IMMORTAL`` macro. + * :c:macro:`!PyUnicode_WCHAR_KIND` + * :c:func:`!PyUnicode_AS_UNICODE` + * :c:func:`!PyUnicode_AsUnicode` + * :c:func:`!PyUnicode_AsUnicodeAndSize` + * :c:func:`!PyUnicode_AS_DATA` + * :c:func:`!PyUnicode_FromUnicode` + * :c:func:`!PyUnicode_GET_SIZE` + * :c:func:`!PyUnicode_GetSize` + * :c:func:`!PyUnicode_GET_DATA_SIZE` + +* Remove the ``PyUnicode_InternImmortal()`` function macro. (Contributed by Victor Stinner in :gh:`85858`.) * Remove ``Jython`` compatibility hacks from several stdlib modules and tests. diff --git a/Grammar/Tokens b/Grammar/Tokens index 1f3e3b09913653..096876fdd130f8 100644 --- a/Grammar/Tokens +++ b/Grammar/Tokens @@ -53,6 +53,7 @@ ATEQUAL '@=' RARROW '->' ELLIPSIS '...' COLONEQUAL ':=' +EXCLAMATION '!' OP AWAIT @@ -60,6 +61,9 @@ ASYNC TYPE_IGNORE TYPE_COMMENT SOFT_KEYWORD +FSTRING_START +FSTRING_MIDDLE +FSTRING_END ERRORTOKEN # These aren't used by the C tokenizer but are needed for tokenize.py diff --git a/Grammar/python.gram b/Grammar/python.gram index 2498251293e80e..6361dcd0985b99 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -194,7 +194,7 @@ yield_stmt[stmt_ty]: y=yield_expr { _PyAST_Expr(y, EXTRA) } assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _PyAST_Assert(a, b, EXTRA) } -import_stmt[stmt_ty]: +import_stmt[stmt_ty]: | invalid_import | import_name | import_from @@ -415,8 +415,8 @@ try_stmt[stmt_ty]: | invalid_try_stmt | 'try' &&':' b=block f=finally_block { _PyAST_Try(b, NULL, NULL, f, EXTRA) } | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_block+ el=[else_block] f=[finally_block] { _PyAST_Try(b, ex, el, f, EXTRA) } - | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_star_block+ el=[else_block] f=[finally_block] { - CHECK_VERSION(stmt_ty, 11, "Exception groups are", + | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_star_block+ el=[else_block] f=[finally_block] { + CHECK_VERSION(stmt_ty, 11, "Exception groups are", _PyAST_TryStar(b, ex, el, f, EXTRA)) } @@ -807,7 +807,7 @@ atom[expr_ty]: | 'True' { _PyAST_Constant(Py_True, NULL, EXTRA) } | 'False' { _PyAST_Constant(Py_False, NULL, EXTRA) } | 'None' { _PyAST_Constant(Py_None, NULL, EXTRA) } - | &STRING strings + | &(STRING|FSTRING_START) strings | NUMBER | &'(' (tuple | group | genexp) | &'[' (list | listcomp) @@ -877,7 +877,25 @@ lambda_param[arg_ty]: a=NAME { _PyAST_arg(a->v.Name.id, NULL, NULL, EXTRA) } # LITERALS # ======== -strings[expr_ty] (memo): a=STRING+ { _PyPegen_concatenate_strings(p, a) } +fstring_middle[expr_ty]: + | fstring_replacement_field + | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) } +fstring_replacement_field[expr_ty]: + | '{' a=(yield_expr | star_expressions) debug_expr="="? 
conversion=[fstring_conversion] format=[fstring_full_format_spec] rbrace='}' { + _PyPegen_formatted_value(p, a, debug_expr, conversion, format, rbrace, EXTRA) } + | invalid_replacement_field +fstring_conversion[ResultTokenWithMetadata*]: + | conv_token="!" conv=NAME { _PyPegen_check_fstring_conversion(p, conv_token, conv) } +fstring_full_format_spec[ResultTokenWithMetadata*]: + | colon=':' spec=fstring_format_spec* { _PyPegen_setup_full_format_spec(p, colon, (asdl_expr_seq *) spec, EXTRA) } +fstring_format_spec[expr_ty]: + | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) } + | fstring_replacement_field +fstring[expr_ty]: + | a=FSTRING_START b=fstring_middle* c=FSTRING_END { _PyPegen_joined_str(p, a, (asdl_expr_seq*)b, c) } + +string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) } +strings[expr_ty] (memo): a[asdl_expr_seq*]=(fstring|string)+ { _PyPegen_concatenate_strings(p, a, EXTRA) } list[expr_ty]: | '[' a=[star_named_expressions] ']' { _PyAST_List(a, Load, EXTRA) } @@ -1118,6 +1136,8 @@ invalid_expression: _PyPegen_check_legacy_stmt(p, a) ? NULL : p->tokens[p->mark-1]->level == 0 ? NULL : RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "invalid syntax. Perhaps you forgot a comma?") } | a=disjunction 'if' b=disjunction !('else'|':') { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "expected 'else' after 'if' expression") } + | a='lambda' [lambda_params] b=':' &(FSTRING_MIDDLE | fstring_replacement_field) { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "f-string: lambda expressions are not allowed without parentheses") } invalid_named_expression(memo): | a=expression ':=' expression { @@ -1241,7 +1261,7 @@ invalid_group: invalid_import: | a='import' dotted_name 'from' dotted_name { RAISE_SYNTAX_ERROR_STARTING_FROM(a, "Did you mean to use 'from ... import ...' instead?") } - + invalid_import_from_targets: | import_from_as_names ',' NEWLINE { RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") } @@ -1335,3 +1355,24 @@ invalid_kvpair: | expression a=':' &('}'|',') {RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "expression expected after dictionary key and ':'") } invalid_starred_expression: | a='*' expression '=' b=expression { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot assign to iterable argument unpacking") } +invalid_replacement_field: + | '{' a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '='") } + | '{' a='!' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '!'") } + | '{' a=':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before ':'") } + | '{' a='}' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '}'") } + | '{' !(yield_expr | star_expressions) { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting a valid expression after '{'")} + | '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') { + PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '=', or '!', or ':', or '}'") } + | '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}') { + PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '!', or ':', or '}'") } + | '{' (yield_expr | star_expressions) '='? invalid_conversion_character + | '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') { + PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting ':' or '}'") } + | '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}' { + PyErr_Occurred() ? 
NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '}', or format specs") } + | '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}' { + PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '}'") } + +invalid_conversion_character: + | '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: missing conversion character") } + | '!' !NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: invalid conversion character") } diff --git a/Include/boolobject.h b/Include/boolobject.h index ca21fbfad8e827..976fa35201d035 100644 --- a/Include/boolobject.h +++ b/Include/boolobject.h @@ -11,8 +11,7 @@ PyAPI_DATA(PyTypeObject) PyBool_Type; #define PyBool_Check(x) Py_IS_TYPE((x), &PyBool_Type) -/* Py_False and Py_True are the only two bools in existence. -Don't forget to apply Py_INCREF() when returning either!!! */ +/* Py_False and Py_True are the only two bools in existence. */ /* Don't use these directly */ PyAPI_DATA(PyLongObject) _Py_FalseStruct; @@ -31,8 +30,8 @@ PyAPI_FUNC(int) Py_IsFalse(PyObject *x); #define Py_IsFalse(x) Py_Is((x), Py_False) /* Macros for returning Py_True or Py_False, respectively */ -#define Py_RETURN_TRUE return Py_NewRef(Py_True) -#define Py_RETURN_FALSE return Py_NewRef(Py_False) +#define Py_RETURN_TRUE return Py_True +#define Py_RETURN_FALSE return Py_False /* Function to return a bool from a C long */ PyAPI_FUNC(PyObject *) PyBool_FromLong(long); diff --git a/Include/cpython/code.h b/Include/cpython/code.h index abcf1250603dfe..6bead361c79245 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -3,10 +3,22 @@ #ifndef Py_LIMITED_API #ifndef Py_CODE_H #define Py_CODE_H + #ifdef __cplusplus extern "C" { #endif + +/* Count of all "real" monitoring events (not derived from other events) */ +#define PY_MONITORING_UNGROUPED_EVENTS 14 +/* Count of all monitoring events */ +#define PY_MONITORING_EVENTS 16 + +/* Table of which tools are active for each monitored event. */ +typedef struct _Py_Monitors { + uint8_t tools[PY_MONITORING_UNGROUPED_EVENTS]; +} _Py_Monitors; + /* Each instruction in a code object is a fixed-width value, * currently 2 bytes: 1-byte opcode + 1-byte oparg. The EXTENDED_ARG * opcode allows for larger values but the current limit is 3 uses @@ -56,6 +68,35 @@ typedef struct { PyObject *_co_freevars; } _PyCoCached; +/* Ancilliary data structure used for instrumentation. + Line instrumentation creates an array of + these. One entry per code unit.*/ +typedef struct { + uint8_t original_opcode; + int8_t line_delta; +} _PyCoLineInstrumentationData; + +/* Main data structure used for instrumentation. 
+ * This is allocated when needed for instrumentation + */ +typedef struct { + /* Monitoring specific to this code object */ + _Py_Monitors local_monitors; + /* Monitoring that is active on this code object */ + _Py_Monitors active_monitors; + /* The tools that are to be notified for events for the matching code unit */ + uint8_t *tools; + /* Information to support line events */ + _PyCoLineInstrumentationData *lines; + /* The tools that are to be notified for line events for the matching code unit */ + uint8_t *line_tools; + /* Information to support instruction events */ + /* The underlying instructions, which can themselves be instrumented */ + uint8_t *per_instruction_opcodes; + /* The tools that are to be notified for instruction events for the matching code unit */ + uint8_t *per_instruction_tools; +} _PyCoMonitoringData; + // To avoid repeating ourselves in deepfreeze.py, all PyCodeObject members are // defined in this macro: #define _PyCode_DEF(SIZE) { \ @@ -87,7 +128,6 @@ typedef struct { PyObject *co_exceptiontable; /* Byte string encoding exception handling \ table */ \ int co_flags; /* CO_..., see below */ \ - short _co_linearray_entry_size; /* Size of each entry in _co_linearray */ \ \ /* The rest are not so impactful on performance. */ \ int co_argcount; /* #arguments, except *args */ \ @@ -114,8 +154,9 @@ typedef struct { PyObject *co_linetable; /* bytes object that holds location info */ \ PyObject *co_weakreflist; /* to support weakrefs to code objects */ \ _PyCoCached *_co_cached; /* cached co_* attributes */ \ + uint64_t _co_instrumentation_version; /* current instrumentation version */ \ + _PyCoMonitoringData *_co_monitoring; /* Monitoring data */ \ int _co_firsttraceable; /* index of first traceable instruction */ \ - char *_co_linearray; /* array of line offsets */ \ /* Scratch space for extra data relating to the code object. \ Type is a void* to keep the format private in codeobject.c to force \ people to go through the proper APIs. */ \ diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index a070fa9ff3a038..79c1023baa9a0f 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -25,6 +25,7 @@ PyAPI_FUNC(PyStatus) PyStatus_Exit(int exitcode); PyAPI_FUNC(int) PyStatus_IsError(PyStatus err); PyAPI_FUNC(int) PyStatus_IsExit(PyStatus err); PyAPI_FUNC(int) PyStatus_Exception(PyStatus err); +PyAPI_FUNC(PyObject *) _PyErr_SetFromPyStatus(PyStatus status); /* --- PyWideStringList ------------------------------------------------ */ @@ -244,6 +245,8 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config, /* --- PyInterpreterConfig ------------------------------------ */ typedef struct { + // XXX "allow_object_sharing"? "own_objects"? 
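+    // use_main_obmalloc: if set, the interpreter shares the main
+    // interpreter's obmalloc (small-object allocator) state instead of
+    // getting its own; see Py_RTFLAGS_USE_MAIN_OBMALLOC in cpython/pystate.h.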
+ int use_main_obmalloc; int allow_fork; int allow_exec; int allow_threads; @@ -253,6 +256,7 @@ typedef struct { #define _PyInterpreterConfig_INIT \ { \ + .use_main_obmalloc = 0, \ .allow_fork = 0, \ .allow_exec = 0, \ .allow_threads = 1, \ @@ -262,6 +266,7 @@ typedef struct { #define _PyInterpreterConfig_LEGACY_INIT \ { \ + .use_main_obmalloc = 1, \ .allow_fork = 1, \ .allow_exec = 1, \ .allow_threads = 1, \ diff --git a/Include/cpython/longintrepr.h b/Include/cpython/longintrepr.h index 810daa83165e71..c4cf820da5e4f2 100644 --- a/Include/cpython/longintrepr.h +++ b/Include/cpython/longintrepr.h @@ -80,7 +80,7 @@ typedef long stwodigits; /* signed variant of twodigits */ */ typedef struct _PyLongValue { - Py_ssize_t ob_size; /* Number of items in variable part */ + uintptr_t lv_tag; /* Number of digits, sign and flags */ digit ob_digit[1]; } _PyLongValue; @@ -94,6 +94,10 @@ PyAPI_FUNC(PyLongObject *) _PyLong_New(Py_ssize_t); /* Return a copy of src. */ PyAPI_FUNC(PyObject *) _PyLong_Copy(PyLongObject *src); +PyAPI_FUNC(PyLongObject *) +_PyLong_FromDigits(int negative, Py_ssize_t digit_count, digit *digits); + + #ifdef __cplusplus } #endif diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 7b687d311359c3..ce4d13cd9c28fe 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -11,7 +11,11 @@ PyAPI_FUNC(void) _Py_ForgetReference(PyObject *); #endif #ifdef Py_REF_DEBUG -PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void); +/* These are useful as debugging aids when chasing down refleaks. */ +PyAPI_FUNC(Py_ssize_t) _Py_GetGlobalRefTotal(void); +# define _Py_GetRefTotal() _Py_GetGlobalRefTotal() +PyAPI_FUNC(Py_ssize_t) _Py_GetLegacyRefTotal(void); +PyAPI_FUNC(Py_ssize_t) _PyInterpreterState_GetRefTotal(PyInterpreterState *); #endif @@ -230,7 +234,18 @@ struct _typeobject { * It should should be treated as an opaque blob * by code other than the specializer and interpreter. */ struct _specialization_cache { + // In order to avoid bloating the bytecode with lots of inline caches, the + // members of this structure have a somewhat unique contract. They are set + // by the specialization machinery, and are invalidated by PyType_Modified. + // The rules for using them are as follows: + // - If getitem is non-NULL, then it is the same Python function that + // PyType_Lookup(cls, "__getitem__") would return. + // - If getitem is NULL, then getitem_version is meaningless. + // - If getitem->func_version == getitem_version, then getitem can be called + // with two positional arguments and no keyword arguments, and has neither + // *args nor **kwargs (as required by BINARY_SUBSCR_GETITEM): PyObject *getitem; + uint32_t getitem_version; }; /* The *real* layout of a type object when allocated on the heap */ @@ -549,3 +564,10 @@ PyAPI_FUNC(int) PyType_AddWatcher(PyType_WatchCallback callback); PyAPI_FUNC(int) PyType_ClearWatcher(int watcher_id); PyAPI_FUNC(int) PyType_Watch(int watcher_id, PyObject *type); PyAPI_FUNC(int) PyType_Unwatch(int watcher_id, PyObject *type); + +/* Attempt to assign a version tag to the given type. + * + * Returns 1 if the type already had a valid version tag or a new one was + * assigned, or 0 if a new tag could not be assigned. 
+ */ +PyAPI_FUNC(int) PyUnstable_Type_AssignVersionTag(PyTypeObject *type); diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index d0300f6ee56a25..758804ade2baa7 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -98,7 +98,7 @@ PyAPI_FUNC(void) _PyErr_GetExcInfo(PyThreadState *, PyObject **, PyObject **, Py /* Context manipulation (PEP 3134) */ -PyAPI_FUNC(void) _PyErr_ChainExceptions(PyObject *, PyObject *, PyObject *); +Py_DEPRECATED(3.12) PyAPI_FUNC(void) _PyErr_ChainExceptions(PyObject *, PyObject *, PyObject *); PyAPI_FUNC(void) _PyErr_ChainExceptions1(PyObject *); /* Like PyErr_Format(), but saves current exception as __context__ and @@ -116,24 +116,6 @@ PyAPI_FUNC(int) _PyException_AddNote( PyObject *exc, PyObject *note); -/* Helper that attempts to replace the current exception with one of the - * same type but with a prefix added to the exception text. The resulting - * exception description looks like: - * - * prefix (exc_type: original_exc_str) - * - * Only some exceptions can be safely replaced. If the function determines - * it isn't safe to perform the replacement, it will leave the original - * unmodified exception in place. - * - * Returns a borrowed reference to the new exception (if any), NULL if the - * existing exception was left in place. - */ -PyAPI_FUNC(PyObject *) _PyErr_TrySetFromCause( - const char *prefix_format, /* ASCII-encoded string */ - ... - ); - /* In signalmodule.c */ int PySignal_SetWakeupFd(int fd); diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index e1f83acbffc360..79d55711319e55 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -62,5 +62,10 @@ PyAPI_FUNC(int) _Py_CoerceLegacyLocale(int warn); PyAPI_FUNC(int) _Py_LegacyLocaleDetected(int warn); PyAPI_FUNC(char *) _Py_SetLocaleFromEnv(int category); -PyAPI_FUNC(PyThreadState *) _Py_NewInterpreterFromConfig( - const _PyInterpreterConfig *); +PyAPI_FUNC(PyStatus) _Py_NewInterpreterFromConfig( + PyThreadState **tstate_p, + const _PyInterpreterConfig *config); + +typedef void (*atexit_datacallbackfunc)(void *); +PyAPI_FUNC(int) _Py_AtExit( + PyInterpreterState *, atexit_datacallbackfunc, void *); diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 3efb241e8237e7..f33c72d4cf4d2a 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -11,6 +11,10 @@ is available in a given context. For example, forking the process might not be allowed in the current interpreter (i.e. os.fork() would fail). */ +/* Set if the interpreter share obmalloc runtime state + with the main interpreter. */ +#define Py_RTFLAGS_USE_MAIN_OBMALLOC (1UL << 5) + /* Set if import should check a module for subinterpreter support. */ #define Py_RTFLAGS_MULTI_INTERP_EXTENSIONS (1UL << 8) @@ -58,12 +62,6 @@ typedef int (*Py_tracefunc)(PyObject *, PyFrameObject *, int, PyObject *); #define PyTrace_C_RETURN 6 #define PyTrace_OPCODE 7 - -typedef struct { - PyCodeObject *code; // The code object for the bounds. May be NULL. - PyCodeAddressRange bounds; // Only valid if code != NULL. -} PyTraceInfo; - // Internal structure: you should not use it directly, but use public functions // like PyThreadState_EnterTracing() and PyThreadState_LeaveTracing(). typedef struct _PyCFrame { @@ -77,7 +75,6 @@ typedef struct _PyCFrame { * discipline and make sure that instances of this struct cannot * accessed outside of their lifetime. 
*/ - uint8_t use_tracing; // 0 or 255 (or'ed into opcode, hence 8-bit type) /* Pointer to the currently executing frame (it can be NULL) */ struct _PyInterpreterFrame *current_frame; struct _PyCFrame *previous; @@ -157,7 +154,7 @@ struct _ts { This is to prevent the actual trace/profile code from being recorded in the trace/profile. */ int tracing; - int tracing_what; /* The event currently being traced, if any. */ + int what_event; /* The event currently being monitored, if any. */ /* Pointer to current _PyCFrame in the C stack frame of the currently, * or most recently, executing _PyEval_EvalFrameDefault. */ @@ -228,8 +225,6 @@ struct _ts { /* Unique thread state id. */ uint64_t id; - PyTraceInfo trace_info; - _PyStackChunk *datastack_chunk; PyObject **datastack_top; PyObject **datastack_limit; diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 75a74ffa2f9dff..3394726dfffd72 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -98,9 +98,16 @@ typedef struct { Py_ssize_t length; /* Number of code points in the string */ Py_hash_t hash; /* Hash value; -1 if not set */ struct { - /* If interned is set, the two references from the - dictionary to this object are *not* counted in ob_refcnt. */ - unsigned int interned:1; + /* If interned is non-zero, the two references from the + dictionary to this object are *not* counted in ob_refcnt. + The possible values here are: + 0: Not Interned + 1: Interned + 2: Interned and Immortal + 3: Interned, Immortal, and Static + This categorization allows the runtime to determine the right + cleanup mechanism at runtime shutdown. */ + unsigned int interned:2; /* Character size: - PyUnicode_1BYTE_KIND (1): @@ -135,7 +142,7 @@ typedef struct { unsigned int ascii:1; /* Padding to ensure that PyUnicode_DATA() is always aligned to 4 bytes (see issue #19537 on m68k). */ - unsigned int :26; + unsigned int :25; } state; } PyASCIIObject; @@ -183,6 +190,8 @@ PyAPI_FUNC(int) _PyUnicode_CheckConsistency( /* Interning state. */ #define SSTATE_NOT_INTERNED 0 #define SSTATE_INTERNED_MORTAL 1 +#define SSTATE_INTERNED_IMMORTAL 2 +#define SSTATE_INTERNED_IMMORTAL_STATIC 3 /* Use only if you know it's a string */ static inline unsigned int PyUnicode_CHECK_INTERNED(PyObject *op) { diff --git a/Include/internal/pycore_atexit.h b/Include/internal/pycore_atexit.h new file mode 100644 index 00000000000000..b4663b396852f3 --- /dev/null +++ b/Include/internal/pycore_atexit.h @@ -0,0 +1,56 @@ +#ifndef Py_INTERNAL_ATEXIT_H +#define Py_INTERNAL_ATEXIT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + + +//############### +// runtime atexit + +typedef void (*atexit_callbackfunc)(void); + +struct _atexit_runtime_state { +#define NEXITFUNCS 32 + atexit_callbackfunc callbacks[NEXITFUNCS]; + int ncallbacks; +}; + + +//################### +// interpreter atexit + +struct atexit_callback; +typedef struct atexit_callback { + atexit_datacallbackfunc func; + void *data; + struct atexit_callback *next; +} atexit_callback; + +typedef struct { + PyObject *func; + PyObject *args; + PyObject *kwargs; +} atexit_py_callback; + +struct atexit_state { + atexit_callback *ll_callbacks; + atexit_callback *last_ll_callback; + + // XXX The rest of the state could be moved to the atexit module state + // and a low-level callback added for it during module exec. + // For the moment we leave it here. 
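+    // Python-level callbacks registered via atexit.register():
+    // `callbacks` holds `ncallbacks` entries in use, out of `callback_len`
+    // allocated slots.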
+ atexit_py_callback **callbacks; + int ncallbacks; + int callback_len; +}; + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_ATEXIT_H */ diff --git a/Include/internal/pycore_bytesobject.h b/Include/internal/pycore_bytesobject.h index 9173a4f105f800..d36fa9569d64a5 100644 --- a/Include/internal/pycore_bytesobject.h +++ b/Include/internal/pycore_bytesobject.h @@ -9,11 +9,6 @@ extern "C" { #endif -/* runtime lifecycle */ - -extern PyStatus _PyBytes_InitTypes(PyInterpreterState *); - - /* Substring Search. Returns the index of the first occurrence of diff --git a/Include/internal/pycore_call.h b/Include/internal/pycore_call.h index 55378e3dfebf24..5d9342b562b002 100644 --- a/Include/internal/pycore_call.h +++ b/Include/internal/pycore_call.h @@ -116,6 +116,16 @@ _PyObject_FastCallTstate(PyThreadState *tstate, PyObject *func, PyObject *const return _PyObject_VectorcallTstate(tstate, func, args, (size_t)nargs, NULL); } +PyObject *const * +_PyStack_UnpackDict(PyThreadState *tstate, + PyObject *const *args, Py_ssize_t nargs, + PyObject *kwargs, PyObject **p_kwnames); + +void +_PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, + PyObject *kwnames); + +void _PyStack_UnpackDict_FreeNoDecRef(PyObject *const *stack, PyObject *kwnames); #ifdef __cplusplus } diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index d742f247ce7b79..c1f017fdb753c5 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -47,12 +47,19 @@ typedef struct { typedef struct { uint16_t counter; - uint16_t type_version[2]; - uint16_t func_version; } _PyBinarySubscrCache; #define INLINE_CACHE_ENTRIES_BINARY_SUBSCR CACHE_ENTRIES(_PyBinarySubscrCache) +typedef struct { + uint16_t counter; + uint16_t class_version[2]; + uint16_t self_type_version[2]; + uint16_t method[4]; +} _PySuperAttrCache; + +#define INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR CACHE_ENTRIES(_PySuperAttrCache) + typedef struct { uint16_t counter; uint16_t version[2]; @@ -75,7 +82,6 @@ typedef struct { typedef struct { uint16_t counter; uint16_t func_version[2]; - uint16_t min_args; } _PyCallCache; #define INLINE_CACHE_ENTRIES_CALL CACHE_ENTRIES(_PyCallCache) @@ -221,6 +227,8 @@ extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range); /* Specialization functions */ +extern void _Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, PyObject *self, + _Py_CODEUNIT *instr, PyObject *name, int load_method); extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name); extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, @@ -235,7 +243,7 @@ extern void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, PyObject *kwnames); extern void _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg, PyObject **locals); -extern void _Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, +extern void _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg); extern void _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg); @@ -445,32 +453,6 @@ adaptive_counter_backoff(uint16_t counter) { /* Line array cache for tracing */ -extern int _PyCode_CreateLineArray(PyCodeObject *co); - -static inline int -_PyCode_InitLineArray(PyCodeObject *co) -{ - if (co->_co_linearray) { - return 0; - } - return _PyCode_CreateLineArray(co); -} - -static inline int -_PyCode_LineNumberFromArray(PyCodeObject *co, int index) -{ - 
assert(co->_co_linearray != NULL); - assert(index >= 0); - assert(index < Py_SIZE(co)); - if (co->_co_linearray_entry_size == 2) { - return ((int16_t *)co->_co_linearray)[index]; - } - else { - assert(co->_co_linearray_entry_size == 4); - return ((int32_t *)co->_co_linearray)[index]; - } -} - typedef struct _PyShimCodeDef { const uint8_t *code; int codelen; @@ -504,6 +486,10 @@ extern uint32_t _Py_next_func_version; #define COMPARISON_NOT_EQUALS (COMPARISON_UNORDERED | COMPARISON_LESS_THAN | COMPARISON_GREATER_THAN) +extern int _Py_Instrument(PyCodeObject *co, PyInterpreterState *interp); + +extern int _Py_GetBaseOpcode(PyCodeObject *code, int offset); + #ifdef __cplusplus } diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index 511f0689c93822..80e2a03bca9faf 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -19,6 +19,7 @@ PyAPI_FUNC(PyCodeObject*) _PyAST_Compile( int optimize, struct _arena *arena); +static const _PyCompilerSrcLocation NO_LOCATION = {-1, -1, -1, -1}; typedef struct { int optimize; @@ -33,6 +34,66 @@ extern int _PyAST_Optimize( struct _arena *arena, _PyASTOptimizeState *state); +typedef struct { + int h_offset; + int h_startdepth; + int h_preserve_lasti; +} _PyCompile_ExceptHandlerInfo; + +typedef struct { + int i_opcode; + int i_oparg; + _PyCompilerSrcLocation i_loc; + _PyCompile_ExceptHandlerInfo i_except_handler_info; +} _PyCompile_Instruction; + +typedef struct { + _PyCompile_Instruction *s_instrs; + int s_allocated; + int s_used; + + int *s_labelmap; /* label id --> instr offset */ + int s_labelmap_size; + int s_next_free_label; /* next free label id */ +} _PyCompile_InstructionSequence; + +typedef struct { + PyObject *u_name; + PyObject *u_qualname; /* dot-separated qualified name (lazy) */ + + /* The following fields are dicts that map objects to + the index of them in co_XXX. The index is used as + the argument for opcodes that refer to those collections. + */ + PyObject *u_consts; /* all constants */ + PyObject *u_names; /* all names */ + PyObject *u_varnames; /* local variables */ + PyObject *u_cellvars; /* cell variables */ + PyObject *u_freevars; /* free variables */ + PyObject *u_fasthidden; /* dict; keys are names that are fast-locals only + temporarily within an inlined comprehension. When + value is True, treat as fast-local. 
*/ + + Py_ssize_t u_argcount; /* number of arguments for block */ + Py_ssize_t u_posonlyargcount; /* number of positional only arguments for block */ + Py_ssize_t u_kwonlyargcount; /* number of keyword only arguments for block */ + + int u_firstlineno; /* the first lineno of the block */ +} _PyCompile_CodeUnitMetadata; + + +/* Utility for a number of growing arrays used in the compiler */ +int _PyCompile_EnsureArrayLargeEnough( + int idx, + void **array, + int *alloc, + int default_alloc, + size_t item_size); + +int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj); + +int _PyCompile_InstrSize(int opcode, int oparg); + /* Access compiler internals for unit testing */ PyAPI_FUNC(PyObject*) _PyCompile_CodeGen( @@ -45,6 +106,10 @@ PyAPI_FUNC(PyObject*) _PyCompile_OptimizeCfg( PyObject *instructions, PyObject *consts); +PyAPI_FUNC(PyCodeObject*) +_PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename, + PyObject *instructions); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_fileutils_windows.h b/Include/internal/pycore_fileutils_windows.h index 44874903b092f3..e804d385e76708 100644 --- a/Include/internal/pycore_fileutils_windows.h +++ b/Include/internal/pycore_fileutils_windows.h @@ -25,8 +25,8 @@ typedef struct _FILE_STAT_BASIC_INFORMATION { ULONG DeviceType; ULONG DeviceCharacteristics; ULONG Reserved; - FILE_ID_128 FileId128; LARGE_INTEGER VolumeSerialNumber; + FILE_ID_128 FileId128; } FILE_STAT_BASIC_INFORMATION; typedef enum _FILE_INFO_BY_NAME_CLASS { @@ -75,6 +75,24 @@ static inline BOOL _Py_GetFileInformationByName( return GetFileInformationByName(FileName, FileInformationClass, FileInfoBuffer, FileInfoBufferSize); } +static inline BOOL _Py_GetFileInformationByName_ErrorIsTrustworthy(int error) +{ + switch(error) { + case ERROR_FILE_NOT_FOUND: + case ERROR_PATH_NOT_FOUND: + case ERROR_NOT_READY: + case ERROR_BAD_NET_NAME: + case ERROR_BAD_NETPATH: + case ERROR_BAD_PATHNAME: + case ERROR_INVALID_NAME: + case ERROR_FILENAME_EXCED_RANGE: + return TRUE; + case ERROR_NOT_SUPPORTED: + return FALSE; + } + return FALSE; +} + #endif #endif diff --git a/Include/internal/pycore_flowgraph.h b/Include/internal/pycore_flowgraph.h new file mode 100644 index 00000000000000..720feb18636959 --- /dev/null +++ b/Include/internal/pycore_flowgraph.h @@ -0,0 +1,120 @@ +#ifndef Py_INTERNAL_CFG_H +#define Py_INTERNAL_CFG_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "pycore_opcode_utils.h" +#include "pycore_compile.h" + + +typedef struct { + int i_opcode; + int i_oparg; + _PyCompilerSrcLocation i_loc; + struct _PyCfgBasicblock_ *i_target; /* target block (if jump instruction) */ + struct _PyCfgBasicblock_ *i_except; /* target block when exception is raised */ +} _PyCfgInstruction; + +typedef struct { + int id; +} _PyCfgJumpTargetLabel; + + +typedef struct { + struct _PyCfgBasicblock_ *handlers[CO_MAXBLOCKS+1]; + int depth; +} _PyCfgExceptStack; + +typedef struct _PyCfgBasicblock_ { + /* Each basicblock in a compilation unit is linked via b_list in the + reverse order that the block are allocated. b_list points to the next + block in this list, not to be confused with b_next, which is next by + control flow. 
*/ + struct _PyCfgBasicblock_ *b_list; + /* The label of this block if it is a jump target, -1 otherwise */ + _PyCfgJumpTargetLabel b_label; + /* Exception stack at start of block, used by assembler to create the exception handling table */ + _PyCfgExceptStack *b_exceptstack; + /* pointer to an array of instructions, initially NULL */ + _PyCfgInstruction *b_instr; + /* If b_next is non-NULL, it is a pointer to the next + block reached by normal control flow. */ + struct _PyCfgBasicblock_ *b_next; + /* number of instructions used */ + int b_iused; + /* length of instruction array (b_instr) */ + int b_ialloc; + /* Used by add_checks_for_loads_of_unknown_variables */ + uint64_t b_unsafe_locals_mask; + /* Number of predecessors that a block has. */ + int b_predecessors; + /* depth of stack upon entry of block, computed by stackdepth() */ + int b_startdepth; + /* instruction offset for block, computed by assemble_jump_offsets() */ + int b_offset; + /* Basic block is an exception handler that preserves lasti */ + unsigned b_preserve_lasti : 1; + /* Used by compiler passes to mark whether they have visited a basic block. */ + unsigned b_visited : 1; + /* b_except_handler is used by the cold-detection algorithm to mark exception targets */ + unsigned b_except_handler : 1; + /* b_cold is true if this block is not perf critical (like an exception handler) */ + unsigned b_cold : 1; + /* b_warm is used by the cold-detection algorithm to mark blocks which are definitely not cold */ + unsigned b_warm : 1; +} _PyCfgBasicblock; + +int _PyBasicblock_InsertInstruction(_PyCfgBasicblock *block, int pos, _PyCfgInstruction *instr); + +typedef struct cfg_builder_ { + /* The entryblock, at which control flow begins. All blocks of the + CFG are reachable through the b_next links */ + _PyCfgBasicblock *g_entryblock; + /* Pointer to the most recently allocated block. By following + b_list links, you can reach all allocated blocks. 
*/ + _PyCfgBasicblock *g_block_list; + /* pointer to the block currently being constructed */ + _PyCfgBasicblock *g_curblock; + /* label for the next instruction to be placed */ + _PyCfgJumpTargetLabel g_current_label; +} _PyCfgBuilder; + +int _PyCfgBuilder_UseLabel(_PyCfgBuilder *g, _PyCfgJumpTargetLabel lbl); +int _PyCfgBuilder_Addop(_PyCfgBuilder *g, int opcode, int oparg, _PyCompilerSrcLocation loc); + +int _PyCfgBuilder_Init(_PyCfgBuilder *g); +void _PyCfgBuilder_Fini(_PyCfgBuilder *g); + +_PyCfgInstruction* _PyCfg_BasicblockLastInstr(const _PyCfgBasicblock *b); +int _PyCfg_OptimizeCodeUnit(_PyCfgBuilder *g, PyObject *consts, PyObject *const_cache, + int code_flags, int nlocals, int nparams, int firstlineno); +int _PyCfg_Stackdepth(_PyCfgBasicblock *entryblock, int code_flags); +void _PyCfg_ConvertPseudoOps(_PyCfgBasicblock *entryblock); +int _PyCfg_ResolveJumps(_PyCfgBuilder *g); + + +static inline int +basicblock_nofallthrough(const _PyCfgBasicblock *b) { + _PyCfgInstruction *last = _PyCfg_BasicblockLastInstr(b); + return (last && + (IS_SCOPE_EXIT_OPCODE(last->i_opcode) || + IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode))); +} + +#define BB_NO_FALLTHROUGH(B) (basicblock_nofallthrough(B)) +#define BB_HAS_FALLTHROUGH(B) (!basicblock_nofallthrough(B)) + +PyCodeObject * +_PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *u, PyObject *const_cache, + PyObject *consts, int maxdepth, _PyCompile_InstructionSequence *instrs, + int nlocalsplus, int code_flags, PyObject *filename); + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_CFG_H */ diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 5806cf05f174a9..d8d7fe9ef2ebde 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -19,6 +19,7 @@ struct _frame { struct _PyInterpreterFrame *f_frame; /* points to the frame data */ PyObject *f_trace; /* Trace function */ int f_lineno; /* Current line number. Only valid if non-zero */ + int f_last_traced_line; /* The last line traced for this frame */ char f_trace_lines; /* Emit per-line trace events? */ char f_trace_opcodes; /* Emit per-opcode trace events? */ char f_fast_as_locals; /* Have the fast locals of this frame been converted to a dict? */ @@ -60,7 +61,13 @@ typedef struct _PyInterpreterFrame { // over, or (in the case of a newly-created frame) a totally invalid value: _Py_CODEUNIT *prev_instr; int stacktop; /* Offset of TOS from localsplus */ - uint16_t yield_offset; + /* The return_offset determines where a `RETURN` should go in the caller, + * relative to `prev_instr`. + * It is only meaningful to the callee, + * so it needs to be set in any CALL (to a Python function) + * or SEND (to a coroutine or generator). + * If there is no callee, then it is meaningless. */ + uint16_t return_offset; char owner; /* Locals and stack */ PyObject *localsplus[1]; @@ -120,7 +127,7 @@ _PyFrame_Initialize( frame->stacktop = code->co_nlocalsplus; frame->frame_obj = NULL; frame->prev_instr = _PyCode_CODE(code) - 1; - frame->yield_offset = 0; + frame->return_offset = 0; frame->owner = FRAME_OWNED_BY_THREAD; for (int i = null_locals_from; i < code->co_nlocalsplus; i++) { @@ -137,10 +144,16 @@ _PyFrame_GetLocalsArray(_PyInterpreterFrame *frame) return frame->localsplus; } +/* Fetches the stack pointer, and sets stacktop to -1. + Having stacktop <= 0 ensures that invalid + values are not visible to the cycle GC. + We choose -1 rather than 0 to assist debugging. 
*/ static inline PyObject** _PyFrame_GetStackPointer(_PyInterpreterFrame *frame) { - return frame->localsplus+frame->stacktop; + PyObject **sp = frame->localsplus + frame->stacktop; + frame->stacktop = -1; + return sp; } static inline void diff --git a/Include/internal/pycore_global_objects.h b/Include/internal/pycore_global_objects.h index 9957da1fc5f22a..64d9384df9c5c5 100644 --- a/Include/internal/pycore_global_objects.h +++ b/Include/internal/pycore_global_objects.h @@ -23,13 +23,6 @@ extern "C" { // Only immutable objects should be considered runtime-global. // All others must be per-interpreter. -#define _Py_CACHED_OBJECT(NAME) \ - _PyRuntime.cached_objects.NAME - -struct _Py_cached_objects { - PyObject *interned_strings; -}; - #define _Py_GLOBAL_OBJECT(NAME) \ _PyRuntime.static_objects.NAME #define _Py_SINGLETON(NAME) \ @@ -65,6 +58,8 @@ struct _Py_static_objects { (interp)->cached_objects.NAME struct _Py_interp_cached_objects { + PyObject *interned_strings; + /* AST */ PyObject *str_replace_inf; diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index 4b12ae523c3260..f0740b68dd1114 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -8,15 +8,13 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -#include "pycore_object.h" // _PyObject_IMMORTAL_REFCNT - #ifdef Py_DEBUG static inline void _PyStaticObject_CheckRefcnt(PyObject *obj) { - if (Py_REFCNT(obj) < _PyObject_IMMORTAL_REFCNT) { + if (Py_REFCNT(obj) < _Py_IMMORTAL_REFCNT) { _PyObject_ASSERT_FAILED_MSG(obj, "immortal object has less refcnt than expected " - "_PyObject_IMMORTAL_REFCNT"); + "_Py_IMMORTAL_REFCNT"); } } #endif @@ -884,6 +882,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dst_dir_fd)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(duration)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(e)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(eager_start)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(effective_ids)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(element_factory)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(encode)); @@ -892,6 +891,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_lineno)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_offset)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(endpos)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(entrypoint)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(env)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(errors)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(event)); @@ -973,6 +973,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(instructions)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intern)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intersection)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(is_running)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isatty)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isinstance)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isoformat)); @@ -995,6 +996,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw2)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lambda)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last)); 
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_exc)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_node)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_traceback)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_type)); @@ -1027,6 +1029,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(memlimit)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(message)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(metaclass)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(metadata)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(method)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mod)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mode)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index 17fb9ffbbf9f11..234d5e2a09892c 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -370,6 +370,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(dst_dir_fd) STRUCT_FOR_ID(duration) STRUCT_FOR_ID(e) + STRUCT_FOR_ID(eager_start) STRUCT_FOR_ID(effective_ids) STRUCT_FOR_ID(element_factory) STRUCT_FOR_ID(encode) @@ -378,6 +379,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(end_lineno) STRUCT_FOR_ID(end_offset) STRUCT_FOR_ID(endpos) + STRUCT_FOR_ID(entrypoint) STRUCT_FOR_ID(env) STRUCT_FOR_ID(errors) STRUCT_FOR_ID(event) @@ -459,6 +461,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(instructions) STRUCT_FOR_ID(intern) STRUCT_FOR_ID(intersection) + STRUCT_FOR_ID(is_running) STRUCT_FOR_ID(isatty) STRUCT_FOR_ID(isinstance) STRUCT_FOR_ID(isoformat) @@ -481,6 +484,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(kw2) STRUCT_FOR_ID(lambda) STRUCT_FOR_ID(last) + STRUCT_FOR_ID(last_exc) STRUCT_FOR_ID(last_node) STRUCT_FOR_ID(last_traceback) STRUCT_FOR_ID(last_type) @@ -513,6 +517,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(memlimit) STRUCT_FOR_ID(message) STRUCT_FOR_ID(metaclass) + STRUCT_FOR_ID(metadata) STRUCT_FOR_ID(method) STRUCT_FOR_ID(mod) STRUCT_FOR_ID(mode) diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index 69ed6273b7e609..0a9f24efbdb908 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -14,13 +14,21 @@ struct _import_runtime_state { which is just about every time an extension module is imported. See PyInterpreterState.modules_by_index for more info. */ Py_ssize_t last_module_index; - /* A dict mapping (filename, name) to PyModuleDef for modules. - Only legacy (single-phase init) extension modules are added - and only if they support multiple initialization (m_size >- 0) - or are imported in the main interpreter. - This is initialized lazily in _PyImport_FixupExtensionObject(). - Modules are added there and looked up in _imp.find_extension(). */ - PyObject *extensions; + struct { + /* A thread state tied to the main interpreter, + used exclusively for when the extensions dict is access/modified + from an arbitrary thread. */ + PyThreadState main_tstate; + /* A lock to guard the dict. */ + PyThread_type_lock mutex; + /* A dict mapping (filename, name) to PyModuleDef for modules. + Only legacy (single-phase init) extension modules are added + and only if they support multiple initialization (m_size >- 0) + or are imported in the main interpreter. + This is initialized lazily in _PyImport_FixupExtensionObject(). + Modules are added there and looked up in _imp.find_extension(). 
*/ + PyObject *dict; + } extensions; /* Package context -- the full module name for package imports */ const char * pkgcontext; }; diff --git a/Include/internal/pycore_initconfig.h b/Include/internal/pycore_initconfig.h index 69f88d7d1d46b8..4cbd14a61d4545 100644 --- a/Include/internal/pycore_initconfig.h +++ b/Include/internal/pycore_initconfig.h @@ -44,8 +44,6 @@ struct pyruntimestate; #define _PyStatus_UPDATE_FUNC(err) \ do { (err).func = _PyStatus_GET_FUNC(); } while (0) -PyObject* _PyErr_SetFromPyStatus(PyStatus status); - /* --- PyWideStringList ------------------------------------------------ */ #define _PyWideStringList_INIT (PyWideStringList){.length = 0, .items = NULL} diff --git a/Include/internal/pycore_instruments.h b/Include/internal/pycore_instruments.h new file mode 100644 index 00000000000000..e94d8755546efd --- /dev/null +++ b/Include/internal/pycore_instruments.h @@ -0,0 +1,107 @@ + +#ifndef Py_INTERNAL_INSTRUMENT_H +#define Py_INTERNAL_INSTRUMENT_H + + +#include "pycore_bitutils.h" // _Py_popcount32 +#include "pycore_frame.h" + +#include "cpython/code.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define PY_MONITORING_TOOL_IDS 8 + +/* Local events. + * These require bytecode instrumentation */ + +#define PY_MONITORING_EVENT_PY_START 0 +#define PY_MONITORING_EVENT_PY_RESUME 1 +#define PY_MONITORING_EVENT_PY_RETURN 2 +#define PY_MONITORING_EVENT_PY_YIELD 3 +#define PY_MONITORING_EVENT_CALL 4 +#define PY_MONITORING_EVENT_LINE 5 +#define PY_MONITORING_EVENT_INSTRUCTION 6 +#define PY_MONITORING_EVENT_JUMP 7 +#define PY_MONITORING_EVENT_BRANCH 8 +#define PY_MONITORING_EVENT_STOP_ITERATION 9 + +#define PY_MONITORING_INSTRUMENTED_EVENTS 10 + +/* Other events, mainly exceptions */ + +#define PY_MONITORING_EVENT_RAISE 10 +#define PY_MONITORING_EVENT_EXCEPTION_HANDLED 11 +#define PY_MONITORING_EVENT_PY_UNWIND 12 +#define PY_MONITORING_EVENT_PY_THROW 13 + + +/* Ancilliary events */ + +#define PY_MONITORING_EVENT_C_RETURN 14 +#define PY_MONITORING_EVENT_C_RAISE 15 + + +typedef uint32_t _PyMonitoringEventSet; + +/* Tool IDs */ + +/* These are defined in PEP 669 for convenience to avoid clashes */ +#define PY_MONITORING_DEBUGGER_ID 0 +#define PY_MONITORING_COVERAGE_ID 1 +#define PY_MONITORING_PROFILER_ID 2 +#define PY_MONITORING_OPTIMIZER_ID 5 + +/* Internal IDs used to suuport sys.setprofile() and sys.settrace() */ +#define PY_MONITORING_SYS_PROFILE_ID 6 +#define PY_MONITORING_SYS_TRACE_ID 7 + + +PyObject *_PyMonitoring_RegisterCallback(int tool_id, int event_id, PyObject *obj); + +int _PyMonitoring_SetEvents(int tool_id, _PyMonitoringEventSet events); + +extern int +_Py_call_instrumentation(PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); + +extern int +_Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, + _Py_CODEUNIT *instr); + +extern int +_Py_call_instrumentation_instruction( + PyThreadState *tstate, _PyInterpreterFrame* frame, _Py_CODEUNIT *instr); + +int +_Py_call_instrumentation_jump( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target); + +extern int +_Py_call_instrumentation_arg(PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg); + +extern int +_Py_call_instrumentation_2args(PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1); + +extern void +_Py_call_instrumentation_exc0(PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, 
_Py_CODEUNIT *instr); + +extern void +_Py_call_instrumentation_exc2(PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1); + +extern int +_Py_Instrumentation_GetLine(PyCodeObject *code, int index); + +extern PyObject _PyInstrumentation_MISSING; + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_INSTRUMENT_H */ diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 84303318d21811..7276ce35ba68f0 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -10,8 +10,9 @@ extern "C" { #include <stdbool.h> -#include "pycore_atomic.h" // _Py_atomic_address #include "pycore_ast_state.h" // struct ast_state +#include "pycore_atexit.h" // struct atexit_state +#include "pycore_atomic.h" // _Py_atomic_address #include "pycore_ceval_state.h" // struct _ceval_state #include "pycore_code.h" // struct callable_cache #include "pycore_context.h" // struct _Py_context_state @@ -22,34 +23,22 @@ extern "C" { #include "pycore_function.h" // FUNC_MAX_WATCHERS #include "pycore_genobject.h" // struct _Py_async_gen_state #include "pycore_gc.h" // struct _gc_runtime_state +#include "pycore_global_objects.h" // struct _Py_interp_static_objects #include "pycore_import.h" // struct _import_state +#include "pycore_instruments.h" // PY_MONITORING_EVENTS #include "pycore_list.h" // struct _Py_list_state -#include "pycore_global_objects.h" // struct _Py_interp_static_objects +#include "pycore_object_state.h" // struct _py_object_state +#include "pycore_obmalloc.h" // struct obmalloc_state #include "pycore_tuple.h" // struct _Py_tuple_state #include "pycore_typeobject.h" // struct type_cache #include "pycore_unicodeobject.h" // struct _Py_unicode_state #include "pycore_warnings.h" // struct _warnings_runtime_state -// atexit state -typedef struct { - PyObject *func; - PyObject *args; - PyObject *kwargs; -} atexit_callback; - -struct atexit_state { - atexit_callback **callbacks; - int ncallbacks; - int callback_len; -}; - - struct _Py_long_state { int max_str_digits; }; - /* interpreter state */ /* PyInterpreterState holds the global state for one of the runtime's @@ -61,6 +50,9 @@ struct _is { PyInterpreterState *next; + uint64_t monitoring_version; + uint64_t last_restart_version; + struct pythreads { uint64_t next_unique_id; /* The linked list of threads, newest first. 
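/* [Editorial sketch -- not part of the patch.]  The PY_MONITORING_EVENT_*
 * constants above are bit positions and _PyMonitoringEventSet is the matching
 * bit mask, so a tool enables events by OR-ing bits together.  Assuming the two
 * functions declared above behave as their names suggest (the PyObject* result
 * of _PyMonitoring_RegisterCallback is taken here to be the previously
 * registered callback, if any), wiring up PY_START monitoring for the profiler
 * tool slot could look roughly like this: */
static int
example_enable_py_start_monitoring(PyObject *callback)
{
    PyObject *prev = _PyMonitoring_RegisterCallback(
        PY_MONITORING_PROFILER_ID, PY_MONITORING_EVENT_PY_START, callback);
    Py_XDECREF(prev);
    _PyMonitoringEventSet events = 1u << PY_MONITORING_EVENT_PY_START;
    return _PyMonitoring_SetEvents(PY_MONITORING_PROFILER_ID, events);
}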
*/ @@ -91,6 +83,8 @@ struct _is { int _initialized; int finalizing; + struct _obmalloc_state obmalloc; + struct _ceval_state ceval; struct _gc_runtime_state gc; @@ -138,6 +132,7 @@ struct _is { // One bit is set for each non-NULL entry in code_watchers uint8_t active_code_watchers; + struct _py_object_state object_state; struct _Py_unicode_state unicode; struct _Py_float_state float_state; struct _Py_long_state long_state; @@ -159,6 +154,15 @@ struct _is { struct callable_cache callable_cache; PyCodeObject *interpreter_trampoline; + _Py_Monitors monitors; + bool f_opcode_trace_set; + bool sys_profile_initialized; + bool sys_trace_initialized; + Py_ssize_t sys_profiling_threads; /* Count of threads with c_profilefunc set */ + Py_ssize_t sys_tracing_threads; /* Count of threads with c_tracefunc set */ + PyObject *monitoring_callables[PY_MONITORING_TOOL_IDS][PY_MONITORING_EVENTS]; + PyObject *monitoring_tool_names[PY_MONITORING_TOOL_IDS]; + struct _Py_interp_cached_objects cached_objects; struct _Py_interp_static_objects static_objects; diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index 8c1d017bb95e4e..fe86581e81f6b5 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -82,8 +82,6 @@ PyObject *_PyLong_Add(PyLongObject *left, PyLongObject *right); PyObject *_PyLong_Multiply(PyLongObject *left, PyLongObject *right); PyObject *_PyLong_Subtract(PyLongObject *left, PyLongObject *right); -int _PyLong_AssignValue(PyObject **target, Py_ssize_t value); - /* Used by Python/mystrtoul.c, _PyBytes_FromHex(), _PyBytes_DecodeEscape(), etc. */ PyAPI_DATA(unsigned char) _PyLong_DigitValue[256]; @@ -110,25 +108,155 @@ PyAPI_FUNC(char*) _PyLong_FormatBytesWriter( int base, int alternate); -/* Return 1 if the argument is positive single digit int */ +/* Long value tag bits: + * 0-1: Sign bits value = (1-sign), ie. negative=2, positive=0, zero=1. + * 2: Reserved for immortality bit + * 3+ Unsigned digit count + */ +#define SIGN_MASK 3 +#define SIGN_ZERO 1 +#define SIGN_NEGATIVE 2 +#define NON_SIZE_BITS 3 + +/* All *compact" values are guaranteed to fit into + * a Py_ssize_t with at least one bit to spare. + * In other words, for 64 bit machines, compact + * will be signed 63 (or fewer) bit values + */ + +/* Return 1 if the argument is compact int */ +static inline int +_PyLong_IsNonNegativeCompact(const PyLongObject* op) { + assert(PyLong_Check(op)); + return op->long_value.lv_tag <= (1 << NON_SIZE_BITS); +} + +static inline int +_PyLong_IsCompact(const PyLongObject* op) { + assert(PyLong_Check(op)); + return op->long_value.lv_tag < (2 << NON_SIZE_BITS); +} + static inline int -_PyLong_IsPositiveSingleDigit(PyObject* sub) { - /* For a positive single digit int, the value of Py_SIZE(sub) is 0 or 1. - - We perform a fast check using a single comparison by casting from int - to uint which casts negative numbers to large positive numbers. 
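/* [Editorial worked example -- not part of the patch.]  Applying the lv_tag
 * layout described above (bits 0-1 = 1 - sign, bit 2 reserved, bits 3+ = digit
 * count; see TAG_FROM_SIGN_AND_SIZE() further down) to a few small values: */
static void
example_lv_tag_layout(void)
{
    unsigned int tag_pos42 = (1 - 1)    | (1u << 3);  /* +42: one digit   ->  8 */
    unsigned int tag_zero  = (1 - 0)    | (0u << 3);  /*   0: zero digits ->  1 */
    unsigned int tag_neg7  = (1 - (-1)) | (1u << 3);  /*  -7: one digit   -> 10 */
    /* All three are "compact": each tag is below (2 << NON_SIZE_BITS) == 16,
     * i.e. at most one digit, so they take the fast paths above. */
    (void)tag_pos42; (void)tag_zero; (void)tag_neg7;
}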
- For details see Section 14.2 "Bounds Checking" in the Agner Fog - optimization manual found at: - https://www.agner.org/optimize/optimizing_cpp.pdf - - The function is not affected by -fwrapv, -fno-wrapv and -ftrapv - compiler options of GCC and clang - */ - assert(PyLong_CheckExact(sub)); - Py_ssize_t signed_size = Py_SIZE(sub); - return ((size_t)signed_size) <= 1; +_PyLong_BothAreCompact(const PyLongObject* a, const PyLongObject* b) { + assert(PyLong_Check(a)); + assert(PyLong_Check(b)); + return (a->long_value.lv_tag | b->long_value.lv_tag) < (2 << NON_SIZE_BITS); +} + +/* Returns a *compact* value, iff `_PyLong_IsCompact` is true for `op`. + * + * "Compact" values have at least one bit to spare, + * so that addition and subtraction can be performed on the values + * without risk of overflow. + */ +static inline Py_ssize_t +_PyLong_CompactValue(const PyLongObject *op) +{ + assert(PyLong_Check(op)); + assert(_PyLong_IsCompact(op)); + Py_ssize_t sign = 1 - (op->long_value.lv_tag & SIGN_MASK); + return sign * (Py_ssize_t)op->long_value.ob_digit[0]; +} + +static inline bool +_PyLong_IsZero(const PyLongObject *op) +{ + return (op->long_value.lv_tag & SIGN_MASK) == SIGN_ZERO; +} + +static inline bool +_PyLong_IsNegative(const PyLongObject *op) +{ + return (op->long_value.lv_tag & SIGN_MASK) == SIGN_NEGATIVE; +} + +static inline bool +_PyLong_IsPositive(const PyLongObject *op) +{ + return (op->long_value.lv_tag & SIGN_MASK) == 0; +} + +static inline Py_ssize_t +_PyLong_DigitCount(const PyLongObject *op) +{ + assert(PyLong_Check(op)); + return op->long_value.lv_tag >> NON_SIZE_BITS; } +/* Equivalent to _PyLong_DigitCount(op) * _PyLong_NonCompactSign(op) */ +static inline Py_ssize_t +_PyLong_SignedDigitCount(const PyLongObject *op) +{ + assert(PyLong_Check(op)); + Py_ssize_t sign = 1 - (op->long_value.lv_tag & SIGN_MASK); + return sign * (Py_ssize_t)(op->long_value.lv_tag >> NON_SIZE_BITS); +} + +static inline int +_PyLong_CompactSign(const PyLongObject *op) +{ + assert(PyLong_Check(op)); + assert(_PyLong_IsCompact(op)); + return 1 - (op->long_value.lv_tag & SIGN_MASK); +} + +static inline int +_PyLong_NonCompactSign(const PyLongObject *op) +{ + assert(PyLong_Check(op)); + assert(!_PyLong_IsCompact(op)); + return 1 - (op->long_value.lv_tag & SIGN_MASK); +} + +/* Do a and b have the same sign? */ +static inline int +_PyLong_SameSign(const PyLongObject *a, const PyLongObject *b) +{ + return (a->long_value.lv_tag & SIGN_MASK) == (b->long_value.lv_tag & SIGN_MASK); +} + +#define TAG_FROM_SIGN_AND_SIZE(sign, size) ((1 - (sign)) | ((size) << NON_SIZE_BITS)) + +static inline void +_PyLong_SetSignAndDigitCount(PyLongObject *op, int sign, Py_ssize_t size) +{ + assert(size >= 0); + assert(-1 <= sign && sign <= 1); + assert(sign != 0 || size == 0); + op->long_value.lv_tag = TAG_FROM_SIGN_AND_SIZE(sign, (size_t)size); +} + +static inline void +_PyLong_SetDigitCount(PyLongObject *op, Py_ssize_t size) +{ + assert(size >= 0); + op->long_value.lv_tag = (((size_t)size) << NON_SIZE_BITS) | (op->long_value.lv_tag & SIGN_MASK); +} + +#define NON_SIZE_MASK ~((1 << NON_SIZE_BITS) - 1) + +static inline void +_PyLong_FlipSign(PyLongObject *op) { + unsigned int flipped_sign = 2 - (op->long_value.lv_tag & SIGN_MASK); + op->long_value.lv_tag &= NON_SIZE_MASK; + op->long_value.lv_tag |= flipped_sign; +} + +#define _PyLong_DIGIT_INIT(val) \ + { \ + .ob_base = _PyObject_HEAD_INIT(&PyLong_Type) \ + .long_value = { \ + .lv_tag = TAG_FROM_SIGN_AND_SIZE( \ + (val) == 0 ? 0 : ((val) < 0 ? -1 : 1), \ + (val) == 0 ? 
0 : 1), \ + { ((val) >= 0 ? (val) : -(val)) }, \ + } \ + } + +#define _PyLong_FALSE_TAG TAG_FROM_SIGN_AND_SIZE(0, 0) +#define _PyLong_TRUE_TAG TAG_FROM_SIGN_AND_SIZE(1, 1) + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 318e6f3371c0c3..2ca047846e0935 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -14,21 +14,25 @@ extern "C" { #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_runtime.h" // _PyRuntime -/* This value provides *effective* immortality, meaning the object should never - be deallocated (until runtime finalization). See PEP 683 for more details about - immortality, as well as a proposed mechanism for proper immortality. */ -#define _PyObject_IMMORTAL_REFCNT 999999999 - -#define _PyObject_IMMORTAL_INIT(type) \ - { \ - .ob_refcnt = _PyObject_IMMORTAL_REFCNT, \ - .ob_type = (type), \ - } -#define _PyVarObject_IMMORTAL_INIT(type, size) \ - { \ - .ob_base = _PyObject_IMMORTAL_INIT(type), \ - .ob_size = size, \ - } +/* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid + designated initializer conflicts in C++20. If we use the deinition in + object.h, we will be mixing designated and non-designated initializers in + pycore objects which is forbiddent in C++20. However, if we then use + designated initializers in object.h then Extensions without designated break. + Furthermore, we can't use designated initializers in Extensions since these + are not supported pre-C++20. Thus, keeping an internal copy here is the most + backwards compatible solution */ +#define _PyObject_HEAD_INIT(type) \ + { \ + _PyObject_EXTRA_INIT \ + .ob_refcnt = _Py_IMMORTAL_REFCNT, \ + .ob_type = (type) \ + }, +#define _PyVarObject_HEAD_INIT(type, size) \ + { \ + .ob_base = _PyObject_HEAD_INIT(type) \ + .ob_size = size \ + }, PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc( const char *func, @@ -43,28 +47,41 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc( built against the pre-3.12 stable ABI. 
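/* [Editorial sketch -- not part of the patch.]  The trailing comma lives inside
 * the new _PyObject_HEAD_INIT/_PyVarObject_HEAD_INIT macros, so they are meant
 * to be used inside a larger designated initializer, as the runtime-init
 * headers later in this diff do.  A hypothetical statically allocated,
 * immortal singleton (ExampleObject and example_type are made-up names) would
 * be spelled roughly like this: */
typedef struct { PyObject ob_base; int value; } ExampleObject;
static PyTypeObject example_type;   /* assume this type is fully defined elsewhere */
static ExampleObject example_singleton = {
    .ob_base = _PyObject_HEAD_INIT(&example_type)
    .value = 42,                    /* ob_refcnt starts at _Py_IMMORTAL_REFCNT */
};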
*/ PyAPI_DATA(Py_ssize_t) _Py_RefTotal; -extern void _Py_AddRefTotal(Py_ssize_t); -extern void _Py_IncRefTotal(void); -extern void _Py_DecRefTotal(void); -# define _Py_DEC_REFTOTAL() _Py_RefTotal-- +extern void _Py_AddRefTotal(PyInterpreterState *, Py_ssize_t); +extern void _Py_IncRefTotal(PyInterpreterState *); +extern void _Py_DecRefTotal(PyInterpreterState *); + +# define _Py_DEC_REFTOTAL(interp) \ + interp->object_state.reftotal-- #endif // Increment reference count by n static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n) { #ifdef Py_REF_DEBUG - _Py_AddRefTotal(n); + _Py_AddRefTotal(_PyInterpreterState_GET(), n); #endif op->ob_refcnt += n; } #define _Py_RefcntAdd(op, n) _Py_RefcntAdd(_PyObject_CAST(op), n) +static inline void _Py_SetImmortal(PyObject *op) +{ + if (op) { + op->ob_refcnt = _Py_IMMORTAL_REFCNT; + } +} +#define _Py_SetImmortal(op) _Py_SetImmortal(_PyObject_CAST(op)) + static inline void _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct) { + if (_Py_IsImmortal(op)) { + return; + } _Py_DECREF_STAT_INC(); #ifdef Py_REF_DEBUG - _Py_DEC_REFTOTAL(); + _Py_DEC_REFTOTAL(_PyInterpreterState_GET()); #endif if (--op->ob_refcnt != 0) { assert(op->ob_refcnt > 0); @@ -80,9 +97,12 @@ _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct) static inline void _Py_DECREF_NO_DEALLOC(PyObject *op) { + if (_Py_IsImmortal(op)) { + return; + } _Py_DECREF_STAT_INC(); #ifdef Py_REF_DEBUG - _Py_DEC_REFTOTAL(); + _Py_DEC_REFTOTAL(_PyInterpreterState_GET()); #endif op->ob_refcnt--; #ifdef Py_DEBUG @@ -135,8 +155,9 @@ static inline void _PyObject_InitVar(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size) { assert(op != NULL); - Py_SET_SIZE(op, size); + assert(typeobj != &PyLong_Type); _PyObject_Init((PyObject *)op, typeobj); + Py_SET_SIZE(op, size); } @@ -225,6 +246,8 @@ static inline void _PyObject_GC_UNTRACK( #endif #ifdef Py_REF_DEBUG +extern void _PyInterpreterState_FinalizeRefTotal(PyInterpreterState *); +extern void _Py_FinalizeRefTotal(_PyRuntimeState *); extern void _PyDebug_PrintTotalRefs(void); #endif @@ -370,7 +393,6 @@ extern void _PyObject_FreeInstanceAttributes(PyObject *obj); extern int _PyObject_IsInstanceDictEmpty(PyObject *); extern int _PyType_HasSubclasses(PyTypeObject *); extern PyObject* _PyType_GetSubclasses(PyTypeObject *); -extern PyObject* _PyObject_GenericTryGetAttr(PyObject *, PyObject *); // Access macro to the members which are floating "behind" the object static inline PyMemberDef* _PyHeapType_GET_MEMBERS(PyHeapTypeObject *etype) { diff --git a/Include/internal/pycore_object_state.h b/Include/internal/pycore_object_state.h new file mode 100644 index 00000000000000..94005d77881432 --- /dev/null +++ b/Include/internal/pycore_object_state.h @@ -0,0 +1,31 @@ +#ifndef Py_INTERNAL_OBJECT_STATE_H +#define Py_INTERNAL_OBJECT_STATE_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +struct _py_object_runtime_state { +#ifdef Py_REF_DEBUG + Py_ssize_t interpreter_leaks; +#else + int _not_used; +#endif +}; + +struct _py_object_state { +#ifdef Py_REF_DEBUG + Py_ssize_t reftotal; +#else + int _not_used; +#endif +}; + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_OBJECT_STATE_H */ diff --git a/Include/internal/pycore_obmalloc.h b/Include/internal/pycore_obmalloc.h index a5c7f4528f9126..ca2a0419b4f038 100644 --- a/Include/internal/pycore_obmalloc.h +++ b/Include/internal/pycore_obmalloc.h @@ -657,8 +657,12 @@ struct _obmalloc_usage { #endif /* 
WITH_PYMALLOC_RADIX_TREE */ -struct _obmalloc_state { +struct _obmalloc_global_state { int dump_debug_stats; + Py_ssize_t interpreter_leaks; +}; + +struct _obmalloc_state { struct _obmalloc_pools pools; struct _obmalloc_mgmt mgmt; struct _obmalloc_usage usage; @@ -675,7 +679,11 @@ void _PyObject_VirtualFree(void *, size_t size); /* This function returns the number of allocated memory blocks, regardless of size */ -PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void); +extern Py_ssize_t _Py_GetGlobalAllocatedBlocks(void); +#define _Py_GetAllocatedBlocks() \ + _Py_GetGlobalAllocatedBlocks() +extern Py_ssize_t _PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *); +extern void _PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *); #ifdef WITH_PYMALLOC diff --git a/Include/internal/pycore_obmalloc_init.h b/Include/internal/pycore_obmalloc_init.h index c9f197e72de9f5..8ee72ff2d4126f 100644 --- a/Include/internal/pycore_obmalloc_init.h +++ b/Include/internal/pycore_obmalloc_init.h @@ -54,9 +54,13 @@ extern "C" { # error "NB_SMALL_SIZE_CLASSES should be less than 64" #endif -#define _obmalloc_state_INIT(obmalloc) \ +#define _obmalloc_global_state_INIT \ { \ .dump_debug_stats = -1, \ + } + +#define _obmalloc_state_INIT(obmalloc) \ + { \ .pools = { \ .used = _obmalloc_pools_INIT(obmalloc.pools), \ }, \ diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h index 264c2e960528eb..52ee70a7bfdc54 100644 --- a/Include/internal/pycore_opcode.h +++ b/Include/internal/pycore_opcode.h @@ -12,27 +12,18 @@ extern "C" { #include "opcode.h" +extern const uint32_t _PyOpcode_Jump[9]; + extern const uint8_t _PyOpcode_Caches[256]; extern const uint8_t _PyOpcode_Deopt[256]; #ifdef NEED_OPCODE_TABLES -static const uint32_t _PyOpcode_RelativeJump[9] = { +const uint32_t _PyOpcode_Jump[9] = { 0U, 0U, 536870912U, - 135118848U, - 4163U, - 0U, - 0U, - 0U, - 48U, -}; -static const uint32_t _PyOpcode_Jump[9] = { - 0U, - 0U, - 536870912U, - 135118848U, + 135020544U, 4163U, 0U, 0U, @@ -41,7 +32,7 @@ static const uint32_t _PyOpcode_Jump[9] = { }; const uint8_t _PyOpcode_Caches[256] = { - [BINARY_SUBSCR] = 4, + [BINARY_SUBSCR] = 1, [STORE_SUBSCR] = 1, [UNPACK_SEQUENCE] = 1, [FOR_ITER] = 1, @@ -51,8 +42,8 @@ const uint8_t _PyOpcode_Caches[256] = { [LOAD_GLOBAL] = 4, [BINARY_OP] = 1, [SEND] = 1, - [COMPARE_AND_BRANCH] = 1, - [CALL] = 4, + [LOAD_SUPER_ATTR] = 9, + [CALL] = 3, }; const uint8_t _PyOpcode_Deopt[256] = { @@ -105,11 +96,10 @@ const uint8_t _PyOpcode_Deopt[256] = { [CHECK_EG_MATCH] = CHECK_EG_MATCH, [CHECK_EXC_MATCH] = CHECK_EXC_MATCH, [CLEANUP_THROW] = CLEANUP_THROW, - [COMPARE_AND_BRANCH] = COMPARE_AND_BRANCH, - [COMPARE_AND_BRANCH_FLOAT] = COMPARE_AND_BRANCH, - [COMPARE_AND_BRANCH_INT] = COMPARE_AND_BRANCH, - [COMPARE_AND_BRANCH_STR] = COMPARE_AND_BRANCH, [COMPARE_OP] = COMPARE_OP, + [COMPARE_OP_FLOAT] = COMPARE_OP, + [COMPARE_OP_INT] = COMPARE_OP, + [COMPARE_OP_STR] = COMPARE_OP, [CONTAINS_OP] = CONTAINS_OP, [COPY] = COPY, [COPY_FREE_VARS] = COPY_FREE_VARS, @@ -123,6 +113,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [DICT_UPDATE] = DICT_UPDATE, [END_ASYNC_FOR] = END_ASYNC_FOR, [END_FOR] = END_FOR, + [END_SEND] = END_SEND, [EXTENDED_ARG] = EXTENDED_ARG, [FORMAT_VALUE] = FORMAT_VALUE, [FOR_ITER] = FOR_ITER, @@ -138,13 +129,28 @@ const uint8_t _PyOpcode_Deopt[256] = { [GET_YIELD_FROM_ITER] = GET_YIELD_FROM_ITER, [IMPORT_FROM] = IMPORT_FROM, [IMPORT_NAME] = IMPORT_NAME, + [INSTRUMENTED_CALL] = INSTRUMENTED_CALL, + [INSTRUMENTED_CALL_FUNCTION_EX] = INSTRUMENTED_CALL_FUNCTION_EX, 
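/* [Editorial sketch -- not part of the patch.]  _PyOpcode_Caches[op] is the
 * number of inline CACHE entries that follow an instruction, and
 * _PyOpcode_Deopt[op] maps a specialized opcode back to its base form.
 * Assuming the 3.12 _Py_CODEUNIT layout with .op.code/.op.arg accessors, a
 * disassembler-style walk over a bytecode array (EXTENDED_ARG handling
 * omitted) would step like this: */
static void
example_walk_bytecode(const _Py_CODEUNIT *code, Py_ssize_t n_codeunits)
{
    for (Py_ssize_t i = 0; i < n_codeunits; ) {
        int opcode = _PyOpcode_Deopt[code[i].op.code];   /* un-specialize */
        int oparg = code[i].op.arg;
        /* ... decode opcode/oparg here ... */
        (void)oparg;
        i += 1 + _PyOpcode_Caches[opcode];               /* skip inline caches */
    }
}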
+ [INSTRUMENTED_END_FOR] = INSTRUMENTED_END_FOR, + [INSTRUMENTED_END_SEND] = INSTRUMENTED_END_SEND, + [INSTRUMENTED_FOR_ITER] = INSTRUMENTED_FOR_ITER, + [INSTRUMENTED_INSTRUCTION] = INSTRUMENTED_INSTRUCTION, + [INSTRUMENTED_JUMP_BACKWARD] = INSTRUMENTED_JUMP_BACKWARD, + [INSTRUMENTED_JUMP_FORWARD] = INSTRUMENTED_JUMP_FORWARD, + [INSTRUMENTED_LINE] = INSTRUMENTED_LINE, + [INSTRUMENTED_POP_JUMP_IF_FALSE] = INSTRUMENTED_POP_JUMP_IF_FALSE, + [INSTRUMENTED_POP_JUMP_IF_NONE] = INSTRUMENTED_POP_JUMP_IF_NONE, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = INSTRUMENTED_POP_JUMP_IF_NOT_NONE, + [INSTRUMENTED_POP_JUMP_IF_TRUE] = INSTRUMENTED_POP_JUMP_IF_TRUE, + [INSTRUMENTED_RESUME] = INSTRUMENTED_RESUME, + [INSTRUMENTED_RETURN_CONST] = INSTRUMENTED_RETURN_CONST, + [INSTRUMENTED_RETURN_VALUE] = INSTRUMENTED_RETURN_VALUE, + [INSTRUMENTED_YIELD_VALUE] = INSTRUMENTED_YIELD_VALUE, [INTERPRETER_EXIT] = INTERPRETER_EXIT, [IS_OP] = IS_OP, [JUMP_BACKWARD] = JUMP_BACKWARD, [JUMP_BACKWARD_NO_INTERRUPT] = JUMP_BACKWARD_NO_INTERRUPT, [JUMP_FORWARD] = JUMP_FORWARD, - [JUMP_IF_FALSE_OR_POP] = JUMP_IF_FALSE_OR_POP, - [JUMP_IF_TRUE_OR_POP] = JUMP_IF_TRUE_OR_POP, [KW_NAMES] = KW_NAMES, [LIST_APPEND] = LIST_APPEND, [LIST_EXTEND] = LIST_EXTEND, @@ -175,6 +181,8 @@ const uint8_t _PyOpcode_Deopt[256] = { [LOAD_GLOBAL_BUILTIN] = LOAD_GLOBAL, [LOAD_GLOBAL_MODULE] = LOAD_GLOBAL, [LOAD_NAME] = LOAD_NAME, + [LOAD_SUPER_ATTR] = LOAD_SUPER_ATTR, + [LOAD_SUPER_ATTR_METHOD] = LOAD_SUPER_ATTR, [MAKE_CELL] = MAKE_CELL, [MAKE_FUNCTION] = MAKE_FUNCTION, [MAP_ADD] = MAP_ADD, @@ -193,6 +201,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [PUSH_NULL] = PUSH_NULL, [RAISE_VARARGS] = RAISE_VARARGS, [RERAISE] = RERAISE, + [RESERVED] = RESERVED, [RESUME] = RESUME, [RETURN_CONST] = RETURN_CONST, [RETURN_GENERATOR] = RETURN_GENERATOR, @@ -231,23 +240,25 @@ const uint8_t _PyOpcode_Deopt[256] = { #endif // NEED_OPCODE_TABLES #ifdef Py_DEBUG -static const char *const _PyOpcode_OpName[264] = { +static const char *const _PyOpcode_OpName[267] = { [CACHE] = "CACHE", [POP_TOP] = "POP_TOP", [PUSH_NULL] = "PUSH_NULL", [INTERPRETER_EXIT] = "INTERPRETER_EXIT", [END_FOR] = "END_FOR", + [END_SEND] = "END_SEND", [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", [BINARY_OP_ADD_UNICODE] = "BINARY_OP_ADD_UNICODE", - [BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE", [NOP] = "NOP", - [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", + [BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE", [UNARY_NEGATIVE] = "UNARY_NEGATIVE", [UNARY_NOT] = "UNARY_NOT", + [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", [BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT", - [BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT", [UNARY_INVERT] = "UNARY_INVERT", + [BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT", + [RESERVED] = "RESERVED", [BINARY_OP_SUBTRACT_INT] = "BINARY_OP_SUBTRACT_INT", [BINARY_SUBSCR_DICT] = "BINARY_SUBSCR_DICT", [BINARY_SUBSCR_GETITEM] = "BINARY_SUBSCR_GETITEM", @@ -255,21 +266,21 @@ static const char *const _PyOpcode_OpName[264] = { [BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT", [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", [CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS", - [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", - [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", [BINARY_SUBSCR] = "BINARY_SUBSCR", [BINARY_SLICE] = "BINARY_SLICE", [STORE_SLICE] = "STORE_SLICE", - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", - 
[CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", + [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", + [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", [GET_LEN] = "GET_LEN", [MATCH_MAPPING] = "MATCH_MAPPING", [MATCH_SEQUENCE] = "MATCH_SEQUENCE", [MATCH_KEYS] = "MATCH_KEYS", - [CALL_NO_KW_BUILTIN_FAST] = "CALL_NO_KW_BUILTIN_FAST", + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", [PUSH_EXC_INFO] = "PUSH_EXC_INFO", [CHECK_EXC_MATCH] = "CHECK_EXC_MATCH", [CHECK_EG_MATCH] = "CHECK_EG_MATCH", + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", + [CALL_NO_KW_BUILTIN_FAST] = "CALL_NO_KW_BUILTIN_FAST", [CALL_NO_KW_BUILTIN_O] = "CALL_NO_KW_BUILTIN_O", [CALL_NO_KW_ISINSTANCE] = "CALL_NO_KW_ISINSTANCE", [CALL_NO_KW_LEN] = "CALL_NO_KW_LEN", @@ -279,8 +290,6 @@ static const char *const _PyOpcode_OpName[264] = { [CALL_NO_KW_METHOD_DESCRIPTOR_O] = "CALL_NO_KW_METHOD_DESCRIPTOR_O", [CALL_NO_KW_STR_1] = "CALL_NO_KW_STR_1", [CALL_NO_KW_TUPLE_1] = "CALL_NO_KW_TUPLE_1", - [CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1", - [COMPARE_AND_BRANCH_FLOAT] = "COMPARE_AND_BRANCH_FLOAT", [WITH_EXCEPT_START] = "WITH_EXCEPT_START", [GET_AITER] = "GET_AITER", [GET_ANEXT] = "GET_ANEXT", @@ -288,39 +297,39 @@ static const char *const _PyOpcode_OpName[264] = { [BEFORE_WITH] = "BEFORE_WITH", [END_ASYNC_FOR] = "END_ASYNC_FOR", [CLEANUP_THROW] = "CLEANUP_THROW", - [COMPARE_AND_BRANCH_INT] = "COMPARE_AND_BRANCH_INT", - [COMPARE_AND_BRANCH_STR] = "COMPARE_AND_BRANCH_STR", - [FOR_ITER_LIST] = "FOR_ITER_LIST", - [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE", + [CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1", + [COMPARE_OP_FLOAT] = "COMPARE_OP_FLOAT", + [COMPARE_OP_INT] = "COMPARE_OP_INT", + [COMPARE_OP_STR] = "COMPARE_OP_STR", [STORE_SUBSCR] = "STORE_SUBSCR", [DELETE_SUBSCR] = "DELETE_SUBSCR", + [FOR_ITER_LIST] = "FOR_ITER_LIST", + [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE", [FOR_ITER_RANGE] = "FOR_ITER_RANGE", [FOR_ITER_GEN] = "FOR_ITER_GEN", + [LOAD_SUPER_ATTR_METHOD] = "LOAD_SUPER_ATTR_METHOD", [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", + [GET_ITER] = "GET_ITER", + [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", + [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", [LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE", - [GET_ITER] = "GET_ITER", - [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", + [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", + [RETURN_GENERATOR] = "RETURN_GENERATOR", [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY", - [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", [LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT", [LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT", - [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", - [RETURN_GENERATOR] = "RETURN_GENERATOR", [LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT", [LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT", [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES", [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST", + [RETURN_VALUE] = "RETURN_VALUE", [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST", + [SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS", [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST", [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", - [RETURN_VALUE] = "RETURN_VALUE", [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", - [SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS", - [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", - [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", - 
[STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", [POP_EXCEPT] = "POP_EXCEPT", [STORE_NAME] = "STORE_NAME", [DELETE_NAME] = "DELETE_NAME", @@ -343,9 +352,9 @@ static const char *const _PyOpcode_OpName[264] = { [IMPORT_NAME] = "IMPORT_NAME", [IMPORT_FROM] = "IMPORT_FROM", [JUMP_FORWARD] = "JUMP_FORWARD", - [JUMP_IF_FALSE_OR_POP] = "JUMP_IF_FALSE_OR_POP", - [JUMP_IF_TRUE_OR_POP] = "JUMP_IF_TRUE_OR_POP", - [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", + [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", + [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", + [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", [POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE", [POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE", [LOAD_GLOBAL] = "LOAD_GLOBAL", @@ -373,7 +382,7 @@ static const char *const _PyOpcode_OpName[264] = { [STORE_DEREF] = "STORE_DEREF", [DELETE_DEREF] = "DELETE_DEREF", [JUMP_BACKWARD] = "JUMP_BACKWARD", - [COMPARE_AND_BRANCH] = "COMPARE_AND_BRANCH", + [LOAD_SUPER_ATTR] = "LOAD_SUPER_ATTR", [CALL_FUNCTION_EX] = "CALL_FUNCTION_EX", [LOAD_FAST_AND_CLEAR] = "LOAD_FAST_AND_CLEAR", [EXTENDED_ARG] = "EXTENDED_ARG", @@ -385,21 +394,21 @@ static const char *const _PyOpcode_OpName[264] = { [YIELD_VALUE] = "YIELD_VALUE", [RESUME] = "RESUME", [MATCH_CLASS] = "MATCH_CLASS", + [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", [STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST", - [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", [FORMAT_VALUE] = "FORMAT_VALUE", [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP", [BUILD_STRING] = "BUILD_STRING", + [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST", [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", - [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", [LIST_EXTEND] = "LIST_EXTEND", [SET_UPDATE] = "SET_UPDATE", [DICT_MERGE] = "DICT_MERGE", [DICT_UPDATE] = "DICT_UPDATE", + [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", [SEND_GEN] = "SEND_GEN", - [167] = "<167>", [168] = "<168>", [169] = "<169>", [170] = "<170>", @@ -470,24 +479,24 @@ static const char *const _PyOpcode_OpName[264] = { [235] = "<235>", [236] = "<236>", [237] = "<237>", - [238] = "<238>", - [239] = "<239>", - [240] = "<240>", - [241] = "<241>", - [242] = "<242>", - [243] = "<243>", - [244] = "<244>", - [245] = "<245>", - [246] = "<246>", - [247] = "<247>", - [248] = "<248>", - [249] = "<249>", - [250] = "<250>", - [251] = "<251>", - [252] = "<252>", - [253] = "<253>", - [254] = "<254>", - [DO_TRACING] = "DO_TRACING", + [INSTRUMENTED_POP_JUMP_IF_NONE] = "INSTRUMENTED_POP_JUMP_IF_NONE", + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = "INSTRUMENTED_POP_JUMP_IF_NOT_NONE", + [INSTRUMENTED_RESUME] = "INSTRUMENTED_RESUME", + [INSTRUMENTED_CALL] = "INSTRUMENTED_CALL", + [INSTRUMENTED_RETURN_VALUE] = "INSTRUMENTED_RETURN_VALUE", + [INSTRUMENTED_YIELD_VALUE] = "INSTRUMENTED_YIELD_VALUE", + [INSTRUMENTED_CALL_FUNCTION_EX] = "INSTRUMENTED_CALL_FUNCTION_EX", + [INSTRUMENTED_JUMP_FORWARD] = "INSTRUMENTED_JUMP_FORWARD", + [INSTRUMENTED_JUMP_BACKWARD] = "INSTRUMENTED_JUMP_BACKWARD", + [INSTRUMENTED_RETURN_CONST] = "INSTRUMENTED_RETURN_CONST", + [INSTRUMENTED_FOR_ITER] = "INSTRUMENTED_FOR_ITER", + [INSTRUMENTED_POP_JUMP_IF_FALSE] = "INSTRUMENTED_POP_JUMP_IF_FALSE", + [INSTRUMENTED_POP_JUMP_IF_TRUE] = "INSTRUMENTED_POP_JUMP_IF_TRUE", + [INSTRUMENTED_END_FOR] = "INSTRUMENTED_END_FOR", + [INSTRUMENTED_END_SEND] = "INSTRUMENTED_END_SEND", + [INSTRUMENTED_INSTRUCTION] = "INSTRUMENTED_INSTRUCTION", + [INSTRUMENTED_LINE] = "INSTRUMENTED_LINE", + 
[255] = "<255>", [SETUP_FINALLY] = "SETUP_FINALLY", [SETUP_CLEANUP] = "SETUP_CLEANUP", [SETUP_WITH] = "SETUP_WITH", @@ -495,12 +504,14 @@ static const char *const _PyOpcode_OpName[264] = { [JUMP] = "JUMP", [JUMP_NO_INTERRUPT] = "JUMP_NO_INTERRUPT", [LOAD_METHOD] = "LOAD_METHOD", + [LOAD_SUPER_METHOD] = "LOAD_SUPER_METHOD", + [LOAD_ZERO_SUPER_METHOD] = "LOAD_ZERO_SUPER_METHOD", + [LOAD_ZERO_SUPER_ATTR] = "LOAD_ZERO_SUPER_ATTR", [STORE_FAST_MAYBE_NULL] = "STORE_FAST_MAYBE_NULL", }; #endif #define EXTRA_CASES \ - case 167: \ case 168: \ case 169: \ case 170: \ @@ -567,23 +578,7 @@ static const char *const _PyOpcode_OpName[264] = { case 235: \ case 236: \ case 237: \ - case 238: \ - case 239: \ - case 240: \ - case 241: \ - case 242: \ - case 243: \ - case 244: \ - case 245: \ - case 246: \ - case 247: \ - case 248: \ - case 249: \ - case 250: \ - case 251: \ - case 252: \ - case 253: \ - case 254: \ + case 255: \ ; #ifdef __cplusplus diff --git a/Include/internal/pycore_opcode_utils.h b/Include/internal/pycore_opcode_utils.h new file mode 100644 index 00000000000000..1d5ff988290bd4 --- /dev/null +++ b/Include/internal/pycore_opcode_utils.h @@ -0,0 +1,92 @@ +#ifndef Py_INTERNAL_OPCODE_UTILS_H +#define Py_INTERNAL_OPCODE_UTILS_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "pycore_opcode.h" // _PyOpcode_Jump + + +#define MAX_REAL_OPCODE 254 + +#define IS_WITHIN_OPCODE_RANGE(opcode) \ + (((opcode) >= 0 && (opcode) <= MAX_REAL_OPCODE) || \ + IS_PSEUDO_OPCODE(opcode)) + +#define IS_JUMP_OPCODE(opcode) \ + is_bit_set_in_table(_PyOpcode_Jump, opcode) + +#define IS_BLOCK_PUSH_OPCODE(opcode) \ + ((opcode) == SETUP_FINALLY || \ + (opcode) == SETUP_WITH || \ + (opcode) == SETUP_CLEANUP) + +#define HAS_TARGET(opcode) \ + (IS_JUMP_OPCODE(opcode) || IS_BLOCK_PUSH_OPCODE(opcode)) + +/* opcodes that must be last in the basicblock */ +#define IS_TERMINATOR_OPCODE(opcode) \ + (IS_JUMP_OPCODE(opcode) || IS_SCOPE_EXIT_OPCODE(opcode)) + +/* opcodes which are not emitted in codegen stage, only by the assembler */ +#define IS_ASSEMBLER_OPCODE(opcode) \ + ((opcode) == JUMP_FORWARD || \ + (opcode) == JUMP_BACKWARD || \ + (opcode) == JUMP_BACKWARD_NO_INTERRUPT) + +#define IS_BACKWARDS_JUMP_OPCODE(opcode) \ + ((opcode) == JUMP_BACKWARD || \ + (opcode) == JUMP_BACKWARD_NO_INTERRUPT) + +#define IS_UNCONDITIONAL_JUMP_OPCODE(opcode) \ + ((opcode) == JUMP || \ + (opcode) == JUMP_NO_INTERRUPT || \ + (opcode) == JUMP_FORWARD || \ + (opcode) == JUMP_BACKWARD || \ + (opcode) == JUMP_BACKWARD_NO_INTERRUPT) + +#define IS_SCOPE_EXIT_OPCODE(opcode) \ + ((opcode) == RETURN_VALUE || \ + (opcode) == RETURN_CONST || \ + (opcode) == RAISE_VARARGS || \ + (opcode) == RERAISE) + +#define IS_SUPERINSTRUCTION_OPCODE(opcode) \ + ((opcode) == LOAD_FAST__LOAD_FAST || \ + (opcode) == LOAD_FAST__LOAD_CONST || \ + (opcode) == LOAD_CONST__LOAD_FAST || \ + (opcode) == STORE_FAST__LOAD_FAST || \ + (opcode) == STORE_FAST__STORE_FAST) + + +#define LOG_BITS_PER_INT 5 +#define MASK_LOW_LOG_BITS 31 + +static inline int +is_bit_set_in_table(const uint32_t *table, int bitindex) { + /* Is the relevant bit set in the relevant word? */ + /* 512 bits fit into 9 32-bits words. + * Word is indexed by (bitindex>>ln(size of int in bits)). + * Bit within word is the low bits of bitindex. 
+ */ + if (bitindex >= 0 && bitindex < 512) { + uint32_t word = table[bitindex >> LOG_BITS_PER_INT]; + return (word >> (bitindex & MASK_LOW_LOG_BITS)) & 1; + } + else { + return 0; + } +} + +#undef LOG_BITS_PER_INT +#undef MASK_LOW_LOG_BITS + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_OPCODE_UTILS_H */ diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h index 1bb4a9aa103898..4620a269644917 100644 --- a/Include/internal/pycore_pyerrors.h +++ b/Include/internal/pycore_pyerrors.h @@ -109,6 +109,8 @@ extern PyObject* _Py_Offer_Suggestions(PyObject* exception); PyAPI_FUNC(Py_ssize_t) _Py_UTF8_Edit_Cost(PyObject *str_a, PyObject *str_b, Py_ssize_t max_cost); +void _PyErr_FormatNote(const char *format, ...); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index a899e848bb8b3c..f96261a650dac7 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -64,6 +64,7 @@ extern void _PyAtExit_Fini(PyInterpreterState *interp); extern void _PyThread_FiniType(PyInterpreterState *interp); extern void _Py_Deepfreeze_Fini(void); extern void _PyArg_Fini(void); +extern void _Py_FinalizeAllocatedBlocks(_PyRuntimeState *); extern PyStatus _PyGILState_Init(PyInterpreterState *interp); extern PyStatus _PyGILState_SetTstate(PyThreadState *tstate); diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 7046ec8d9adaaf..180ea676bc22eb 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -33,6 +33,13 @@ _Py_IsMainInterpreter(PyInterpreterState *interp) return (interp == _PyInterpreterState_Main()); } +static inline int +_Py_IsMainInterpreterFinalizing(PyInterpreterState *interp) +{ + return (_PyRuntimeState_GetFinalizing(interp->runtime) != NULL && + interp == &interp->runtime->_main_interpreter); +} + static inline const PyConfig * _Py_GetMainConfig(void) @@ -64,17 +71,14 @@ _Py_ThreadCanHandlePendingCalls(void) /* Variable and macro for in-line access to current thread and interpreter state */ -static inline PyThreadState* -_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) -{ - return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->tstate_current); -} +#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE) +extern _Py_thread_local PyThreadState *_Py_tss_tstate; +#endif +PyAPI_DATA(PyThreadState *) _PyThreadState_GetCurrent(void); /* Get the current Python thread state. - Efficient macro reading directly the 'tstate_current' atomic - variable. The macro is unsafe: it does not check for error and it can - return NULL. + This function is unsafe: it does not check for error and it can return NULL. The caller must hold the GIL. 
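/* [Editorial worked example -- not part of the patch.]  IS_JUMP_OPCODE(op)
 * tests bit (op & 31) of _PyOpcode_Jump[op >> 5].  For instance, word 2 of the
 * jump table earlier in this diff is 536870912U == 1u << 29, so the single
 * opcode that word flags is 2 * 32 + 29 == 93: */
static int
example_bit_table_lookup(void)
{
    int opcode = 93;
    uint32_t word = 536870912u;          /* value of _PyOpcode_Jump[93 >> 5] */
    return (word >> (opcode & 31)) & 1;  /* evaluates to 1: opcode 93 is a jump */
}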
@@ -82,9 +86,20 @@ _PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) static inline PyThreadState* _PyThreadState_GET(void) { - return _PyRuntimeState_GetThreadState(&_PyRuntime); +#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE) + return _Py_tss_tstate; +#else + return _PyThreadState_GetCurrent(); +#endif +} + +static inline PyThreadState* +_PyRuntimeState_GetThreadState(_PyRuntimeState *Py_UNUSED(runtime)) +{ + return _PyThreadState_GET(); } + static inline void _Py_EnsureFuncTstateNotNULL(const char *func, PyThreadState *tstate) { @@ -127,15 +142,10 @@ PyAPI_FUNC(void) _PyThreadState_Init( PyThreadState *tstate); PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate); - -static inline void -_PyThreadState_UpdateTracingState(PyThreadState *tstate) -{ - bool use_tracing = - (tstate->tracing == 0) && - (tstate->c_tracefunc != NULL || tstate->c_profilefunc != NULL); - tstate->cframe->use_tracing = (use_tracing ? 255 : 0); -} +extern void _PyThreadState_InitDetached(PyThreadState *, PyInterpreterState *); +extern void _PyThreadState_ClearDetached(PyThreadState *); +extern void _PyThreadState_BindDetached(PyThreadState *); +extern void _PyThreadState_UnbindDetached(PyThreadState *); /* Other */ diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 520109ca440444..d1b165d0ab9c38 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -8,6 +8,7 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_atexit.h" // struct atexit_runtime_state #include "pycore_atomic.h" /* _Py_atomic_address */ #include "pycore_ceval_state.h" // struct _ceval_runtime_state #include "pycore_floatobject.h" // struct _Py_float_runtime_state @@ -15,14 +16,15 @@ extern "C" { #include "pycore_global_objects.h" // struct _Py_global_objects #include "pycore_import.h" // struct _import_runtime_state #include "pycore_interp.h" // PyInterpreterState +#include "pycore_object_state.h" // struct _py_object_runtime_state #include "pycore_parser.h" // struct _parser_runtime_state #include "pycore_pymem.h" // struct _pymem_allocators #include "pycore_pyhash.h" // struct pyhash_runtime_state #include "pycore_pythread.h" // struct _pythread_runtime_state -#include "pycore_obmalloc.h" // struct obmalloc_state #include "pycore_signal.h" // struct _signals_runtime_state #include "pycore_time.h" // struct _time_runtime_state #include "pycore_tracemalloc.h" // struct _tracemalloc_runtime_state +#include "pycore_typeobject.h" // struct types_runtime_state #include "pycore_unicodeobject.h" // struct _Py_unicode_runtime_ids struct _getargs_runtime_state { @@ -85,7 +87,7 @@ typedef struct pyruntimestate { _Py_atomic_address _finalizing; struct _pymem_allocators allocators; - struct _obmalloc_state obmalloc; + struct _obmalloc_global_state obmalloc; struct pyhash_runtime_state pyhash_state; struct _time_runtime_state time; struct _pythread_runtime_state threads; @@ -117,9 +119,6 @@ typedef struct pyruntimestate { unsigned long main_thread; - /* Assuming the current thread holds the GIL, this is the - PyThreadState for the current thread. */ - _Py_atomic_address tstate_current; /* Used for the thread state bound to the current thread. 
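/* [Editorial sketch -- not part of the patch.]  With HAVE_THREAD_LOCAL, fetching
 * the current thread state is now a plain read of the C thread-local
 * _Py_tss_tstate instead of an atomic load of runtime->tstate_current (removed
 * above); the requirement that the caller holds the GIL is unchanged.
 * Typical use: */
static void
example_need_current_tstate(void)
{
    PyThreadState *tstate = _PyThreadState_GET();   /* caller must hold the GIL */
    _Py_EnsureTstateNotNULL(tstate);                /* fatal error if NULL */
    /* ... use tstate ... */
}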
*/ Py_tss_t autoTSSkey; @@ -130,9 +129,7 @@ typedef struct pyruntimestate { struct _parser_runtime_state parser; -#define NEXITFUNCS 32 - void (*exitfuncs[NEXITFUNCS])(void); - int nexitfuncs; + struct _atexit_runtime_state atexit; struct _import_runtime_state imports; struct _ceval_runtime_state ceval; @@ -150,18 +147,12 @@ typedef struct pyruntimestate { void *open_code_userdata; _Py_AuditHookEntry *audit_hook_head; + struct _py_object_runtime_state object_state; struct _Py_float_runtime_state float_state; struct _Py_unicode_runtime_state unicode_state; - - struct { - /* Used to set PyTypeObject.tp_version_tag */ - // bpo-42745: next_version_tag remains shared by all interpreters - // because of static types. - unsigned int next_version_tag; - } types; + struct _types_runtime_state types; /* All the objects that are shared by the runtime's interpreters. */ - struct _Py_cached_objects cached_objects; struct _Py_static_objects static_objects; /* The following fields are here to avoid allocation during init. diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index bdecac944dfd3a..a48461c0742872 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -8,6 +8,7 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_long.h" #include "pycore_object.h" #include "pycore_parser.h" #include "pycore_pymem_init.h" @@ -28,7 +29,7 @@ extern PyTypeObject _PyExc_MemoryError; _pymem_allocators_debug_INIT, \ _pymem_allocators_obj_arena_INIT, \ }, \ - .obmalloc = _obmalloc_state_INIT(runtime.obmalloc), \ + .obmalloc = _obmalloc_global_state_INIT, \ .pyhash_state = pyhash_state_INIT, \ .signals = _signals_RUNTIME_INIT, \ .interpreters = { \ @@ -40,6 +41,11 @@ extern PyTypeObject _PyExc_MemoryError; in accordance with the specification. 
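/* [Editorial sketch -- not part of the patch.]  Under Py_REF_DEBUG the refcount
 * bookkeeping is now split: each interpreter accumulates object_state.reftotal,
 * and whatever remains when an interpreter is finalized presumably ends up in
 * the runtime-wide object_state.interpreter_leaks (via the new
 * _PyInterpreterState_FinalizeRefTotal/_Py_FinalizeRefTotal helpers).  A rough
 * illustration of how a grand total could be assembled from those pieces: */
#ifdef Py_REF_DEBUG
static Py_ssize_t
example_total_refs(_PyRuntimeState *runtime)
{
    Py_ssize_t total = runtime->object_state.interpreter_leaks;
    for (PyInterpreterState *interp = PyInterpreterState_Head();
         interp != NULL;
         interp = PyInterpreterState_Next(interp)) {
        total += interp->object_state.reftotal;
    }
    return total;
}
#endif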
*/ \ .autoTSSkey = Py_tss_NEEDS_INIT, \ .parser = _parser_runtime_state_INIT, \ + .imports = { \ + .extensions = { \ + .main_tstate = _PyThreadState_INIT, \ + }, \ + }, \ .ceval = { \ .perf = _PyEval_RUNTIME_PERF_INIT, \ }, \ @@ -70,13 +76,13 @@ extern PyTypeObject _PyExc_MemoryError; .latin1 = _Py_str_latin1_INIT, \ }, \ .tuple_empty = { \ - .ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0) \ + .ob_base = _PyVarObject_HEAD_INIT(&PyTuple_Type, 0) \ }, \ .hamt_bitmap_node_empty = { \ - .ob_base = _PyVarObject_IMMORTAL_INIT(&_PyHamt_BitmapNode_Type, 0) \ + .ob_base = _PyVarObject_HEAD_INIT(&_PyHamt_BitmapNode_Type, 0) \ }, \ .context_token_missing = { \ - .ob_base = _PyObject_IMMORTAL_INIT(&_PyContextTokenMissing_Type), \ + .ob_base = _PyObject_HEAD_INIT(&_PyContextTokenMissing_Type) \ }, \ }, \ }, \ @@ -87,6 +93,7 @@ extern PyTypeObject _PyExc_MemoryError; { \ .id_refcount = -1, \ .imports = IMPORTS_INIT, \ + .obmalloc = _obmalloc_state_INIT(INTERP.obmalloc), \ .ceval = { \ .recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \ }, \ @@ -106,15 +113,18 @@ extern PyTypeObject _PyExc_MemoryError; .func_state = { \ .next_version = 1, \ }, \ + .types = { \ + .next_version_tag = _Py_TYPE_BASE_VERSION_TAG, \ + }, \ .static_objects = { \ .singletons = { \ ._not_used = 1, \ .hamt_empty = { \ - .ob_base = _PyObject_IMMORTAL_INIT(&_PyHamt_Type), \ + .ob_base = _PyObject_HEAD_INIT(&_PyHamt_Type) \ .h_root = (PyHamtNode*)&_Py_SINGLETON(hamt_bitmap_node_empty), \ }, \ .last_resort_memory_error = { \ - _PyObject_IMMORTAL_INIT(&_PyExc_MemoryError), \ + _PyObject_HEAD_INIT(&_PyExc_MemoryError) \ }, \ }, \ }, \ @@ -130,18 +140,9 @@ extern PyTypeObject _PyExc_MemoryError; // global objects -#define _PyLong_DIGIT_INIT(val) \ - { \ - .ob_base = _PyObject_IMMORTAL_INIT(&PyLong_Type), \ - .long_value = { \ - ((val) == 0 ? 0 : ((val) > 0 ? 1 : -1)), \ - { ((val) >= 0 ? 
(val) : -(val)) }, \ - } \ - } - #define _PyBytes_SIMPLE_INIT(CH, LEN) \ { \ - _PyVarObject_IMMORTAL_INIT(&PyBytes_Type, (LEN)), \ + _PyVarObject_HEAD_INIT(&PyBytes_Type, (LEN)) \ .ob_shash = -1, \ .ob_sval = { (CH) }, \ } @@ -152,7 +153,7 @@ extern PyTypeObject _PyExc_MemoryError; #define _PyUnicode_ASCII_BASE_INIT(LITERAL, ASCII) \ { \ - .ob_base = _PyObject_IMMORTAL_INIT(&PyUnicode_Type), \ + .ob_base = _PyObject_HEAD_INIT(&PyUnicode_Type) \ .length = sizeof(LITERAL) - 1, \ .hash = -1, \ .state = { \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index b240be57369d9d..16f2147aa8e93b 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -876,6 +876,7 @@ extern "C" { INIT_ID(dst_dir_fd), \ INIT_ID(duration), \ INIT_ID(e), \ + INIT_ID(eager_start), \ INIT_ID(effective_ids), \ INIT_ID(element_factory), \ INIT_ID(encode), \ @@ -884,6 +885,7 @@ extern "C" { INIT_ID(end_lineno), \ INIT_ID(end_offset), \ INIT_ID(endpos), \ + INIT_ID(entrypoint), \ INIT_ID(env), \ INIT_ID(errors), \ INIT_ID(event), \ @@ -965,6 +967,7 @@ extern "C" { INIT_ID(instructions), \ INIT_ID(intern), \ INIT_ID(intersection), \ + INIT_ID(is_running), \ INIT_ID(isatty), \ INIT_ID(isinstance), \ INIT_ID(isoformat), \ @@ -987,6 +990,7 @@ extern "C" { INIT_ID(kw2), \ INIT_ID(lambda), \ INIT_ID(last), \ + INIT_ID(last_exc), \ INIT_ID(last_node), \ INIT_ID(last_traceback), \ INIT_ID(last_type), \ @@ -1019,6 +1023,7 @@ extern "C" { INIT_ID(memlimit), \ INIT_ID(message), \ INIT_ID(metaclass), \ + INIT_ID(metadata), \ INIT_ID(method), \ INIT_ID(mod), \ INIT_ID(mode), \ diff --git a/Include/internal/pycore_structseq.h b/Include/internal/pycore_structseq.h index d10a921c55ff8b..bd1e85c6883f01 100644 --- a/Include/internal/pycore_structseq.h +++ b/Include/internal/pycore_structseq.h @@ -15,7 +15,7 @@ PyAPI_FUNC(PyTypeObject *) _PyStructSequence_NewType( PyStructSequence_Desc *desc, unsigned long tp_flags); -PyAPI_FUNC(int) _PyStructSequence_InitBuiltinWithFlags( +extern int _PyStructSequence_InitBuiltinWithFlags( PyTypeObject *type, PyStructSequence_Desc *desc, unsigned long tp_flags); @@ -27,7 +27,7 @@ _PyStructSequence_InitBuiltin(PyTypeObject *type, return _PyStructSequence_InitBuiltinWithFlags(type, desc, 0); } -extern void _PyStructSequence_FiniType(PyTypeObject *type); +extern void _PyStructSequence_FiniBuiltin(PyTypeObject *type); #ifdef __cplusplus } diff --git a/Include/internal/pycore_token.h b/Include/internal/pycore_token.h index 95459ab9f7d004..b9df8766736adf 100644 --- a/Include/internal/pycore_token.h +++ b/Include/internal/pycore_token.h @@ -67,14 +67,18 @@ extern "C" { #define RARROW 51 #define ELLIPSIS 52 #define COLONEQUAL 53 -#define OP 54 -#define AWAIT 55 -#define ASYNC 56 -#define TYPE_IGNORE 57 -#define TYPE_COMMENT 58 -#define SOFT_KEYWORD 59 -#define ERRORTOKEN 60 -#define N_TOKENS 64 +#define EXCLAMATION 54 +#define OP 55 +#define AWAIT 56 +#define ASYNC 57 +#define TYPE_IGNORE 58 +#define TYPE_COMMENT 59 +#define SOFT_KEYWORD 60 +#define FSTRING_START 61 +#define FSTRING_MIDDLE 62 +#define FSTRING_END 63 +#define ERRORTOKEN 64 +#define N_TOKENS 68 #define NT_OFFSET 256 /* Special definitions for cooperation with parser */ @@ -86,6 +90,8 @@ extern "C" { (x) == NEWLINE || \ (x) == INDENT || \ (x) == DEDENT) +#define ISSTRINGLIT(x) ((x) == STRING || \ + (x) == FSTRING_MIDDLE) // Symbols exported for test_peg_generator diff --git a/Include/internal/pycore_tuple.h 
b/Include/internal/pycore_tuple.h index edc70843b57531..335edad89792c3 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -14,7 +14,6 @@ extern "C" { /* runtime lifecycle */ extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *); -extern PyStatus _PyTuple_InitTypes(PyInterpreterState *); extern void _PyTuple_Fini(PyInterpreterState *); diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index cc5ce2875101ea..76253fd5fd864c 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -11,22 +11,17 @@ extern "C" { #endif -/* runtime lifecycle */ +/* state */ -extern PyStatus _PyTypes_InitTypes(PyInterpreterState *); -extern void _PyTypes_FiniTypes(PyInterpreterState *); -extern void _PyTypes_Fini(PyInterpreterState *); - - -/* other API */ - -/* Length of array of slotdef pointers used to store slots with the - same __name__. There should be at most MAX_EQUIV-1 slotdef entries with - the same __name__, for any __name__. Since that's a static property, it is - appropriate to declare fixed-size arrays for this. */ -#define MAX_EQUIV 10 +#define _Py_TYPE_BASE_VERSION_TAG (2<<16) +#define _Py_MAX_GLOBAL_TYPE_VERSION_TAG (_Py_TYPE_BASE_VERSION_TAG - 1) -typedef struct wrapperbase pytype_slotdef; +struct _types_runtime_state { + /* Used to set PyTypeObject.tp_version_tag for core static types. */ + // bpo-42745: next_version_tag remains shared by all interpreters + // because of static types. + unsigned int next_version_tag; +}; // Type attribute lookup cache: speed up attribute and method lookups, @@ -57,6 +52,36 @@ typedef struct { PyObject *tp_weaklist; } static_builtin_state; +struct types_state { + /* Used to set PyTypeObject.tp_version_tag. + It starts at _Py_MAX_GLOBAL_TYPE_VERSION_TAG + 1, + where all those lower numbers are used for core static types. */ + unsigned int next_version_tag; + + struct type_cache type_cache; + size_t num_builtins_initialized; + static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES]; +}; + + +/* runtime lifecycle */ + +extern PyStatus _PyTypes_InitTypes(PyInterpreterState *); +extern void _PyTypes_FiniTypes(PyInterpreterState *); +extern void _PyTypes_Fini(PyInterpreterState *); + + +/* other API */ + +/* Length of array of slotdef pointers used to store slots with the + same __name__. There should be at most MAX_EQUIV-1 slotdef entries with + the same __name__, for any __name__. Since that's a static property, it is + appropriate to declare fixed-size arrays for this. 
*/ +#define MAX_EQUIV 10 + +typedef struct wrapperbase pytype_slotdef; + + static inline PyObject ** _PyStaticType_GET_WEAKREFS_LISTPTR(static_builtin_state *state) { @@ -78,12 +103,6 @@ _PyType_GetModuleState(PyTypeObject *type) return mod->md_state; } -struct types_state { - struct type_cache type_cache; - size_t num_builtins_initialized; - static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES]; -}; - extern int _PyStaticType_InitBuiltin(PyTypeObject *type); extern static_builtin_state * _PyStaticType_GetState(PyTypeObject *); @@ -98,6 +117,11 @@ _Py_type_getattro(PyTypeObject *type, PyObject *name); PyObject *_Py_slot_tp_getattro(PyObject *self, PyObject *name); PyObject *_Py_slot_tp_getattr_hook(PyObject *self, PyObject *name); +PyObject * +_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *meth_found); +PyObject * +_PySuper_LookupDescr(PyTypeObject *su_type, PyObject *su_obj, PyObject *name); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h index 19faceebf1d8ee..1bb0f366e78163 100644 --- a/Include/internal/pycore_unicodeobject.h +++ b/Include/internal/pycore_unicodeobject.h @@ -12,6 +12,7 @@ extern "C" { #include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI void _PyUnicode_ExactDealloc(PyObject *op); +Py_ssize_t _PyUnicode_InternedSize(void); /* runtime lifecycle */ @@ -59,6 +60,7 @@ struct _Py_unicode_state { struct _Py_unicode_ids ids; }; +extern void _PyUnicode_InternInPlace(PyInterpreterState *interp, PyObject **p); extern void _PyUnicode_ClearInterned(PyInterpreterState *interp); diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index fea9b6dbb1a75f..cd41b731537f0e 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -10,2000 +10,2015 @@ extern "C" { /* The following is auto-generated by Tools/build/generate_global_objects.py. 
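/* [Editorial note -- not part of the patch.]  The tp_version_tag space is now
 * partitioned: the shared runtime counter (struct _types_runtime_state above)
 * hands out tags up to _Py_MAX_GLOBAL_TYPE_VERSION_TAG for core static types,
 * and each interpreter's heap types start just past it, per the comment in
 * struct types_state: */
static void
example_version_tag_ranges(void)
{
    unsigned int base = 2u << 16;                    /* _Py_TYPE_BASE_VERSION_TAG == 131072 */
    unsigned int max_global = base - 1;              /* _Py_MAX_GLOBAL_TYPE_VERSION_TAG     */
    unsigned int first_per_interp = max_global + 1;  /* first tag used for heap types       */
    (void)first_per_interp;                          /* == base, so the ranges never overlap */
}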
*/ static inline void -_PyUnicode_InitStaticStrings(void) { +_PyUnicode_InitStaticStrings(PyInterpreterState *interp) { PyObject *string; string = &_Py_ID(CANCELLED); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(FINISHED); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(False); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(JSONDecodeError); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(PENDING); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(Py_Repr); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(TextIOWrapper); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(True); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(WarningMessage); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_WindowsConsoleIO); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__IOBase_closed); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__abc_tpflags__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__abs__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__abstractmethods__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__add__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__aenter__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__aexit__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__aiter__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__all__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__and__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__anext__); assert(_PyUnicode_CheckConsistency(string, 1)); - 
PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__annotations__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__args__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__asyncio_running_event_loop__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__await__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__bases__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__bool__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__build_class__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__builtins__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__bytes__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__call__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__cantrace__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__class__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__class_getitem__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__classcell__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__complex__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__contains__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__copy__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ctypes_from_outparam__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__del__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__delattr__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__delete__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__delitem__); assert(_PyUnicode_CheckConsistency(string, 1)); - 
PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__dict__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__dictoffset__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__dir__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__divmod__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__doc__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__enter__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__eq__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__exit__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__file__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__float__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__floordiv__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__format__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__fspath__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ge__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__get__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__getattr__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__getattribute__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__getinitargs__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__getitem__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__getnewargs__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__getnewargs_ex__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__getstate__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__gt__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__hash__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__iadd__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__iand__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ifloordiv__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ilshift__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__imatmul__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__imod__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__import__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__imul__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__index__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__init__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__init_subclass__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__instancecheck__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__int__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__invert__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ior__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ipow__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__irshift__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__isabstractmethod__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__isub__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__iter__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = 
&_Py_ID(__itruediv__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ixor__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__le__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__len__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__length_hint__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__lltrace__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__loader__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__lshift__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__lt__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__main__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__matmul__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__missing__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__mod__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__module__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__mro_entries__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__mul__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__name__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ne__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__neg__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__new__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__newobj__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__newobj_ex__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__next__); assert(_PyUnicode_CheckConsistency(string, 1)); - 
PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__notes__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__or__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__orig_class__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__origin__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__package__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__parameters__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__path__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__pos__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__pow__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__prepare__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__qualname__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__radd__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rand__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rdivmod__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__reduce__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__reduce_ex__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__repr__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__reversed__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rfloordiv__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rlshift__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rmatmul__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rmod__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rmul__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__ror__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__round__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rpow__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rrshift__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rshift__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rsub__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rtruediv__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__rxor__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__set__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__set_name__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__setattr__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__setitem__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__setstate__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__sizeof__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__slotnames__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__slots__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__spec__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__str__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__sub__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__subclasscheck__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__subclasshook__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = 
&_Py_ID(__truediv__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__trunc__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__typing_is_unpacked_typevartuple__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__typing_prepare_subst__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__typing_subst__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__typing_unpacked_tuple_args__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__warningregistry__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__weaklistoffset__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__weakref__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__xor__); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_abc_impl); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_abstract_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_active); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_annotation); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_anonymous_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_argtypes_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_as_parameter_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_asyncio_future_blocking); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_blksize); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_bootstrap); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_check_retval_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_dealloc_warn); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_feature_version); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_fields_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_finalizing); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_find_and_load); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_fix_up_module); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_flags_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_get_sourcefile); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_handle_fromlist); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_initializing); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_io); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_is_text_encoding); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_length_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_limbo); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_lock_unlock_module); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_loop); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_needs_com_addref_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_pack_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_restype_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_showwarnmsg); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_shutdown); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_slotnames); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_strptime_datetime); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_swappedbytes_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_type_); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_uninitialized_submodules); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_warn_unawaited_coroutine); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(_xoptions); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(a); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(abs_tol); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(access); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(add); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(add_done_callback); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(after_in_child); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(after_in_parent); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(aggregate_class); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(append); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(argdefs); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(arguments); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(argv); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(as_integer_ratio); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(ast); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(attribute); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(authorizer_callback); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(autocommit); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, 
&string); string = &_Py_ID(b); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(backtick); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(base); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(before); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(big); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(binary_form); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(block); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(buffer); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(buffer_callback); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(buffer_size); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(buffering); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(buffers); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(bufsize); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(builtins); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(byteorder); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(bytes); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(bytes_per_sep); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(c); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(c_call); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(c_exception); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(c_return); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cached_statements); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cadata); assert(_PyUnicode_CheckConsistency(string, 1)); - 
PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cafile); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(call); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(call_exception_handler); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(call_soon); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cancel); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(capath); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(category); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cb_type); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(certfile); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(check_same_thread); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(clear); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(close); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(closed); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(closefd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(closure); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_argcount); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_cellvars); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_code); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_consts); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_exceptiontable); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_filename); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_firstlineno); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, 
&string); string = &_Py_ID(co_flags); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_freevars); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_kwonlyargcount); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_linetable); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_name); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_names); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_nlocals); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_posonlyargcount); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_qualname); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_stacksize); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(co_varnames); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(code); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(command); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(comment_factory); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(consts); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(context); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cookie); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(copy); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(copyreg); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(coro); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(count); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cwd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(d); assert(_PyUnicode_CheckConsistency(string, 1)); - 
PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(data); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(database); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(decode); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(decoder); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(default); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(defaultaction); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(delete); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(depth); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(detect_types); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(deterministic); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(device); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dict); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dictcomp); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(difference_update); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(digest); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(digest_size); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(digestmod); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dir_fd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(discard); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dispatch_table); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(displayhook); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dklen); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = 
&_Py_ID(doc); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dont_inherit); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dst); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(dst_dir_fd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(duration); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(e); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(eager_start); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(effective_ids); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(element_factory); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(encode); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(encoding); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(end); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(end_lineno); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(end_offset); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(endpos); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(entrypoint); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(env); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(errors); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(event); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(eventmask); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(exc_type); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(exc_value); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(excepthook); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = 
&_Py_ID(exception); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(exp); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(extend); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(facility); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(factory); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(false); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(family); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fanout); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fd2); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fdel); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fget); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(file); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(file_actions); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(filename); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fileno); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(filepath); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fillvalue); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(filters); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(final); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(find_class); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fix_imports); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(flags); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(flush); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(follow_symlinks); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(format); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(frequency); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(from_param); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fromlist); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fromtimestamp); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fromutc); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(fset); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(func); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(future); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(generation); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(genexpr); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(get); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(get_debug); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(get_event_loop); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(get_loop); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(get_source); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(getattr); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(getstate); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(gid); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(globals); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(groupindex); 
assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(groups); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(handle); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(hash_name); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(header); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(headers); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(hi); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(hook); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(id); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(ident); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(ignore); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(imag); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(importlib); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(in_fd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(incoming); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(indexgroup); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(inf); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(inheritable); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(initial); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(initial_bytes); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(initial_value); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(initval); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(inner_size); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, 
&string); string = &_Py_ID(input); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(insert_comments); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(insert_pis); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(instructions); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(intern); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(intersection); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(is_running); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(isatty); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(isinstance); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(isoformat); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(isolation_level); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(istext); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(item); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(items); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(iter); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(iterable); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(iterations); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(join); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(jump); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(keepends); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(key); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(keyfile); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(keys); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(kind); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(kw); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(kw1); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(kw2); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(lambda); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(last); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(last_exc); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(last_node); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(last_traceback); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(last_type); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(last_value); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(latin1); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(leaf_size); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(len); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(length); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(level); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(limit); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(line); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(line_buffering); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(lineno); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(listcomp); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(little); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(lo); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(locale); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(locals); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(logoption); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(loop); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(mapping); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(match); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(max_length); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(maxdigits); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(maxevents); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(maxmem); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(maxsplit); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(maxvalue); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(memLevel); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(memlimit); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(message); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(metaclass); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(metadata); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(method); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(mod); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(mode); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(module); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(module_globals); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(modules); assert(_PyUnicode_CheckConsistency(string, 1)); - 
PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(mro); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(msg); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(mycmp); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(n); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(n_arg); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(n_fields); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(n_sequence_fields); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(n_unnamed_fields); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(name); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(name_from); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(namespace_separator); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(namespaces); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(narg); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(ndigits); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(new_limit); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(newline); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(newlines); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(next); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(node_depth); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(node_offset); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(ns); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(nstype); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(nt); 
assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(null); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(number); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(obj); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(object); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(offset); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(offset_dst); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(offset_src); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(on_type_read); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(onceregistry); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(only_keys); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(oparg); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(opcode); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(open); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(opener); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(operation); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(optimize); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(options); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(order); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(out_fd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(outgoing); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(overlapped); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(owner); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, 
&string); string = &_Py_ID(p); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(pages); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(parent); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(password); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(path); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(pattern); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(peek); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(persistent_id); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(persistent_load); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(person); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(pi_factory); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(pid); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(policy); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(pos); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(pos1); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(pos2); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(posix); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(print_file_and_line); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(priority); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(progress); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(progress_handler); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(proto); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(protocol); assert(_PyUnicode_CheckConsistency(string, 1)); - 
PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(ps1); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(ps2); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(query); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(quotetabs); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(r); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(raw); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(read); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(read1); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(readable); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(readall); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(readinto); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(readinto1); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(readline); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(readonly); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(real); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(reducer_override); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(registry); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(rel_tol); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(reload); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(repl); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(replace); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(reserved); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(reset); 
assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(resetids); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(return); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(reverse); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(reversed); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(s); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(salt); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sched_priority); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(scheduler); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(seek); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(seekable); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(selectors); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(self); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(send); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sep); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sequence); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(server_hostname); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(server_side); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(session); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(setcomp); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(setpgroup); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(setsid); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(setsigdef); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(setsigmask); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(setstate); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(shape); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(show_cmd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(signed); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(size); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sizehint); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(skip_file_prefixes); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sleep); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sock); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sort); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sound); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(source); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(source_traceback); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(src); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(src_dir_fd); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(stacklevel); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(start); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(statement); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(status); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(stderr); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(stdin); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(stdout); assert(_PyUnicode_CheckConsistency(string, 
1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(step); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(store_name); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(strategy); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(strftime); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(strict); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(strict_mode); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(string); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(sub_key); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(symmetric_difference_update); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tabsize); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tag); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(target); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(target_is_directory); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(task); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tb_frame); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tb_lasti); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tb_lineno); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tb_next); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tell); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(template); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(term); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(text); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = 
&_Py_ID(threading); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(throw); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(timeout); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(times); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(timetuple); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(top); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(trace_callback); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(traceback); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(trailers); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(translate); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(true); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(truncate); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(twice); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(txt); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(type); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tz); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(tzname); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(uid); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(unlink); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(unraisablehook); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(uri); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(usedforsecurity); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(value); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + 
_PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(values); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(version); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(volume); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(warnings); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(warnoptions); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(wbits); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(week); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(weekday); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(which); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(who); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(withdata); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(writable); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(write); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(write_through); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(x); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(year); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(zdict); assert(_PyUnicode_CheckConsistency(string, 1)); - PyUnicode_InternInPlace(&string); + _PyUnicode_InternInPlace(interp, &string); } /* End auto-generated code */ #ifdef __cplusplus diff --git a/Include/object.h b/Include/object.h index 844b9c4a51c3e4..66c3df0d7f780a 100644 --- a/Include/object.h +++ b/Include/object.h @@ -78,12 +78,76 @@ whose size is determined when the object is allocated. /* PyObject_HEAD defines the initial segment of every PyObject. */ #define PyObject_HEAD PyObject ob_base; -#define PyObject_HEAD_INIT(type) \ - { _PyObject_EXTRA_INIT \ - 1, (type) }, +/* +Immortalization: + +The following indicates the immortalization strategy depending on the amount +of available bits in the reference count field. All strategies are backwards +compatible but the specific reference count value or immortalization check +might change depending on the specializations for the underlying system. 
-#define PyVarObject_HEAD_INIT(type, size) \ - { PyObject_HEAD_INIT(type) (size) }, +Proper deallocation of immortal instances requires distinguishing between +statically allocated immortal instances vs those promoted by the runtime to be +immortal. The latter should be the only instances that require +cleanup during runtime finalization. +*/ + +#if SIZEOF_VOID_P > 4 +/* +On 64-bit systems, an object will be marked as immortal by setting all of the +lower 32 bits of the reference count field, which is equal to: 0xFFFFFFFF + +Using the lower 32 bits makes the value backwards compatible by allowing +C extensions without the updated checks in Py_INCREF and Py_DECREF to safely +increase and decrease the object's reference count. The object would lose its +immortality, but the execution would still be correct. + +Reference count increases will use saturated arithmetic, taking advantage of +having all the lower 32 bits set, which prevents the reference count from going +beyond the refcount limit. Immortality checks for reference count decreases will +be done by checking the sign bit of the lower 32 bits. +*/ +#define _Py_IMMORTAL_REFCNT UINT_MAX + +#else +/* +On 32-bit systems, an object will be marked as immortal by setting all of the +lower 30 bits of the reference count field, which is equal to: 0x3FFFFFFF + +Using the lower 30 bits makes the value backwards compatible by allowing +C extensions without the updated checks in Py_INCREF and Py_DECREF to safely +increase and decrease the object's reference count. The object would lose its +immortality, but the execution would still be correct. + +Reference count increases and decreases will first go through an immortality +check by comparing the reference count field to the immortality reference count. +*/ +#define _Py_IMMORTAL_REFCNT (UINT_MAX >> 2) +#endif + +// Make all internal uses of PyObject_HEAD_INIT immortal while preserving the +// C-API expectation that the refcnt will be set to 1. +#ifdef Py_BUILD_CORE +#define PyObject_HEAD_INIT(type) \ + { \ + _PyObject_EXTRA_INIT \ + { _Py_IMMORTAL_REFCNT }, \ + (type) \ + }, +#else +#define PyObject_HEAD_INIT(type) \ + { \ + _PyObject_EXTRA_INIT \ + { 1 }, \ + (type) \ + }, +#endif /* Py_BUILD_CORE */ + +#define PyVarObject_HEAD_INIT(type, size) \ + { \ + PyObject_HEAD_INIT(type) \ + (size) \ + }, /* PyObject_VAR_HEAD defines the initial segment of all variable-size * container objects. These end with a declaration of an array with 1 @@ -101,7 +165,12 @@ whose size is determined when the object is allocated. */ struct _object { _PyObject_HEAD_EXTRA - Py_ssize_t ob_refcnt; + union { + Py_ssize_t ob_refcnt; +#if SIZEOF_VOID_P > 4 + PY_UINT32_T ob_refcnt_split[2]; +#endif + }; PyTypeObject *ob_type; }; @@ -138,8 +207,13 @@ static inline PyTypeObject* Py_TYPE(PyObject *ob) { # define Py_TYPE(ob) Py_TYPE(_PyObject_CAST(ob)) #endif +PyAPI_DATA(PyTypeObject) PyLong_Type; +PyAPI_DATA(PyTypeObject) PyBool_Type; + // bpo-39573: The Py_SET_SIZE() function must be used to set an object size.
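The comment above describes the immortalization scheme in prose; the short, self-contained C sketch below models it with plain types so the saturated increment and the sign-bit immortality test can be tried in isolation. This is an illustrative model only, not CPython's code: it assumes a 64-bit reference count field and a little-endian host (the real Py_INCREF picks the low half portably via PY_BIG_ENDIAN), and the names refcnt_t, is_immortal and incref are made up for the example.

/* Illustrative model only -- not CPython code. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* All of the lower 32 bits set, as described above. */
#define IMMORTAL_REFCNT UINT32_MAX

typedef union {
    int64_t  ob_refcnt;           /* full-width count (like Py_ssize_t) */
    uint32_t ob_refcnt_split[2];  /* 32-bit halves; [0] is the low half on a
                                     little-endian host (assumed here) */
} refcnt_t;

static int is_immortal(const refcnt_t *r)
{
    /* Immortality test: the sign bit of the lower 32 bits is set. */
    return (int32_t)r->ob_refcnt_split[0] < 0;
}

static void incref(refcnt_t *r)
{
    /* Saturated add: if the low half would wrap to 0, leave it unchanged,
       so an immortal object's count never moves. */
    uint32_t new_refcnt = r->ob_refcnt_split[0] + 1;
    if (new_refcnt == 0) {
        return;
    }
    r->ob_refcnt_split[0] = new_refcnt;
}

int main(void)
{
    refcnt_t mortal   = { .ob_refcnt = 1 };
    refcnt_t immortal = { .ob_refcnt = IMMORTAL_REFCNT };

    incref(&mortal);
    incref(&immortal);

    /* The mortal count advances normally; the immortal one is unchanged. */
    assert(!is_immortal(&mortal) && mortal.ob_refcnt == 2);
    assert(is_immortal(&immortal) && immortal.ob_refcnt == IMMORTAL_REFCNT);
    printf("mortal=%lld immortal=0x%llx\n",
           (long long)mortal.ob_refcnt,
           (unsigned long long)immortal.ob_refcnt);
    return 0;
}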
static inline Py_ssize_t Py_SIZE(PyObject *ob) { + assert(ob->ob_type != &PyLong_Type); + assert(ob->ob_type != &PyBool_Type); PyVarObject *var_ob = _PyVarObject_CAST(ob); return var_ob->ob_size; } @@ -147,6 +221,15 @@ static inline Py_ssize_t Py_SIZE(PyObject *ob) { # define Py_SIZE(ob) Py_SIZE(_PyObject_CAST(ob)) #endif +static inline Py_ALWAYS_INLINE int _Py_IsImmortal(PyObject *op) +{ +#if SIZEOF_VOID_P > 4 + return _Py_CAST(PY_INT32_T, op->ob_refcnt) < 0; +#else + return op->ob_refcnt == _Py_IMMORTAL_REFCNT; +#endif +} +#define _Py_IsImmortal(op) _Py_IsImmortal(_PyObject_CAST(op)) static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { return Py_TYPE(ob) == type; @@ -157,6 +240,13 @@ static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) { + // This immortal check is for code that is unaware of immortal objects. + // The runtime tracks these objects and we should avoid as much + // as possible having extensions inadvertently change the refcnt + // of an immortalized object. + if (_Py_IsImmortal(ob)) { + return; + } ob->ob_refcnt = refcnt; } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 @@ -171,8 +261,9 @@ static inline void Py_SET_TYPE(PyObject *ob, PyTypeObject *type) { # define Py_SET_TYPE(ob, type) Py_SET_TYPE(_PyObject_CAST(ob), type) #endif - static inline void Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) { + assert(ob->ob_base.ob_type != &PyLong_Type); + assert(ob->ob_base.ob_type != &PyBool_Type); ob->ob_size = size; } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 @@ -494,14 +585,9 @@ you can count such references to the type object.) extern Py_ssize_t _Py_RefTotal; # define _Py_INC_REFTOTAL() _Py_RefTotal++ # define _Py_DEC_REFTOTAL() _Py_RefTotal-- -# elif defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) -extern void _Py_IncRefTotal(void); -extern void _Py_DecRefTotal(void); -# define _Py_INC_REFTOTAL() _Py_IncRefTotal() -# define _Py_DEC_REFTOTAL() _Py_DecRefTotal() # elif !defined(Py_LIMITED_API) || Py_LIMITED_API+0 > 0x030C0000 -extern void _Py_IncRefTotal_DO_NOT_USE_THIS(void); -extern void _Py_DecRefTotal_DO_NOT_USE_THIS(void); +PyAPI_FUNC(void) _Py_IncRefTotal_DO_NOT_USE_THIS(void); +PyAPI_FUNC(void) _Py_DecRefTotal_DO_NOT_USE_THIS(void); # define _Py_INC_REFTOTAL() _Py_IncRefTotal_DO_NOT_USE_THIS() # define _Py_DEC_REFTOTAL() _Py_DecRefTotal_DO_NOT_USE_THIS() # endif @@ -523,19 +609,33 @@ PyAPI_FUNC(void) Py_DecRef(PyObject *); PyAPI_FUNC(void) _Py_IncRef(PyObject *); PyAPI_FUNC(void) _Py_DecRef(PyObject *); -static inline void Py_INCREF(PyObject *op) +static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op) { #if defined(Py_REF_DEBUG) && defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030A0000 // Stable ABI for Python 3.10 built in debug mode. _Py_IncRef(op); #else - _Py_INCREF_STAT_INC(); // Non-limited C API and limited C API for Python 3.9 and older access // directly PyObject.ob_refcnt. 
+#if SIZEOF_VOID_P > 4 + // Portable saturated add, branching on the carry flag and set low bits + PY_UINT32_T cur_refcnt = op->ob_refcnt_split[PY_BIG_ENDIAN]; + PY_UINT32_T new_refcnt = cur_refcnt + 1; + if (new_refcnt == 0) { + return; + } + op->ob_refcnt_split[PY_BIG_ENDIAN] = new_refcnt; +#else + // Explicitly check immortality against the immortal value + if (_Py_IsImmortal(op)) { + return; + } + op->ob_refcnt++; +#endif + _Py_INCREF_STAT_INC(); #ifdef Py_REF_DEBUG _Py_INC_REFTOTAL(); -#endif // Py_REF_DEBUG - op->ob_refcnt++; +#endif #endif } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 @@ -552,6 +652,9 @@ static inline void Py_DECREF(PyObject *op) { #elif defined(Py_REF_DEBUG) static inline void Py_DECREF(const char *filename, int lineno, PyObject *op) { + if (_Py_IsImmortal(op)) { + return; + } _Py_DECREF_STAT_INC(); _Py_DEC_REFTOTAL(); if (--op->ob_refcnt != 0) { @@ -566,11 +669,14 @@ static inline void Py_DECREF(const char *filename, int lineno, PyObject *op) #define Py_DECREF(op) Py_DECREF(__FILE__, __LINE__, _PyObject_CAST(op)) #else -static inline void Py_DECREF(PyObject *op) +static inline Py_ALWAYS_INLINE void Py_DECREF(PyObject *op) { - _Py_DECREF_STAT_INC(); // Non-limited C API and limited C API for Python 3.9 and older access // directly PyObject.ob_refcnt. + if (_Py_IsImmortal(op)) { + return; + } + _Py_DECREF_STAT_INC(); if (--op->ob_refcnt == 0) { _Py_Dealloc(op); } @@ -720,7 +826,7 @@ PyAPI_FUNC(int) Py_IsNone(PyObject *x); #define Py_IsNone(x) Py_Is((x), Py_None) /* Macro for returning Py_None from a function */ -#define Py_RETURN_NONE return Py_NewRef(Py_None) +#define Py_RETURN_NONE return Py_None /* Py_NotImplemented is a singleton used to signal that an operation is @@ -730,7 +836,7 @@ PyAPI_DATA(PyObject) _Py_NotImplementedStruct; /* Don't use this directly */ #define Py_NotImplemented (&_Py_NotImplementedStruct) /* Macro for returning Py_NotImplemented from a function */ -#define Py_RETURN_NOTIMPLEMENTED return Py_NewRef(Py_NotImplemented) +#define Py_RETURN_NOTIMPLEMENTED return Py_NotImplemented /* Rich comparison opcodes */ #define Py_LT 0 diff --git a/Include/opcode.h b/Include/opcode.h index fa694dfbbb0a3d..f6f4af8c793d8d 100644 --- a/Include/opcode.h +++ b/Include/opcode.h @@ -13,10 +13,12 @@ extern "C" { #define PUSH_NULL 2 #define INTERPRETER_EXIT 3 #define END_FOR 4 +#define END_SEND 5 #define NOP 9 #define UNARY_NEGATIVE 11 #define UNARY_NOT 12 #define UNARY_INVERT 15 +#define RESERVED 17 #define BINARY_SUBSCR 25 #define BINARY_SLICE 26 #define STORE_SLICE 27 @@ -66,8 +68,6 @@ extern "C" { #define IMPORT_NAME 108 #define IMPORT_FROM 109 #define JUMP_FORWARD 110 -#define JUMP_IF_FALSE_OR_POP 111 -#define JUMP_IF_TRUE_OR_POP 112 #define POP_JUMP_IF_FALSE 114 #define POP_JUMP_IF_TRUE 115 #define LOAD_GLOBAL 116 @@ -95,7 +95,7 @@ extern "C" { #define STORE_DEREF 138 #define DELETE_DEREF 139 #define JUMP_BACKWARD 140 -#define COMPARE_AND_BRANCH 141 +#define LOAD_SUPER_ATTR 141 #define CALL_FUNCTION_EX 142 #define LOAD_FAST_AND_CLEAR 143 #define EXTENDED_ARG 144 @@ -118,6 +118,24 @@ extern "C" { #define KW_NAMES 172 #define CALL_INTRINSIC_1 173 #define CALL_INTRINSIC_2 174 +#define MIN_INSTRUMENTED_OPCODE 238 +#define INSTRUMENTED_POP_JUMP_IF_NONE 238 +#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 239 +#define INSTRUMENTED_RESUME 240 +#define INSTRUMENTED_CALL 241 +#define INSTRUMENTED_RETURN_VALUE 242 +#define INSTRUMENTED_YIELD_VALUE 243 +#define INSTRUMENTED_CALL_FUNCTION_EX 244 +#define INSTRUMENTED_JUMP_FORWARD 245 +#define 
INSTRUMENTED_JUMP_BACKWARD 246 +#define INSTRUMENTED_RETURN_CONST 247 +#define INSTRUMENTED_FOR_ITER 248 +#define INSTRUMENTED_POP_JUMP_IF_FALSE 249 +#define INSTRUMENTED_POP_JUMP_IF_TRUE 250 +#define INSTRUMENTED_END_FOR 251 +#define INSTRUMENTED_END_SEND 252 +#define INSTRUMENTED_INSTRUCTION 253 +#define INSTRUMENTED_LINE 254 #define MIN_PSEUDO_OPCODE 256 #define SETUP_FINALLY 256 #define SETUP_CLEANUP 257 @@ -126,76 +144,82 @@ extern "C" { #define JUMP 260 #define JUMP_NO_INTERRUPT 261 #define LOAD_METHOD 262 -#define STORE_FAST_MAYBE_NULL 263 -#define MAX_PSEUDO_OPCODE 263 -#define BINARY_OP_ADD_FLOAT 5 -#define BINARY_OP_ADD_INT 6 -#define BINARY_OP_ADD_UNICODE 7 -#define BINARY_OP_INPLACE_ADD_UNICODE 8 -#define BINARY_OP_MULTIPLY_FLOAT 10 -#define BINARY_OP_MULTIPLY_INT 13 -#define BINARY_OP_SUBTRACT_FLOAT 14 -#define BINARY_OP_SUBTRACT_INT 16 -#define BINARY_SUBSCR_DICT 17 -#define BINARY_SUBSCR_GETITEM 18 -#define BINARY_SUBSCR_LIST_INT 19 -#define BINARY_SUBSCR_TUPLE_INT 20 -#define CALL_PY_EXACT_ARGS 21 -#define CALL_PY_WITH_DEFAULTS 22 -#define CALL_BOUND_METHOD_EXACT_ARGS 23 -#define CALL_BUILTIN_CLASS 24 -#define CALL_BUILTIN_FAST_WITH_KEYWORDS 28 -#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 29 -#define CALL_NO_KW_BUILTIN_FAST 34 -#define CALL_NO_KW_BUILTIN_O 38 -#define CALL_NO_KW_ISINSTANCE 39 -#define CALL_NO_KW_LEN 40 -#define CALL_NO_KW_LIST_APPEND 41 -#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 42 -#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 43 -#define CALL_NO_KW_METHOD_DESCRIPTOR_O 44 -#define CALL_NO_KW_STR_1 45 -#define CALL_NO_KW_TUPLE_1 46 -#define CALL_NO_KW_TYPE_1 47 -#define COMPARE_AND_BRANCH_FLOAT 48 -#define COMPARE_AND_BRANCH_INT 56 -#define COMPARE_AND_BRANCH_STR 57 -#define FOR_ITER_LIST 58 -#define FOR_ITER_TUPLE 59 -#define FOR_ITER_RANGE 62 -#define FOR_ITER_GEN 63 -#define LOAD_ATTR_CLASS 64 -#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 65 -#define LOAD_ATTR_INSTANCE_VALUE 66 -#define LOAD_ATTR_MODULE 67 -#define LOAD_ATTR_PROPERTY 70 -#define LOAD_ATTR_SLOT 72 -#define LOAD_ATTR_WITH_HINT 73 -#define LOAD_ATTR_METHOD_LAZY_DICT 76 -#define LOAD_ATTR_METHOD_NO_DICT 77 -#define LOAD_ATTR_METHOD_WITH_VALUES 78 -#define LOAD_CONST__LOAD_FAST 79 -#define LOAD_FAST__LOAD_CONST 80 -#define LOAD_FAST__LOAD_FAST 81 -#define LOAD_GLOBAL_BUILTIN 82 -#define LOAD_GLOBAL_MODULE 84 -#define STORE_ATTR_INSTANCE_VALUE 86 -#define STORE_ATTR_SLOT 87 -#define STORE_ATTR_WITH_HINT 88 -#define STORE_FAST__LOAD_FAST 113 -#define STORE_FAST__STORE_FAST 153 -#define STORE_SUBSCR_DICT 154 -#define STORE_SUBSCR_LIST_INT 158 -#define UNPACK_SEQUENCE_LIST 159 -#define UNPACK_SEQUENCE_TUPLE 160 -#define UNPACK_SEQUENCE_TWO_TUPLE 161 -#define SEND_GEN 166 -#define DO_TRACING 255 +#define LOAD_SUPER_METHOD 263 +#define LOAD_ZERO_SUPER_METHOD 264 +#define LOAD_ZERO_SUPER_ATTR 265 +#define STORE_FAST_MAYBE_NULL 266 +#define MAX_PSEUDO_OPCODE 266 +#define BINARY_OP_ADD_FLOAT 6 +#define BINARY_OP_ADD_INT 7 +#define BINARY_OP_ADD_UNICODE 8 +#define BINARY_OP_INPLACE_ADD_UNICODE 10 +#define BINARY_OP_MULTIPLY_FLOAT 13 +#define BINARY_OP_MULTIPLY_INT 14 +#define BINARY_OP_SUBTRACT_FLOAT 16 +#define BINARY_OP_SUBTRACT_INT 18 +#define BINARY_SUBSCR_DICT 19 +#define BINARY_SUBSCR_GETITEM 20 +#define BINARY_SUBSCR_LIST_INT 21 +#define BINARY_SUBSCR_TUPLE_INT 22 +#define CALL_PY_EXACT_ARGS 23 +#define CALL_PY_WITH_DEFAULTS 24 +#define CALL_BOUND_METHOD_EXACT_ARGS 28 +#define CALL_BUILTIN_CLASS 29 +#define CALL_BUILTIN_FAST_WITH_KEYWORDS 34 +#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 38 
+#define CALL_NO_KW_BUILTIN_FAST 39 +#define CALL_NO_KW_BUILTIN_O 40 +#define CALL_NO_KW_ISINSTANCE 41 +#define CALL_NO_KW_LEN 42 +#define CALL_NO_KW_LIST_APPEND 43 +#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 44 +#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 45 +#define CALL_NO_KW_METHOD_DESCRIPTOR_O 46 +#define CALL_NO_KW_STR_1 47 +#define CALL_NO_KW_TUPLE_1 48 +#define CALL_NO_KW_TYPE_1 56 +#define COMPARE_OP_FLOAT 57 +#define COMPARE_OP_INT 58 +#define COMPARE_OP_STR 59 +#define FOR_ITER_LIST 62 +#define FOR_ITER_TUPLE 63 +#define FOR_ITER_RANGE 64 +#define FOR_ITER_GEN 65 +#define LOAD_SUPER_ATTR_METHOD 66 +#define LOAD_ATTR_CLASS 67 +#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 70 +#define LOAD_ATTR_INSTANCE_VALUE 72 +#define LOAD_ATTR_MODULE 73 +#define LOAD_ATTR_PROPERTY 76 +#define LOAD_ATTR_SLOT 77 +#define LOAD_ATTR_WITH_HINT 78 +#define LOAD_ATTR_METHOD_LAZY_DICT 79 +#define LOAD_ATTR_METHOD_NO_DICT 80 +#define LOAD_ATTR_METHOD_WITH_VALUES 81 +#define LOAD_CONST__LOAD_FAST 82 +#define LOAD_FAST__LOAD_CONST 84 +#define LOAD_FAST__LOAD_FAST 86 +#define LOAD_GLOBAL_BUILTIN 87 +#define LOAD_GLOBAL_MODULE 88 +#define STORE_ATTR_INSTANCE_VALUE 111 +#define STORE_ATTR_SLOT 112 +#define STORE_ATTR_WITH_HINT 113 +#define STORE_FAST__LOAD_FAST 153 +#define STORE_FAST__STORE_FAST 154 +#define STORE_SUBSCR_DICT 158 +#define STORE_SUBSCR_LIST_INT 159 +#define UNPACK_SEQUENCE_LIST 160 +#define UNPACK_SEQUENCE_TUPLE 161 +#define UNPACK_SEQUENCE_TWO_TUPLE 166 +#define SEND_GEN 167 #define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\ || ((op) == JUMP) \ || ((op) == JUMP_NO_INTERRUPT) \ || ((op) == LOAD_METHOD) \ + || ((op) == LOAD_SUPER_METHOD) \ + || ((op) == LOAD_ZERO_SUPER_METHOD) \ + || ((op) == LOAD_ZERO_SUPER_ATTR) \ || ((op) == STORE_FAST_MAYBE_NULL) \ ) diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 049cdfa30897ca..aaedd563a905e9 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 12 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 6 +#define PY_RELEASE_SERIAL 7 /* Version as a string */ -#define PY_VERSION "3.12.0a6+" +#define PY_VERSION "3.12.0a7+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Include/pymacro.h b/Include/pymacro.h index e37cda44c5ebf1..342d2a7b844adf 100644 --- a/Include/pymacro.h +++ b/Include/pymacro.h @@ -3,20 +3,23 @@ // gh-91782: On FreeBSD 12, if the _POSIX_C_SOURCE and _XOPEN_SOURCE macros are // defined, <sys/cdefs.h> disables C11 support and <assert.h> does not define -// the static_assert() macro. Define the static_assert() macro in Python until -// <sys/cdefs.h> suports C11: +// the static_assert() macro. // https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=255290 -#if defined(__FreeBSD__) && !defined(static_assert) -# define static_assert _Static_assert -#endif - -// static_assert is defined in glibc from version 2.16. Before it requires -// compiler support (gcc >= 4.6) and is called _Static_assert. -// In C++ 11 static_assert is a keyword, redefining is undefined behaviour. -#if (defined(__GLIBC__) \ - && (__GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ <= 16)) \ - && !(defined(__cplusplus) && __cplusplus >= 201103L) \ - && !defined(static_assert)) +// +// macOS <= 10.10 doesn't define static_assert in assert.h at all despite +// having C11 compiler support. +// +// static_assert is defined in glibc from version 2.16. 
Compiler support for +// the C11 _Static_assert keyword is in gcc >= 4.6. +// +// MSVC makes static_assert a keyword in C11-17, contrary to the standards. +// +// In C++11 and C2x, static_assert is a keyword, redefining is undefined +// behaviour. So only define if building as C (if __STDC_VERSION__ is defined), +// not C++, and only for C11-17. +#if !defined(static_assert) && (defined(__GNUC__) || defined(__clang__)) \ + && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \ + && __STDC_VERSION__ <= 201710L # define static_assert _Static_assert #endif diff --git a/Include/pyport.h b/Include/pyport.h index eef0fe1bfd71d8..bd0ba6d0681b21 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -184,7 +184,6 @@ typedef Py_ssize_t Py_ssize_clean_t; # define Py_LOCAL_INLINE(type) static inline type #endif -// bpo-28126: Py_MEMCPY is kept for backwards compatibility, #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 # define Py_MEMCPY memcpy #endif @@ -663,6 +662,27 @@ extern char * _getpty(int *, int, mode_t, int); # define WITH_THREAD #endif +#ifdef WITH_THREAD +# ifdef Py_BUILD_CORE +# ifdef HAVE_THREAD_LOCAL +# error "HAVE_THREAD_LOCAL is already defined" +# endif +# define HAVE_THREAD_LOCAL 1 +# ifdef thread_local +# define _Py_thread_local thread_local +# elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_THREADS__) +# define _Py_thread_local _Thread_local +# elif defined(_MSC_VER) /* AKA NT_THREADS */ +# define _Py_thread_local __declspec(thread) +# elif defined(__GNUC__) /* includes clang */ +# define _Py_thread_local __thread +# else + // fall back to the PyThread_tss_*() API, or ignore. +# undef HAVE_THREAD_LOCAL +# endif +# endif +#endif + /* Check that ALT_SOABI is consistent with Py_TRACE_REFS: ./configure --with-trace-refs should must be used to define Py_TRACE_REFS */ #if defined(ALT_SOABI) && defined(Py_TRACE_REFS) diff --git a/Include/pystats.h b/Include/pystats.h index 25ed4bddc7240c..4b961bad2a43e4 100644 --- a/Include/pystats.h +++ b/Include/pystats.h @@ -72,8 +72,6 @@ typedef struct _object_stats { uint64_t type_cache_collisions; } ObjectStats; -# - typedef struct _stats { OpcodeStats opcode_stats[256]; CallStats call_stats; diff --git a/Include/pythonrun.h b/Include/pythonrun.h index 41d82e89f84876..154c7450cb934f 100644 --- a/Include/pythonrun.h +++ b/Include/pythonrun.h @@ -12,7 +12,10 @@ PyAPI_FUNC(PyObject *) Py_CompileString(const char *, const char *, int); PyAPI_FUNC(void) PyErr_Print(void); PyAPI_FUNC(void) PyErr_PrintEx(int); PyAPI_FUNC(void) PyErr_Display(PyObject *, PyObject *, PyObject *); + +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030C0000 PyAPI_FUNC(void) PyErr_DisplayException(PyObject *); +#endif /* Stuff with no proper home (yet) */ diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h index 74474f5bb8f976..5839c747a29275 100644 --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -626,7 +626,7 @@ PyAPI_FUNC(PyObject*) PyUnicode_AsLatin1String( /* --- ASCII Codecs ------------------------------------------------------- - Only 7-bit ASCII data is excepted. All other codes generate errors. + Only 7-bit ASCII data is expected. All other codes generate errors. 
*/ diff --git a/Lib/_strptime.py b/Lib/_strptime.py index b97dfcce1e8e4d..77ccdc9e1d789b 100644 --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -290,22 +290,6 @@ def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon): return 1 + days_to_week + day_of_week -def _calc_julian_from_V(iso_year, iso_week, iso_weekday): - """Calculate the Julian day based on the ISO 8601 year, week, and weekday. - ISO weeks start on Mondays, with week 01 being the week containing 4 Jan. - ISO week days range from 1 (Monday) to 7 (Sunday). - """ - correction = datetime_date(iso_year, 1, 4).isoweekday() + 3 - ordinal = (iso_week * 7) + iso_weekday - correction - # ordinal may be negative or 0 now, which means the date is in the previous - # calendar year - if ordinal < 1: - ordinal += datetime_date(iso_year, 1, 1).toordinal() - iso_year -= 1 - ordinal -= datetime_date(iso_year, 1, 1).toordinal() - return iso_year, ordinal - - def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): """Return a 2-tuple consisting of a time struct and an int containing the number of microseconds based on the input string and the @@ -483,7 +467,8 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): else: tz = value break - # Deal with the cases where ambiguities arize + + # Deal with the cases where ambiguities arise # don't assume default values for ISO week/year if year is None and iso_year is not None: if iso_week is None or weekday is None: @@ -511,7 +496,6 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): elif year is None: year = 1900 - # If we know the week of the year and what day of that week, we can figure # out the Julian day of the year. if julian is None and weekday is not None: @@ -520,7 +504,10 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): julian = _calc_julian_from_U_or_W(year, week_of_year, weekday, week_starts_Mon) elif iso_year is not None and iso_week is not None: - year, julian = _calc_julian_from_V(iso_year, iso_week, weekday + 1) + datetime_result = datetime_date.fromisocalendar(iso_year, iso_week, weekday + 1) + year = datetime_result.year + month = datetime_result.month + day = datetime_result.day if julian is not None and julian <= 0: year -= 1 yday = 366 if calendar.isleap(year) else 365 diff --git a/Lib/abc.py b/Lib/abc.py index 42048ddb855381..f8a4e11ce9c3b1 100644 --- a/Lib/abc.py +++ b/Lib/abc.py @@ -18,7 +18,7 @@ class that has a metaclass derived from ABCMeta cannot be class C(metaclass=ABCMeta): @abstractmethod - def my_abstract_method(self, ...): + def my_abstract_method(self, arg1, arg2, argN): ... """ funcobj.__isabstractmethod__ = True diff --git a/Lib/ast.py b/Lib/ast.py index 2cbc80a9835aa5..d9733a79d3a78f 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -25,6 +25,7 @@ :license: Python License. """ import sys +import re from _ast import * from contextlib import contextmanager, nullcontext from enum import IntEnum, auto, _simple_enum @@ -305,28 +306,17 @@ def get_docstring(node, clean=True): return text -def _splitlines_no_ff(source): +_line_pattern = re.compile(r"(.*?(?:\r\n|\n|\r|$))") +def _splitlines_no_ff(source, maxlines=None): """Split a string into lines ignoring form feed and other chars. This mimics how the Python parser splits source code. 
""" - idx = 0 lines = [] - next_line = '' - while idx < len(source): - c = source[idx] - next_line += c - idx += 1 - # Keep \r\n together - if c == '\r' and idx < len(source) and source[idx] == '\n': - next_line += '\n' - idx += 1 - if c in '\r\n': - lines.append(next_line) - next_line = '' - - if next_line: - lines.append(next_line) + for lineno, match in enumerate(_line_pattern.finditer(source), 1): + if maxlines is not None and lineno > maxlines: + break + lines.append(match[0]) return lines @@ -360,7 +350,7 @@ def get_source_segment(source, node, *, padded=False): except AttributeError: return None - lines = _splitlines_no_ff(source) + lines = _splitlines_no_ff(source, maxlines=end_lineno+1) if end_lineno == lineno: return lines[lineno].encode()[col_offset:end_col_offset].decode() diff --git a/Lib/asyncio/base_tasks.py b/Lib/asyncio/base_tasks.py index 26298e638cbf0d..c907b683413732 100644 --- a/Lib/asyncio/base_tasks.py +++ b/Lib/asyncio/base_tasks.py @@ -15,11 +15,13 @@ def _task_repr_info(task): info.insert(1, 'name=%r' % task.get_name()) - coro = coroutines._format_coroutine(task._coro) - info.insert(2, f'coro=<{coro}>') - if task._fut_waiter is not None: - info.insert(3, f'wait_for={task._fut_waiter!r}') + info.insert(2, f'wait_for={task._fut_waiter!r}') + + if task._coro: + coro = coroutines._format_coroutine(task._coro) + info.insert(2, f'coro=<{coro}>') + return info diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index de5076a96218e0..fa2422b7fba4a7 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -794,6 +794,8 @@ def __init__(self, loop, sock, protocol, extra=None, server=None): self._buffer = collections.deque() self._conn_lost = 0 # Set when call to connection_lost scheduled. self._closing = False # Set when close() called. 
+ self._paused = False # Set when pause_reading() called + if self._server is not None: self._server._attach() loop._transports[self._sock_fd] = self @@ -839,6 +841,25 @@ def get_protocol(self): def is_closing(self): return self._closing + def is_reading(self): + return not self.is_closing() and not self._paused + + def pause_reading(self): + if not self.is_reading(): + return + self._paused = True + self._loop._remove_reader(self._sock_fd) + if self._loop.get_debug(): + logger.debug("%r pauses reading", self) + + def resume_reading(self): + if self._closing or not self._paused: + return + self._paused = False + self._add_reader(self._sock_fd, self._read_ready) + if self._loop.get_debug(): + logger.debug("%r resumes reading", self) + def close(self): if self._closing: return @@ -898,9 +919,8 @@ def get_write_buffer_size(self): return sum(map(len, self._buffer)) def _add_reader(self, fd, callback, *args): - if self._closing: + if not self.is_reading(): return - self._loop._add_reader(fd, callback, *args) @@ -915,7 +935,6 @@ def __init__(self, loop, sock, protocol, waiter=None, self._read_ready_cb = None super().__init__(loop, sock, protocol, extra, server) self._eof = False - self._paused = False self._empty_waiter = None if _HAS_SENDMSG: self._write_ready = self._write_sendmsg @@ -943,25 +962,6 @@ def set_protocol(self, protocol): super().set_protocol(protocol) - def is_reading(self): - return not self._paused and not self._closing - - def pause_reading(self): - if self._closing or self._paused: - return - self._paused = True - self._loop._remove_reader(self._sock_fd) - if self._loop.get_debug(): - logger.debug("%r pauses reading", self) - - def resume_reading(self): - if self._closing or not self._paused: - return - self._paused = False - self._add_reader(self._sock_fd, self._read_ready) - if self._loop.get_debug(): - logger.debug("%r resumes reading", self) - def _read_ready(self): self._read_ready_cb() @@ -1176,6 +1176,9 @@ def writelines(self, list_of_data): return self._buffer.extend([memoryview(data) for data in list_of_data]) self._write_ready() + # If the entire buffer couldn't be written, register a write handler + if self._buffer: + self._loop._add_writer(self._sock_fd, self._write_ready) def can_write_eof(self): return True diff --git a/Lib/asyncio/subprocess.py b/Lib/asyncio/subprocess.py index cd10231f710f11..50727ca300e63e 100644 --- a/Lib/asyncio/subprocess.py +++ b/Lib/asyncio/subprocess.py @@ -144,10 +144,11 @@ def kill(self): async def _feed_stdin(self, input): debug = self._loop.get_debug() - self.stdin.write(input) - if debug: - logger.debug( - '%r communicate: feed stdin (%s bytes)', self, len(input)) + if input is not None: + self.stdin.write(input) + if debug: + logger.debug( + '%r communicate: feed stdin (%s bytes)', self, len(input)) try: await self.stdin.drain() except (BrokenPipeError, ConnectionResetError) as exc: @@ -180,7 +181,7 @@ async def _read_stream(self, fd): return output async def communicate(self, input=None): - if input is not None: + if self.stdin is not None: stdin = self._feed_stdin(input) else: stdin = self._noop() diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index c90d32c97add78..aa5269ade19a7f 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -6,6 +6,7 @@ 'wait', 'wait_for', 'as_completed', 'sleep', 'gather', 'shield', 'ensure_future', 'run_coroutine_threadsafe', 'current_task', 'all_tasks', + 'create_eager_task_factory', 'eager_task_factory', '_register_task', '_unregister_task', '_enter_task', '_leave_task', ) @@ 
-43,22 +44,26 @@ def all_tasks(loop=None): """Return a set of all tasks for the loop.""" if loop is None: loop = events.get_running_loop() - # Looping over a WeakSet (_all_tasks) isn't safe as it can be updated from another - # thread while we do so. Therefore we cast it to list prior to filtering. The list - # cast itself requires iteration, so we repeat it several times ignoring - # RuntimeErrors (which are not very likely to occur). See issues 34970 and 36607 for - # details. + # capturing the set of eager tasks first, so if an eager task "graduates" + # to a regular task in another thread, we don't risk missing it. + eager_tasks = list(_eager_tasks) + # Looping over the WeakSet isn't safe as it can be updated from another + # thread, therefore we cast it to list prior to filtering. The list cast + # itself requires iteration, so we repeat it several times ignoring + # RuntimeErrors (which are not very likely to occur). + # See issues 34970 and 36607 for details. + scheduled_tasks = None i = 0 while True: try: - tasks = list(_all_tasks) + scheduled_tasks = list(_scheduled_tasks) except RuntimeError: i += 1 if i >= 1000: raise else: break - return {t for t in tasks + return {t for t in itertools.chain(scheduled_tasks, eager_tasks) if futures._get_loop(t) is loop and not t.done()} @@ -93,7 +98,8 @@ class Task(futures._PyFuture): # Inherit Python Task implementation # status is still pending _log_destroy_pending = True - def __init__(self, coro, *, loop=None, name=None, context=None): + def __init__(self, coro, *, loop=None, name=None, context=None, + eager_start=False): super().__init__(loop=loop) if self._source_traceback: del self._source_traceback[-1] @@ -117,8 +123,11 @@ def __init__(self, coro, *, loop=None, name=None, context=None): else: self._context = context - self._loop.call_soon(self.__step, context=self._context) - _register_task(self) + if eager_start and self._loop.is_running(): + self.__eager_start() + else: + self._loop.call_soon(self.__step, context=self._context) + _register_task(self) def __del__(self): if self._state == futures._PENDING and self._log_destroy_pending: @@ -250,6 +259,25 @@ def uncancel(self): self._num_cancels_requested -= 1 return self._num_cancels_requested + def __eager_start(self): + prev_task = _swap_current_task(self._loop, self) + try: + _register_eager_task(self) + try: + self._context.run(self.__step_run_and_handle_result, None) + finally: + _unregister_eager_task(self) + finally: + try: + curtask = _swap_current_task(self._loop, prev_task) + assert curtask is self + finally: + if self.done(): + self._coro = None + self = None # Needed to break cycles when an exception occurs. + else: + _register_task(self) + def __step(self, exc=None): if self.done(): raise exceptions.InvalidStateError( @@ -258,11 +286,17 @@ def __step(self, exc=None): if not isinstance(exc, exceptions.CancelledError): exc = self._make_cancelled_error() self._must_cancel = False - coro = self._coro self._fut_waiter = None _enter_task(self._loop, self) - # Call either coro.throw(exc) or coro.send(None). + try: + self.__step_run_and_handle_result(exc) + finally: + _leave_task(self._loop, self) + self = None # Needed to break cycles when an exception occurs. 
+ + def __step_run_and_handle_result(self, exc): + coro = self._coro try: if exc is None: # We use the `send` method directly, because coroutines @@ -334,7 +368,6 @@ def __step(self, exc=None): self._loop.call_soon( self.__step, new_exc, context=self._context) finally: - _leave_task(self._loop, self) self = None # Needed to break cycles when an exception occurs. def __wakeup(self, future): @@ -897,8 +930,27 @@ def callback(): return future -# WeakSet containing all alive tasks. -_all_tasks = weakref.WeakSet() +def create_eager_task_factory(custom_task_constructor): + + if "eager_start" not in inspect.signature(custom_task_constructor).parameters: + raise TypeError( + "Provided constructor does not support eager task execution") + + def factory(loop, coro, *, name=None, context=None): + return custom_task_constructor( + coro, loop=loop, name=name, context=context, eager_start=True) + + + return factory + +eager_task_factory = create_eager_task_factory(Task) + + +# Collectively these two sets hold references to the complete set of active +# tasks. Eagerly executed tasks use a faster regular set as an optimization +# but may graduate to a WeakSet if the task blocks on IO. +_scheduled_tasks = weakref.WeakSet() +_eager_tasks = set() # Dictionary containing tasks that are currently active in # all running event loops. {EventLoop: Task} @@ -906,8 +958,13 @@ def callback(): def _register_task(task): - """Register a new task in asyncio as executed by loop.""" - _all_tasks.add(task) + """Register an asyncio Task scheduled to run on an event loop.""" + _scheduled_tasks.add(task) + + +def _register_eager_task(task): + """Register an asyncio Task about to be eagerly executed.""" + _eager_tasks.add(task) def _enter_task(loop, task): @@ -926,28 +983,49 @@ def _leave_task(loop, task): del _current_tasks[loop] +def _swap_current_task(loop, task): + prev_task = _current_tasks.get(loop) + if task is None: + del _current_tasks[loop] + else: + _current_tasks[loop] = task + return prev_task + + def _unregister_task(task): - """Unregister a task.""" - _all_tasks.discard(task) + """Unregister a completed, scheduled Task.""" + _scheduled_tasks.discard(task) + + +def _unregister_eager_task(task): + """Unregister a task which finished its first eager step.""" + _eager_tasks.discard(task) _py_current_task = current_task _py_register_task = _register_task +_py_register_eager_task = _register_eager_task _py_unregister_task = _unregister_task +_py_unregister_eager_task = _unregister_eager_task _py_enter_task = _enter_task _py_leave_task = _leave_task +_py_swap_current_task = _swap_current_task try: - from _asyncio import (_register_task, _unregister_task, - _enter_task, _leave_task, - _all_tasks, _current_tasks, + from _asyncio import (_register_task, _register_eager_task, + _unregister_task, _unregister_eager_task, + _enter_task, _leave_task, _swap_current_task, + _scheduled_tasks, _eager_tasks, _current_tasks, current_task) except ImportError: pass else: _c_current_task = current_task _c_register_task = _register_task + _c_register_eager_task = _register_eager_task _c_unregister_task = _unregister_task + _c_unregister_eager_task = _unregister_eager_task _c_enter_task = _enter_task _c_leave_task = _leave_task + _c_swap_current_task = _swap_current_task diff --git a/Lib/asyncio/timeouts.py b/Lib/asyncio/timeouts.py index 94d25535fbc059..029c468739bf2d 100644 --- a/Lib/asyncio/timeouts.py +++ b/Lib/asyncio/timeouts.py @@ -25,8 +25,18 @@ class _State(enum.Enum): @final class Timeout: + """Asynchronous context manager for 
cancelling overdue coroutines. + + Use `timeout()` or `timeout_at()` rather than instantiating this class directly. + """ def __init__(self, when: Optional[float]) -> None: + """Schedule a timeout that will trigger at a given loop time. + + - If `when` is `None`, the timeout will never trigger. + - If `when < loop.time()`, the timeout will trigger on the next + iteration of the event loop. + """ self._state = _State.CREATED self._timeout_handler: Optional[events.TimerHandle] = None @@ -34,9 +44,11 @@ def __init__(self, when: Optional[float]) -> None: self._when = when def when(self) -> Optional[float]: + """Return the current deadline.""" return self._when def reschedule(self, when: Optional[float]) -> None: + """Reschedule the timeout.""" assert self._state is not _State.CREATED if self._state is not _State.ENTERED: raise RuntimeError( @@ -72,6 +84,7 @@ def __repr__(self) -> str: async def __aenter__(self) -> "Timeout": self._state = _State.ENTERED self._task = tasks.current_task() + self._cancelling = self._task.cancelling() if self._task is None: raise RuntimeError("Timeout should be used inside a task") self.reschedule(self._when) @@ -92,10 +105,10 @@ async def __aexit__( if self._state is _State.EXPIRING: self._state = _State.EXPIRED - if self._task.uncancel() == 0 and exc_type is exceptions.CancelledError: - # Since there are no outstanding cancel requests, we're + if self._task.uncancel() <= self._cancelling and exc_type is exceptions.CancelledError: + # Since there are no new cancel requests, we're # handling this. - raise TimeoutError + raise TimeoutError from exc_val elif self._state is _State.ENTERED: self._state = _State.EXITED diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index b21e0394141bf4..17fb4d5f7646ce 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -485,13 +485,21 @@ def __init__(self, loop, pipe, protocol, waiter=None, extra=None): self._loop.call_soon(self._protocol.connection_made, self) # only start reading when connection_made() has been called - self._loop.call_soon(self._loop._add_reader, + self._loop.call_soon(self._add_reader, self._fileno, self._read_ready) if waiter is not None: # only wake up the waiter when connection_made() has been called self._loop.call_soon(futures._set_result_unless_cancelled, waiter, None) + def _add_reader(self, fd, callback): + if not self.is_reading(): + return + self._loop._add_reader(fd, callback) + + def is_reading(self): + return not self._paused and not self._closing + def __repr__(self): info = [self.__class__.__name__] if self._pipe is None: @@ -532,7 +540,7 @@ def _read_ready(self): self._loop.call_soon(self._call_connection_lost, None) def pause_reading(self): - if self._closing or self._paused: + if not self.is_reading(): return self._paused = True self._loop._remove_reader(self._fileno) diff --git a/Lib/base64.py b/Lib/base64.py index 95dc7b0086051b..e233647ee76639 100755 --- a/Lib/base64.py +++ b/Lib/base64.py @@ -558,12 +558,12 @@ def decodebytes(s): def main(): """Small main program""" import sys, getopt - usage = f"""usage: {sys.argv[0]} [-h|-d|-e|-u|-t] [file|-] + usage = f"""usage: {sys.argv[0]} [-h|-d|-e|-u] [file|-] -h: print this help message and exit -d, -u: decode -e: encode (default)""" try: - opts, args = getopt.getopt(sys.argv[1:], 'hdeut') + opts, args = getopt.getopt(sys.argv[1:], 'hdeu') except getopt.error as msg: sys.stdout = sys.stderr print(msg) diff --git a/Lib/bdb.py b/Lib/bdb.py index 7f9b09514ffd00..0f3eec653baaad 100644 --- a/Lib/bdb.py +++ 
b/Lib/bdb.py @@ -574,6 +574,8 @@ def format_stack_entry(self, frame_lineno, lprefix=': '): line = linecache.getline(filename, lineno, frame.f_globals) if line: s += lprefix + line.strip() + else: + s += f'{lprefix}Warning: lineno is None' return s # The following methods can be called by clients to use diff --git a/Lib/bisect.py b/Lib/bisect.py index d37da74f7b4055..ca6ca7240840bb 100644 --- a/Lib/bisect.py +++ b/Lib/bisect.py @@ -8,6 +8,8 @@ def insort_right(a, x, lo=0, hi=None, *, key=None): Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + + A custom key function can be supplied to customize the sort order. """ if key is None: lo = bisect_right(a, x, lo, hi) @@ -25,6 +27,8 @@ def bisect_right(a, x, lo=0, hi=None, *, key=None): Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + + A custom key function can be supplied to customize the sort order. """ if lo < 0: @@ -57,6 +61,8 @@ def insort_left(a, x, lo=0, hi=None, *, key=None): Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + + A custom key function can be supplied to customize the sort order. """ if key is None: @@ -74,6 +80,8 @@ def bisect_left(a, x, lo=0, hi=None, *, key=None): Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + + A custom key function can be supplied to customize the sort order. """ if lo < 0: diff --git a/Lib/cProfile.py b/Lib/cProfile.py index f7000a8bfa0ddb..135a12c3965c00 100755 --- a/Lib/cProfile.py +++ b/Lib/cProfile.py @@ -8,6 +8,7 @@ import _lsprof import importlib.machinery +import io import profile as _pyprofile # ____________________________________________________________ @@ -168,7 +169,7 @@ def main(): else: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) - with open(progname, 'rb') as fp: + with io.open_code(progname) as fp: code = compile(fp.read(), progname, 'exec') spec = importlib.machinery.ModuleSpec(name='__main__', loader=None, origin=progname) diff --git a/Lib/calendar.py b/Lib/calendar.py index 657396439c91fc..bbd4fea3b88ca4 100644 --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -7,8 +7,10 @@ import sys import datetime +from enum import IntEnum, global_enum import locale as _locale from itertools import repeat +import warnings __all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday", "firstweekday", "isleap", "leapdays", "weekday", "monthrange", @@ -16,6 +18,9 @@ "timegm", "month_name", "month_abbr", "day_name", "day_abbr", "Calendar", "TextCalendar", "HTMLCalendar", "LocaleTextCalendar", "LocaleHTMLCalendar", "weekheader", + "Day", "Month", "JANUARY", "FEBRUARY", "MARCH", + "APRIL", "MAY", "JUNE", "JULY", + "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY"] @@ -37,9 +42,47 @@ def __str__(self): return "bad weekday number %r; must be 0 (Monday) to 6 (Sunday)" % self.weekday -# Constants for months referenced later -January = 1 -February = 2 +def __getattr__(name): + if name in ('January', 'February'): + warnings.warn(f"The '{name}' attribute is deprecated, use '{name.upper()}' instead", + DeprecationWarning, stacklevel=2) + if name == 'January': + return 1 + else: + return 2 + + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +# Constants for months +@global_enum +class Month(IntEnum): + JANUARY = 1 + FEBRUARY = 2 + MARCH = 3 + APRIL = 4 + MAY = 5 + JUNE = 6 + JULY = 7 + AUGUST = 8 + 
SEPTEMBER = 9 + OCTOBER = 10 + NOVEMBER = 11 + DECEMBER = 12 + + +# Constants for days +@global_enum +class Day(IntEnum): + MONDAY = 0 + TUESDAY = 1 + WEDNESDAY = 2 + THURSDAY = 3 + FRIDAY = 4 + SATURDAY = 5 + SUNDAY = 6 + + # Number of days per month (except for February in leap years) mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] @@ -95,9 +138,6 @@ def __len__(self): month_name = _localized_month('%B') month_abbr = _localized_month('%b') -# Constants for weekdays -(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7) - def isleap(year): """Return True for leap years, False for non-leap years.""" @@ -125,12 +165,12 @@ def monthrange(year, month): if not 1 <= month <= 12: raise IllegalMonthError(month) day1 = weekday(year, month, 1) - ndays = mdays[month] + (month == February and isleap(year)) + ndays = mdays[month] + (month == FEBRUARY and isleap(year)) return day1, ndays def _monthlen(year, month): - return mdays[month] + (month == February and isleap(year)) + return mdays[month] + (month == FEBRUARY and isleap(year)) def _prevmonth(year, month): @@ -260,10 +300,7 @@ def yeardatescalendar(self, year, width=3): Each month contains between 4 and 6 weeks and each week contains 1-7 days. Days are datetime.date objects. """ - months = [ - self.monthdatescalendar(year, i) - for i in range(January, January+12) - ] + months = [self.monthdatescalendar(year, m) for m in Month] return [months[i:i+width] for i in range(0, len(months), width) ] def yeardays2calendar(self, year, width=3): @@ -273,10 +310,7 @@ def yeardays2calendar(self, year, width=3): (day number, weekday number) tuples. Day numbers outside this month are zero. """ - months = [ - self.monthdays2calendar(year, i) - for i in range(January, January+12) - ] + months = [self.monthdays2calendar(year, m) for m in Month] return [months[i:i+width] for i in range(0, len(months), width) ] def yeardayscalendar(self, year, width=3): @@ -285,10 +319,7 @@ def yeardayscalendar(self, year, width=3): yeardatescalendar()). Entries in the week lists are day numbers. Day numbers outside this month are zero. 
""" - months = [ - self.monthdayscalendar(year, i) - for i in range(January, January+12) - ] + months = [self.monthdayscalendar(year, m) for m in Month] return [months[i:i+width] for i in range(0, len(months), width) ] @@ -509,7 +540,7 @@ def formatyear(self, theyear, width=3): a('\n') a('<tr><th colspan="%d" class="%s">%s</th></tr>' % ( width, self.cssclass_year_head, theyear)) - for i in range(January, January+12, width): + for i in range(JANUARY, JANUARY+12, width): # months in this row months = range(i, min(i+width, 13)) a('<tr>') diff --git a/Lib/code.py b/Lib/code.py index 76000f8c8b2c1e..2bd5fa3e795a61 100644 --- a/Lib/code.py +++ b/Lib/code.py @@ -106,6 +106,7 @@ def showsyntaxerror(self, filename=None): """ type, value, tb = sys.exc_info() + sys.last_exc = value sys.last_type = type sys.last_value = value sys.last_traceback = tb @@ -119,7 +120,7 @@ def showsyntaxerror(self, filename=None): else: # Stuff in the right filename value = SyntaxError(msg, (filename, lineno, offset, line)) - sys.last_value = value + sys.last_exc = sys.last_value = value if sys.excepthook is sys.__excepthook__: lines = traceback.format_exception_only(type, value) self.write(''.join(lines)) @@ -138,6 +139,7 @@ def showtraceback(self): """ sys.last_type, sys.last_value, last_tb = ei = sys.exc_info() sys.last_traceback = last_tb + sys.last_exc = ei[1] try: lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next) if sys.excepthook is sys.__excepthook__: diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index a5393aad4249c0..03ca2d7e18f6f0 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -45,6 +45,11 @@ else: _collections_abc.MutableSequence.register(deque) +try: + from _collections import _deque_iterator +except ImportError: + pass + try: from _collections import defaultdict except ImportError: diff --git a/Lib/contextlib.py b/Lib/contextlib.py index 30d9ac25b2bbec..b5acbcb9e6d77c 100644 --- a/Lib/contextlib.py +++ b/Lib/contextlib.py @@ -441,7 +441,16 @@ def __exit__(self, exctype, excinst, exctb): # exactly reproduce the limitations of the CPython interpreter. # # See http://bugs.python.org/issue12029 for more details - return exctype is not None and issubclass(exctype, self._exceptions) + if exctype is None: + return + if issubclass(exctype, self._exceptions): + return True + if issubclass(exctype, ExceptionGroup): + match, rest = excinst.split(self._exceptions) + if rest is None: + return True + raise rest + return False class _BaseExitStack: diff --git a/Lib/csv.py b/Lib/csv.py index 4ef8be45ca9e0a..77f30c8d2b1f61 100644 --- a/Lib/csv.py +++ b/Lib/csv.py @@ -9,12 +9,14 @@ unregister_dialect, get_dialect, list_dialects, \ field_size_limit, \ QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, \ + QUOTE_STRINGS, QUOTE_NOTNULL, \ __doc__ from _csv import Dialect as _Dialect from io import StringIO __all__ = ["QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE", + "QUOTE_STRINGS", "QUOTE_NOTNULL", "Error", "Dialect", "__doc__", "excel", "excel_tab", "field_size_limit", "reader", "writer", "register_dialect", "get_dialect", "list_dialects", "Sniffer", diff --git a/Lib/ctypes/_endian.py b/Lib/ctypes/_endian.py index 34dee64b1a65a6..b5446c049bc9dc 100644 --- a/Lib/ctypes/_endian.py +++ b/Lib/ctypes/_endian.py @@ -37,7 +37,7 @@ class _swapped_union_meta(_swapped_meta, type(Union)): pass ################################################################ # Note: The Structure metaclass checks for the *presence* (not the -# value!) 
of a _swapped_bytes_ attribute to determine the bit order in +# value!) of a _swappedbytes_ attribute to determine the bit order in # structures containing bit fields. if sys.byteorder == "little": diff --git a/Lib/curses/textpad.py b/Lib/curses/textpad.py index 2079953a06614b..aa87061b8d749e 100644 --- a/Lib/curses/textpad.py +++ b/Lib/curses/textpad.py @@ -102,7 +102,10 @@ def do_command(self, ch): self._insert_printable_char(ch) elif ch == curses.ascii.SOH: # ^a self.win.move(y, 0) - elif ch in (curses.ascii.STX,curses.KEY_LEFT, curses.ascii.BS,curses.KEY_BACKSPACE): + elif ch in (curses.ascii.STX,curses.KEY_LEFT, + curses.ascii.BS, + curses.KEY_BACKSPACE, + curses.ascii.DEL): if x > 0: self.win.move(y, x-1) elif y == 0: @@ -111,7 +114,7 @@ def do_command(self, ch): self.win.move(y-1, self._end_of_line(y-1)) else: self.win.move(y-1, self.maxx) - if ch in (curses.ascii.BS, curses.KEY_BACKSPACE): + if ch in (curses.ascii.BS, curses.KEY_BACKSPACE, curses.ascii.DEL): self.win.delch() elif ch == curses.ascii.EOT: # ^d self.win.delch() diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index 82b08fc017884f..b0b8a773b7594f 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -222,6 +222,29 @@ def __repr__(self): # https://bugs.python.org/issue33453 for details. _MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)') +# Atomic immutable types which don't require any recursive handling and for which deepcopy +# returns the same object. We can provide a fast-path for these types in asdict and astuple. +_ATOMIC_TYPES = frozenset({ + # Common JSON Serializable types + types.NoneType, + bool, + int, + float, + str, + # Other common types + complex, + bytes, + # Other types that are also unaffected by deepcopy + types.EllipsisType, + types.NotImplementedType, + types.CodeType, + types.BuiltinFunctionType, + types.FunctionType, + type, + range, + property, +}) + # This function's logic is copied from "recursive_repr" function in # reprlib module to avoid dependency. def _recursive_repr(user_function): @@ -432,8 +455,8 @@ def _create_fn(name, args, body, *, globals=None, locals=None, locals = {} return_annotation = '' if return_type is not MISSING: - locals['_return_type'] = return_type - return_annotation = '->_return_type' + locals['__dataclass_return_type__'] = return_type + return_annotation = '->__dataclass_return_type__' args = ','.join(args) body = '\n'.join(f' {b}' for b in body) @@ -467,14 +490,14 @@ def _field_init(f, frozen, globals, self_name, slots): # Return the text of the line in the body of __init__ that will # initialize this field. - default_name = f'_dflt_{f.name}' + default_name = f'__dataclass_dflt_{f.name}__' if f.default_factory is not MISSING: if f.init: # This field has a default factory. If a parameter is # given, use it. If not, call the factory. globals[default_name] = f.default_factory value = (f'{default_name}() ' - f'if {f.name} is _HAS_DEFAULT_FACTORY ' + f'if {f.name} is __dataclass_HAS_DEFAULT_FACTORY__ ' f'else {f.name}') else: # This is a field that's not in the __init__ params, but @@ -535,11 +558,11 @@ def _init_param(f): elif f.default is not MISSING: # There's a default, this will be the name that's used to look # it up. - default = f'=_dflt_{f.name}' + default = f'=__dataclass_dflt_{f.name}__' elif f.default_factory is not MISSING: # There's a factory function. Set a marker. 
- default = '=_HAS_DEFAULT_FACTORY' - return f'{f.name}:_type_{f.name}{default}' + default = '=__dataclass_HAS_DEFAULT_FACTORY__' + return f'{f.name}:__dataclass_type_{f.name}__{default}' def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, @@ -562,10 +585,9 @@ def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, raise TypeError(f'non-default argument {f.name!r} ' 'follows default argument') - locals = {f'_type_{f.name}': f.type for f in fields} + locals = {f'__dataclass_type_{f.name}__': f.type for f in fields} locals.update({ - 'MISSING': MISSING, - '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY, + '__dataclass_HAS_DEFAULT_FACTORY__': _HAS_DEFAULT_FACTORY, '__dataclass_builtins_object__': object, }) @@ -1106,8 +1128,13 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, if not getattr(cls, '__doc__'): # Create a class doc-string. - cls.__doc__ = (cls.__name__ + - str(inspect.signature(cls)).replace(' -> None', '')) + try: + # In some cases fetching a signature is not possible. + # But, we surely should not fail in this case. + text_sig = str(inspect.signature(cls)).replace(' -> None', '') + except (TypeError, ValueError): + text_sig = '' + cls.__doc__ = (cls.__name__ + text_sig) if match_args: # I could probably compute this once @@ -1200,8 +1227,10 @@ def _add_slots(cls, is_frozen, weakref_slot): if is_frozen: # Need this for pickling frozen classes with slots. - cls.__getstate__ = _dataclass_getstate - cls.__setstate__ = _dataclass_setstate + if '__getstate__' not in cls_dict: + cls.__getstate__ = _dataclass_getstate + if '__setstate__' not in cls_dict: + cls.__setstate__ = _dataclass_setstate return cls @@ -1248,7 +1277,7 @@ def fields(class_or_instance): try: fields = getattr(class_or_instance, _FIELDS) except AttributeError: - raise TypeError('must be called with a dataclass type or instance') + raise TypeError('must be called with a dataclass type or instance') from None # Exclude pseudo-fields. Note that fields is sorted by insertion # order, so the order of the tuple is as the fields were defined. @@ -1292,7 +1321,9 @@ class C: def _asdict_inner(obj, dict_factory): - if _is_dataclass_instance(obj): + if type(obj) in _ATOMIC_TYPES: + return obj + elif _is_dataclass_instance(obj): result = [] for f in fields(obj): value = _asdict_inner(getattr(obj, f.name), dict_factory) @@ -1364,7 +1395,9 @@ class C: def _astuple_inner(obj, tuple_factory): - if _is_dataclass_instance(obj): + if type(obj) in _ATOMIC_TYPES: + return obj + elif _is_dataclass_instance(obj): result = [] for f in fields(obj): value = _astuple_inner(getattr(obj, f.name), tuple_factory) @@ -1421,8 +1454,11 @@ class C(Base): For the bases and namespace parameters, see the builtin type() function. - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). + The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, + slots, and weakref_slot are passed to dataclass(). + + If module parameter is defined, the '__module__' attribute of the dataclass is + set to that value. 
""" if namespace is None: diff --git a/Lib/datetime.py b/Lib/datetime.py index 637144637485bc..b0eb1c216a689d 100644 --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1801,6 +1801,13 @@ def fromtimestamp(cls, timestamp, tz=None): @classmethod def utcfromtimestamp(cls, t): """Construct a naive UTC datetime from a POSIX timestamp.""" + import warnings + warnings.warn("datetime.utcfromtimestamp() is deprecated and scheduled " + "for removal in a future version. Use timezone-aware " + "objects to represent datetimes in UTC: " + "datetime.fromtimestamp(t, datetime.UTC).", + DeprecationWarning, + stacklevel=2) return cls._fromtimestamp(t, True, None) @classmethod @@ -1812,8 +1819,15 @@ def now(cls, tz=None): @classmethod def utcnow(cls): "Construct a UTC datetime from time.time()." + import warnings + warnings.warn("datetime.utcnow() is deprecated and scheduled for " + "removal in a future version. Instead, Use timezone-aware " + "objects to represent datetimes in UTC: " + "datetime.now(datetime.UTC).", + DeprecationWarning, + stacklevel=2) t = _time.time() - return cls.utcfromtimestamp(t) + return cls._fromtimestamp(t, True, None) @classmethod def combine(cls, date, time, tzinfo=True): @@ -1965,6 +1979,11 @@ def replace(self, year=None, month=None, day=None, hour=None, def _local_timezone(self): if self.tzinfo is None: ts = self._mktime() + # Detect gap + ts2 = self.replace(fold=1-self.fold)._mktime() + if ts2 != ts: # This happens in a gap or a fold + if (ts2 > ts) == self.fold: + ts = ts2 else: ts = (self - _EPOCH) // timedelta(seconds=1) localtm = _time.localtime(ts) diff --git a/Lib/dis.py b/Lib/dis.py index 9edde6ae8258da..85c109584bf94f 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -41,6 +41,7 @@ FOR_ITER = opmap['FOR_ITER'] SEND = opmap['SEND'] LOAD_ATTR = opmap['LOAD_ATTR'] +LOAD_SUPER_ATTR = opmap['LOAD_SUPER_ATTR'] CACHE = opmap["CACHE"] @@ -64,10 +65,10 @@ def _try_compile(source, name): expect code objects """ try: - c = compile(source, name, 'eval') + return compile(source, name, 'eval') except SyntaxError: - c = compile(source, name, 'exec') - return c + pass + return compile(source, name, 'exec') def dis(x=None, *, file=None, depth=None, show_caches=False, adaptive=False): """Disassemble classes, methods, functions, and other compiled objects. 
@@ -118,7 +119,10 @@ def distb(tb=None, *, file=None, show_caches=False, adaptive=False): """Disassemble a traceback (default: last traceback).""" if tb is None: try: - tb = sys.last_traceback + if hasattr(sys, 'last_exc'): + tb = sys.last_exc.__traceback__ + else: + tb = sys.last_traceback except AttributeError: raise RuntimeError("no last traceback to disassemble") from None while tb.tb_next: tb = tb.tb_next @@ -365,9 +369,8 @@ def _get_const_value(op, arg, co_consts): assert op in hasconst argval = UNKNOWN - if op == LOAD_CONST or op == RETURN_CONST: - if co_consts is not None: - argval = co_consts[arg] + if co_consts is not None: + argval = co_consts[arg] return argval def _get_const_info(op, arg, co_consts): @@ -472,6 +475,10 @@ def _get_instructions_bytes(code, varname_from_oparg=None, argval, argrepr = _get_name_info(arg//2, get_name) if (arg & 1) and argrepr: argrepr = "NULL|self + " + argrepr + elif deop == LOAD_SUPER_ATTR: + argval, argrepr = _get_name_info(arg//4, get_name) + if (arg & 1) and argrepr: + argrepr = "NULL|self + " + argrepr else: argval, argrepr = _get_name_info(arg, get_name) elif deop in hasjabs: @@ -578,7 +585,12 @@ def _disassemble_bytes(code, lasti=-1, varname_from_oparg=None, instr.offset > 0) if new_source_line: print(file=file) - is_current_instr = instr.offset == lasti + if show_caches: + is_current_instr = instr.offset == lasti + else: + # Each CACHE takes 2 bytes + is_current_instr = instr.offset <= lasti \ + <= instr.offset + 2 * _inline_cache_entries[_deoptop(instr.opcode)] print(instr._disassemble(lineno_width, is_current_instr, offset_width), file=file) if exception_entries: diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index e637e6df06612d..0d6bd812475eea 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -1987,7 +1987,7 @@ def get_address_list(value): try: token, value = get_address(value) address_list.append(token) - except errors.HeaderParseError as err: + except errors.HeaderParseError: leader = None if value[0] in CFWS_LEADER: leader, value = get_cfws(value) @@ -2096,7 +2096,7 @@ def get_msg_id(value): except errors.HeaderParseError: try: token, value = get_no_fold_literal(value) - except errors.HeaderParseError as e: + except errors.HeaderParseError: try: token, value = get_domain(value) msg_id.defects.append(errors.ObsoleteHeaderDefect( @@ -2443,7 +2443,6 @@ def get_parameter(value): raise errors.HeaderParseError("Parameter not followed by '='") param.append(ValueTerminal('=', 'parameter-separator')) value = value[1:] - leader = None if value and value[0] in CFWS_LEADER: token, value = get_cfws(value) param.append(token) @@ -2568,7 +2567,7 @@ def parse_mime_parameters(value): try: token, value = get_parameter(value) mime_parameters.append(token) - except errors.HeaderParseError as err: + except errors.HeaderParseError: leader = None if value[0] in CFWS_LEADER: leader, value = get_cfws(value) @@ -2626,7 +2625,6 @@ def parse_content_type_header(value): don't do that. 
""" ctype = ContentType() - recover = False if not value: ctype.defects.append(errors.HeaderMissingRequiredValue( "Missing content type specification")) diff --git a/Lib/email/charset.py b/Lib/email/charset.py index 9af269442fb8af..043801107b60e5 100644 --- a/Lib/email/charset.py +++ b/Lib/email/charset.py @@ -341,7 +341,6 @@ def header_encode_lines(self, string, maxlengths): if not lines and not current_line: lines.append(None) else: - separator = (' ' if lines else '') joined_line = EMPTYSTRING.join(current_line) header_bytes = _encode(joined_line, codec) lines.append(encoder(header_bytes)) diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py index 6bc4e0c4e59895..885097c7dda067 100644 --- a/Lib/email/feedparser.py +++ b/Lib/email/feedparser.py @@ -264,7 +264,7 @@ def _parsegen(self): yield NeedMoreData continue break - msg = self._pop_message() + self._pop_message() # We need to pop the EOF matcher in order to tell if we're at # the end of the current file, not the end of the last block # of message headers. diff --git a/Lib/email/message.py b/Lib/email/message.py index b540c33984a753..411118c74dabb4 100644 --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -14,7 +14,7 @@ # Intrapackage imports from email import utils from email import errors -from email._policybase import Policy, compat32 +from email._policybase import compat32 from email import charset as _charset from email._encoded_words import decode_b Charset = _charset.Charset diff --git a/Lib/email/mime/text.py b/Lib/email/mime/text.py index dfe53c426b2ac4..7672b789138600 100644 --- a/Lib/email/mime/text.py +++ b/Lib/email/mime/text.py @@ -6,7 +6,6 @@ __all__ = ['MIMEText'] -from email.charset import Charset from email.mime.nonmultipart import MIMENonMultipart @@ -36,6 +35,6 @@ def __init__(self, _text, _subtype='plain', _charset=None, *, policy=None): _charset = 'utf-8' MIMENonMultipart.__init__(self, 'text', _subtype, policy=policy, - **{'charset': str(_charset)}) + charset=str(_charset)) self.set_payload(_text, _charset) diff --git a/Lib/email/utils.py b/Lib/email/utils.py index cfdfeb3f1a86e4..81da5394ea1695 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -143,13 +143,13 @@ def formatdate(timeval=None, localtime=False, usegmt=False): # 2822 requires that day and month names be the English abbreviations. if timeval is None: timeval = time.time() - if localtime or usegmt: - dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc) - else: - dt = datetime.datetime.utcfromtimestamp(timeval) + dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc) + if localtime: dt = dt.astimezone() usegmt = False + elif not usegmt: + dt = dt.replace(tzinfo=None) return format_datetime(dt, usegmt) def format_datetime(dt, usegmt=False): @@ -331,41 +331,23 @@ def collapse_rfc2231_value(value, errors='replace', # better than not having it. # -def localtime(dt=None, isdst=-1): +def localtime(dt=None, isdst=None): """Return local time as an aware datetime object. If called without arguments, return current time. Otherwise *dt* argument should be a datetime instance, and it is converted to the local time zone according to the system time zone database. If *dt* is naive (that is, dt.tzinfo is None), it is assumed to be in local time. - In this case, a positive or zero value for *isdst* causes localtime to - presume initially that summer time (for example, Daylight Saving Time) - is or is not (respectively) in effect for the specified time. 
A - negative value for *isdst* causes the localtime() function to attempt - to divine whether summer time is in effect for the specified time. + The isdst parameter is ignored. """ + if isdst is not None: + import warnings + warnings._deprecated( + "The 'isdst' parameter to 'localtime'", + message='{name} is deprecated and slated for removal in Python {remove}', + remove=(3, 14), + ) if dt is None: - return datetime.datetime.now(datetime.timezone.utc).astimezone() - if dt.tzinfo is not None: - return dt.astimezone() - # We have a naive datetime. Convert to a (localtime) timetuple and pass to - # system mktime together with the isdst hint. System mktime will return - # seconds since epoch. - tm = dt.timetuple()[:-1] + (isdst,) - seconds = time.mktime(tm) - localtm = time.localtime(seconds) - try: - delta = datetime.timedelta(seconds=localtm.tm_gmtoff) - tz = datetime.timezone(delta, localtm.tm_zone) - except AttributeError: - # Compute UTC offset and compare with the value implied by tm_isdst. - # If the values match, use the zone name implied by tm_isdst. - delta = dt - datetime.datetime(*time.gmtime(seconds)[:6]) - dst = time.daylight and localtm.tm_isdst > 0 - gmtoff = -(time.altzone if dst else time.timezone) - if delta == datetime.timedelta(seconds=gmtoff): - tz = datetime.timezone(delta, time.tzname[dst]) - else: - tz = datetime.timezone(delta) - return dt.replace(tzinfo=tz) + dt = datetime.datetime.now() + return dt.astimezone() diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 00e77749e25e77..5f4f1d75b43e64 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -9,11 +9,9 @@ __all__ = ["version", "bootstrap"] -_PACKAGE_NAMES = ('setuptools', 'pip') -_SETUPTOOLS_VERSION = "65.5.0" -_PIP_VERSION = "23.0.1" +_PACKAGE_NAMES = ('pip',) +_PIP_VERSION = "23.1.2" _PROJECTS = [ - ("setuptools", _SETUPTOOLS_VERSION, "py3"), ("pip", _PIP_VERSION, "py3"), ] @@ -153,17 +151,17 @@ def _bootstrap(*, root=None, upgrade=False, user=False, _disable_pip_configuration_settings() - # By default, installing pip and setuptools installs all of the + # By default, installing pip installs all of the # following scripts (X.Y == running Python version): # - # pip, pipX, pipX.Y, easy_install, easy_install-X.Y + # pip, pipX, pipX.Y # # pip 1.5+ allows ensurepip to request that some of those be left out if altinstall: - # omit pip, pipX and easy_install + # omit pip, pipX os.environ["ENSUREPIP_OPTIONS"] = "altinstall" elif not default_pip: - # omit pip and easy_install + # omit pip os.environ["ENSUREPIP_OPTIONS"] = "install" with tempfile.TemporaryDirectory() as tmpdir: @@ -271,14 +269,14 @@ def _main(argv=None): action="store_true", default=False, help=("Make an alternate install, installing only the X.Y versioned " - "scripts (Default: pipX, pipX.Y, easy_install-X.Y)."), + "scripts (Default: pipX, pipX.Y)."), ) parser.add_argument( "--default-pip", action="store_true", default=False, help=("Make a default pip install, installing the unqualified pip " - "and easy_install in addition to the versioned scripts."), + "in addition to the versioned scripts."), ) args = parser.parse_args(argv) diff --git a/Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl similarity index 76% rename from Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl index a855dc40e8630d..6a2515615ccda3 100644 Binary files a/Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl and 
b/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl differ diff --git a/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl deleted file mode 100644 index 123a13e2c6b254..00000000000000 Binary files a/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl and /dev/null differ diff --git a/Lib/enum.py b/Lib/enum.py index d14e91a9b017d1..6e497f7ef6a7de 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -190,6 +190,8 @@ class property(DynamicClassAttribute): """ member = None + _attr_type = None + _cls_type = None def __get__(self, instance, ownerclass=None): if instance is None: @@ -199,43 +201,36 @@ def __get__(self, instance, ownerclass=None): raise AttributeError( '%r has no attribute %r' % (ownerclass, self.name) ) - else: - if self.fget is None: - if self.member is None: # not sure this can happen, but just in case - raise AttributeError( - '%r has no attribute %r' % (ownerclass, self.name) - ) - # issue warning deprecating this behavior - import warnings - warnings.warn( - "`member.member` access (e.g. `Color.RED.BLUE`) is " - "deprecated and will be removed in 3.14.", - DeprecationWarning, - stacklevel=2, - ) - return self.member - # XXX: uncomment in 3.14 and remove warning above - # raise AttributeError( - # '%r member has no attribute %r' % (ownerclass, self.name) - # ) - else: - return self.fget(instance) + if self.fget is not None: + # use previous enum.property + return self.fget(instance) + elif self._attr_type == 'attr': + # look up previous attibute + return getattr(self._cls_type, self.name) + elif self._attr_type == 'desc': + # use previous descriptor + return getattr(instance._value_, self.name) + # look for a member by this name. + try: + return ownerclass._member_map_[self.name] + except KeyError: + raise AttributeError( + '%r has no attribute %r' % (ownerclass, self.name) + ) from None def __set__(self, instance, value): - if self.fset is None: - raise AttributeError( - "<enum %r> cannot set attribute %r" % (self.clsname, self.name) - ) - else: + if self.fset is not None: return self.fset(instance, value) + raise AttributeError( + "<enum %r> cannot set attribute %r" % (self.clsname, self.name) + ) def __delete__(self, instance): - if self.fdel is None: - raise AttributeError( - "<enum %r> cannot delete attribute %r" % (self.clsname, self.name) - ) - else: + if self.fdel is not None: return self.fdel(instance) + raise AttributeError( + "<enum %r> cannot delete attribute %r" % (self.clsname, self.name) + ) def __set_name__(self, ownerclass, name): self.name = name @@ -266,28 +261,32 @@ def __set_name__(self, enum_class, member_name): args = (args, ) # wrap it one more time if not enum_class._use_args_: enum_member = enum_class._new_member_(enum_class) - if not hasattr(enum_member, '_value_'): + else: + enum_member = enum_class._new_member_(enum_class, *args) + if not hasattr(enum_member, '_value_'): + if enum_class._member_type_ is object: + enum_member._value_ = value + else: try: enum_member._value_ = enum_class._member_type_(*args) except Exception as exc: - enum_member._value_ = value - else: - enum_member = enum_class._new_member_(enum_class, *args) - if not hasattr(enum_member, '_value_'): - if enum_class._member_type_ is object: - enum_member._value_ = value - else: - try: - enum_member._value_ = enum_class._member_type_(*args) - except Exception as exc: - raise TypeError( - '_value_ not set in __new__, unable to create it' - ) from None + new_exc = TypeError( + '_value_ not set in __new__, unable to 
create it' + ) + new_exc.__cause__ = exc + raise new_exc value = enum_member._value_ enum_member._name_ = member_name enum_member.__objclass__ = enum_class enum_member.__init__(*args) enum_member._sort_order_ = len(enum_class._member_names_) + + if Flag is not None and issubclass(enum_class, Flag): + enum_class._flag_mask_ |= value + if _is_single_bit(value): + enum_class._singles_mask_ |= value + enum_class._all_bits_ = 2 ** ((enum_class._flag_mask_).bit_length()) - 1 + # If another member with the same value was already defined, the # new member becomes an alias to the existing one. try: @@ -317,22 +316,43 @@ def __set_name__(self, enum_class, member_name): ): # no other instances found, record this member in _member_names_ enum_class._member_names_.append(member_name) - # get redirect in place before adding to _member_map_ - # but check for other instances in parent classes first - descriptor = None + # if necessary, get redirect in place and then add it to _member_map_ + found_descriptor = None + descriptor_type = None + class_type = None for base in enum_class.__mro__[1:]: - descriptor = base.__dict__.get(member_name) - if descriptor is not None: - if isinstance(descriptor, (property, DynamicClassAttribute)): + attr = base.__dict__.get(member_name) + if attr is not None: + if isinstance(attr, (property, DynamicClassAttribute)): + found_descriptor = attr + class_type = base + descriptor_type = 'enum' break - redirect = property() - redirect.member = enum_member - redirect.__set_name__(enum_class, member_name) - if descriptor: - redirect.fget = getattr(descriptor, 'fget', None) - redirect.fset = getattr(descriptor, 'fset', None) - redirect.fdel = getattr(descriptor, 'fdel', None) - setattr(enum_class, member_name, redirect) + elif _is_descriptor(attr): + found_descriptor = attr + descriptor_type = descriptor_type or 'desc' + class_type = class_type or base + continue + else: + descriptor_type = 'attr' + class_type = base + if found_descriptor: + redirect = property() + redirect.member = enum_member + redirect.__set_name__(enum_class, member_name) + if descriptor_type in ('enum','desc'): + # earlier descriptor found; copy fget, fset, fdel to this one. 
+ redirect.fget = getattr(found_descriptor, 'fget', None) + redirect._get = getattr(found_descriptor, '__get__', None) + redirect.fset = getattr(found_descriptor, 'fset', None) + redirect._set = getattr(found_descriptor, '__set__', None) + redirect.fdel = getattr(found_descriptor, 'fdel', None) + redirect._del = getattr(found_descriptor, '__delete__', None) + redirect._attr_type = descriptor_type + redirect._cls_type = class_type + setattr(enum_class, member_name, redirect) + else: + setattr(enum_class, member_name, enum_member) # now add to _member_map_ (even aliases) enum_class._member_map_[member_name] = enum_member try: @@ -518,8 +538,13 @@ def __new__(metacls, cls, bases, classdict, *, boundary=None, _simple=False, **k # # adjust the sunders _order_ = classdict.pop('_order_', None) + _gnv = classdict.get('_generate_next_value_') + if _gnv is not None and type(_gnv) is not staticmethod: + _gnv = staticmethod(_gnv) # convert to normal dict classdict = dict(classdict.items()) + if _gnv is not None: + classdict['_generate_next_value_'] = _gnv # # data type of member and the controlling Enum class member_type, first_enum = metacls._get_mixins_(cls, bases) @@ -530,12 +555,8 @@ def __new__(metacls, cls, bases, classdict, *, boundary=None, _simple=False, **k classdict['_use_args_'] = use_args # # convert future enum members into temporary _proto_members - # and record integer values in case this will be a Flag - flag_mask = 0 for name in member_names: value = classdict[name] - if isinstance(value, int): - flag_mask |= value classdict[name] = _proto_member(value) # # house-keeping structures @@ -552,8 +573,9 @@ def __new__(metacls, cls, bases, classdict, *, boundary=None, _simple=False, **k boundary or getattr(first_enum, '_boundary_', None) ) - classdict['_flag_mask_'] = flag_mask - classdict['_all_bits_'] = 2 ** ((flag_mask).bit_length()) - 1 + classdict['_flag_mask_'] = 0 + classdict['_singles_mask_'] = 0 + classdict['_all_bits_'] = 0 classdict['_inverted_'] = None try: exc = None @@ -642,21 +664,10 @@ def __new__(metacls, cls, bases, classdict, *, boundary=None, _simple=False, **k ): delattr(enum_class, '_boundary_') delattr(enum_class, '_flag_mask_') + delattr(enum_class, '_singles_mask_') delattr(enum_class, '_all_bits_') delattr(enum_class, '_inverted_') elif Flag is not None and issubclass(enum_class, Flag): - # ensure _all_bits_ is correct and there are no missing flags - single_bit_total = 0 - multi_bit_total = 0 - for flag in enum_class._member_map_.values(): - flag_value = flag._value_ - if _is_single_bit(flag_value): - single_bit_total |= flag_value - else: - # multi-bit flags are considered aliases - multi_bit_total |= flag_value - enum_class._flag_mask_ = single_bit_total - # # set correct __iter__ member_list = [m._value_ for m in enum_class] if member_list != sorted(member_list): @@ -923,7 +934,7 @@ def _convert_(cls, name, module, filter, source=None, *, boundary=None, as_globa def _check_for_existing_members_(mcls, class_name, bases): for chain in bases: for base in chain.__mro__: - if issubclass(base, Enum) and base._member_names_: + if isinstance(base, EnumType) and base._member_names_: raise TypeError( "<enum %r> cannot extend %r" % (class_name, base) @@ -942,7 +953,7 @@ def _get_mixins_(mcls, class_name, bases): # ensure final parent class is an Enum derivative, find any concrete # data type, and check that Enum has no members first_enum = bases[-1] - if not issubclass(first_enum, Enum): + if not isinstance(first_enum, EnumType): raise TypeError("new enumerations should 
be created as " "`EnumName([mixin_type, ...] [data_type,] enum_type)`") member_type = mcls._find_data_type_(class_name, bases) or object @@ -954,7 +965,7 @@ def _find_data_repr_(mcls, class_name, bases): for base in chain.__mro__: if base is object: continue - elif issubclass(base, Enum): + elif isinstance(base, EnumType): # if we hit an Enum, use it's _value_repr_ return base._value_repr_ elif '__repr__' in base.__dict__: @@ -972,6 +983,7 @@ def _find_data_repr_(mcls, class_name, bases): @classmethod def _find_data_type_(mcls, class_name, bases): + # a datatype has a __new__ method, or a __dataclass_fields__ attribute data_types = set() base_chain = set() for chain in bases: @@ -980,13 +992,11 @@ def _find_data_type_(mcls, class_name, bases): base_chain.add(base) if base is object: continue - elif issubclass(base, Enum): + elif isinstance(base, EnumType): if base._member_type_ is not object: data_types.add(base._member_type_) break - elif '__new__' in base.__dict__ or '__init__' in base.__dict__: - if issubclass(base, Enum): - continue + elif '__new__' in base.__dict__ or '__dataclass_fields__' in base.__dict__: data_types.add(candidate or base) break else: @@ -1146,6 +1156,7 @@ def __new__(cls, value): def __init__(self, *args, **kwds): pass + @staticmethod def _generate_next_value_(name, start, count, last_values): """ Generate the next value when not given. @@ -1288,6 +1299,7 @@ def __new__(cls, *values): member._value_ = value return member + @staticmethod def _generate_next_value_(name, start, count, last_values): """ Return the lower-cased version of the member name. @@ -1301,10 +1313,10 @@ def _reduce_ex_by_global_name(self, proto): class FlagBoundary(StrEnum): """ control how out of range values are handled - "strict" -> error is raised [default for Flag] + "strict" -> error is raised [default for Flag] "conform" -> extra bits are discarded - "eject" -> lose flag status [default for IntFlag] - "keep" -> keep flag status and all bits + "eject" -> lose flag status + "keep" -> keep flag status and all bits [default for IntFlag] """ STRICT = auto() CONFORM = auto() @@ -1313,7 +1325,7 @@ class FlagBoundary(StrEnum): STRICT, CONFORM, EJECT, KEEP = FlagBoundary -class Flag(Enum, boundary=CONFORM): +class Flag(Enum, boundary=STRICT): """ Support for flags """ @@ -1337,6 +1349,7 @@ def __reduce_ex__(self, proto): _numeric_repr_ = repr + @staticmethod def _generate_next_value_(name, start, count, last_values): """ Generate the next value when not given. @@ -1391,6 +1404,7 @@ def _missing_(cls, value): # - value must not include any skipped flags (e.g. 
if bit 2 is not # defined, then 0d10 is invalid) flag_mask = cls._flag_mask_ + singles_mask = cls._singles_mask_ all_bits = cls._all_bits_ neg_value = None if ( @@ -1422,7 +1436,8 @@ def _missing_(cls, value): value = all_bits + 1 + value # get members and unknown unknown = value & ~flag_mask - member_value = value & flag_mask + aliases = value & ~singles_mask + member_value = value & singles_mask if unknown and cls._boundary_ is not KEEP: raise ValueError( '%s(%r) --> unknown values %r [%s]' @@ -1436,11 +1451,25 @@ def _missing_(cls, value): pseudo_member = cls._member_type_.__new__(cls, value) if not hasattr(pseudo_member, '_value_'): pseudo_member._value_ = value - if member_value: - pseudo_member._name_ = '|'.join([ - m._name_ for m in cls._iter_member_(member_value) - ]) - if unknown: + if member_value or aliases: + members = [] + combined_value = 0 + for m in cls._iter_member_(member_value): + members.append(m) + combined_value |= m._value_ + if aliases: + value = member_value | aliases + for n, pm in cls._member_map_.items(): + if pm not in members and pm._value_ and pm._value_ & value == pm._value_: + members.append(pm) + combined_value |= pm._value_ + unknown = value ^ combined_value + pseudo_member._name_ = '|'.join([m._name_ for m in members]) + if not combined_value: + pseudo_member._name_ = None + elif unknown and cls._boundary_ is STRICT: + raise ValueError('%r: no members with value %r' % (cls, unknown)) + elif unknown: pseudo_member._name_ += '|%s' % cls._numeric_repr_(unknown) else: pseudo_member._name_ = None @@ -1672,6 +1701,7 @@ def convert_class(cls): body['_boundary_'] = boundary or etype._boundary_ body['_flag_mask_'] = None body['_all_bits_'] = None + body['_singles_mask_'] = None body['_inverted_'] = None body['__or__'] = Flag.__or__ body['__xor__'] = Flag.__xor__ @@ -1747,7 +1777,8 @@ def convert_class(cls): else: multi_bits |= value gnv_last_values.append(value) - enum_class._flag_mask_ = single_bits + enum_class._flag_mask_ = single_bits | multi_bits + enum_class._singles_mask_ = single_bits enum_class._all_bits_ = 2 ** ((single_bits|multi_bits).bit_length()) - 1 # set correct __iter__ member_list = [m._value_ for m in enum_class] diff --git a/Lib/http/client.py b/Lib/http/client.py index 15c5cf634cf508..0f5cd35247ae82 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -448,6 +448,7 @@ def isclosed(self): return self.fp is None def read(self, amt=None): + """Read and return the response body, or up to the next amt bytes.""" if self.fp is None: return b"" @@ -869,9 +870,9 @@ def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, def set_tunnel(self, host, port=None, headers=None): """Set up host and port for HTTP CONNECT tunnelling. - In a connection that uses HTTP CONNECT tunneling, the host passed to the - constructor is used as a proxy server that relays all communication to - the endpoint passed to `set_tunnel`. This done by sending an HTTP + In a connection that uses HTTP CONNECT tunnelling, the host passed to + the constructor is used as a proxy server that relays all communication + to the endpoint passed to `set_tunnel`. This done by sending an HTTP CONNECT request to the proxy server when the connection is established. This method must be called before the HTTP connection has been @@ -879,6 +880,13 @@ def set_tunnel(self, host, port=None, headers=None): The headers argument should be a mapping of extra HTTP headers to send with the CONNECT request. 
+ + As HTTP/1.1 is used for HTTP CONNECT tunnelling request, as per the RFC + (https://tools.ietf.org/html/rfc7231#section-4.3.6), a HTTP Host: + header must be provided, matching the authority-form of the request + target provided as the destination for the CONNECT request. If a + HTTP Host: header is not provided via the headers argument, one + is generated and transmitted automatically. """ if self.sock: @@ -886,10 +894,15 @@ def set_tunnel(self, host, port=None, headers=None): self._tunnel_host, self._tunnel_port = self._get_hostport(host, port) if headers: - self._tunnel_headers = headers + self._tunnel_headers = headers.copy() else: self._tunnel_headers.clear() + if not any(header.lower() == "host" for header in self._tunnel_headers): + encoded_host = self._tunnel_host.encode("idna").decode("ascii") + self._tunnel_headers["Host"] = "%s:%d" % ( + encoded_host, self._tunnel_port) + def _get_hostport(self, host, port): if port is None: i = host.rfind(':') @@ -914,8 +927,9 @@ def set_debuglevel(self, level): self.debuglevel = level def _tunnel(self): - connect = b"CONNECT %s:%d HTTP/1.0\r\n" % ( - self._tunnel_host.encode("ascii"), self._tunnel_port) + connect = b"CONNECT %s:%d %s\r\n" % ( + self._tunnel_host.encode("idna"), self._tunnel_port, + self._http_vsn_str.encode("ascii")) headers = [connect] for header, value in self._tunnel_headers.items(): headers.append(f"{header}: {value}\r\n".encode("latin-1")) diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py index 93b10d26c84545..bd89370e16831e 100644 --- a/Lib/http/cookiejar.py +++ b/Lib/http/cookiejar.py @@ -104,9 +104,9 @@ def time2isoz(t=None): """ if t is None: - dt = datetime.datetime.utcnow() + dt = datetime.datetime.now(tz=datetime.UTC) else: - dt = datetime.datetime.utcfromtimestamp(t) + dt = datetime.datetime.fromtimestamp(t, tz=datetime.UTC) return "%04d-%02d-%02d %02d:%02d:%02dZ" % ( dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) @@ -122,9 +122,9 @@ def time2netscape(t=None): """ if t is None: - dt = datetime.datetime.utcnow() + dt = datetime.datetime.now(tz=datetime.UTC) else: - dt = datetime.datetime.utcfromtimestamp(t) + dt = datetime.datetime.fromtimestamp(t, tz=datetime.UTC) return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % ( DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], dt.year, dt.hour, dt.minute, dt.second) diff --git a/Lib/idlelib/calltip_w.py b/Lib/idlelib/calltip_w.py index 1e0404aa49f562..278546064adde2 100644 --- a/Lib/idlelib/calltip_w.py +++ b/Lib/idlelib/calltip_w.py @@ -25,7 +25,7 @@ def __init__(self, text_widget): text_widget: a Text widget with code for which call-tips are desired """ # Note: The Text widget will be accessible as self.anchor_widget - super(CalltipWindow, self).__init__(text_widget) + super().__init__(text_widget) self.label = self.text = None self.parenline = self.parencol = self.lastline = None @@ -54,7 +54,7 @@ def position_window(self): return self.lastline = curline self.anchor_widget.see("insert") - super(CalltipWindow, self).position_window() + super().position_window() def showtip(self, text, parenleft, parenright): """Show the call-tip, bind events which will close it and reposition it. 
@@ -73,7 +73,7 @@ def showtip(self, text, parenleft, parenright): self.parenline, self.parencol = map( int, self.anchor_widget.index(parenleft).split(".")) - super(CalltipWindow, self).showtip() + super().showtip() self._bind_events() @@ -143,7 +143,7 @@ def hidetip(self): # ValueError may be raised by MultiCall pass - super(CalltipWindow, self).hidetip() + super().hidetip() def _bind_events(self): """Bind event handlers.""" diff --git a/Lib/idlelib/colorizer.py b/Lib/idlelib/colorizer.py index e9f19c145c8673..b4df353012b788 100644 --- a/Lib/idlelib/colorizer.py +++ b/Lib/idlelib/colorizer.py @@ -310,7 +310,7 @@ def recolorize_main(self): # crumb telling the next invocation to resume here # in case update tells us to leave. self.tag_add("TODO", next) - self.update() + self.update_idletasks() if self.stop_colorizing: if DEBUG: print("colorizing stopped") return diff --git a/Lib/idlelib/debugger.py b/Lib/idlelib/debugger.py index ccd03e46e16147..452c62b42655b3 100644 --- a/Lib/idlelib/debugger.py +++ b/Lib/idlelib/debugger.py @@ -49,9 +49,9 @@ def __frame2message(self, frame): filename = code.co_filename lineno = frame.f_lineno basename = os.path.basename(filename) - message = "%s:%s" % (basename, lineno) + message = f"{basename}:{lineno}" if code.co_name != "?": - message = "%s: %s()" % (message, code.co_name) + message = f"{message}: {code.co_name}()" return message @@ -213,7 +213,8 @@ def interaction(self, message, frame, info=None): m1 = "%s" % str(type) if value is not None: try: - m1 = "%s: %s" % (m1, str(value)) + # TODO redo entire section, tries not needed. + m1 = f"{m1}: {value}" except: pass bg = "yellow" diff --git a/Lib/idlelib/debugobj.py b/Lib/idlelib/debugobj.py index 5a4c9978842035..71d01c7070df54 100644 --- a/Lib/idlelib/debugobj.py +++ b/Lib/idlelib/debugobj.py @@ -87,7 +87,7 @@ def GetSubList(self): continue def setfunction(value, key=key, object=self.object): object[key] = value - item = make_objecttreeitem("%r:" % (key,), value, setfunction) + item = make_objecttreeitem(f"{key!r}:", value, setfunction) sublist.append(item) return sublist diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py index 08d6aa2efde22a..505815502600b1 100644 --- a/Lib/idlelib/editor.py +++ b/Lib/idlelib/editor.py @@ -38,12 +38,13 @@ def _sphinx_version(): "Format sys.version_info to produce the Sphinx version string used to install the chm docs" major, minor, micro, level, serial = sys.version_info - release = '%s%s' % (major, minor) - release += '%s' % (micro,) + # TODO remove unneeded function since .chm no longer installed + release = f'{major}{minor}' + release += f'{micro}' if level == 'candidate': - release += 'rc%s' % (serial,) + release += f'rc{serial}' elif level != 'final': - release += '%s%s' % (level[0], serial) + release += f'{level[0]}{serial}' return release @@ -950,7 +951,7 @@ def update_recent_files_list(self, new_file=None): rf_list = [] file_path = self.recent_files_path if file_path and os.path.exists(file_path): - with open(file_path, 'r', + with open(file_path, encoding='utf_8', errors='replace') as rf_list_file: rf_list = rf_list_file.readlines() if new_file: @@ -1458,7 +1459,7 @@ def newline_and_indent_event(self, event): else: self.reindent_to(y.compute_backslash_indent()) else: - assert 0, "bogus continuation type %r" % (c,) + assert 0, f"bogus continuation type {c!r}" return "break" # This line starts a brand new statement; indent relative to diff --git a/Lib/idlelib/filelist.py b/Lib/idlelib/filelist.py index 254f5caf6b81b0..f87781d2570fe0 100644 --- 
a/Lib/idlelib/filelist.py +++ b/Lib/idlelib/filelist.py @@ -22,7 +22,7 @@ def open(self, filename, action=None): # This can happen when bad filename is passed on command line: messagebox.showerror( "File Error", - "%r is a directory." % (filename,), + f"{filename!r} is a directory.", master=self.root) return None key = os.path.normcase(filename) @@ -90,7 +90,7 @@ def filename_changed_edit(self, edit): self.inversedict[conflict] = None messagebox.showerror( "Name Conflict", - "You now have multiple edit windows open for %r" % (filename,), + f"You now have multiple edit windows open for {filename!r}", master=self.root) self.dict[newkey] = edit self.inversedict[edit] = newkey diff --git a/Lib/idlelib/idle_test/test_config.py b/Lib/idlelib/idle_test/test_config.py index 697fda527968de..08ed76fe288294 100644 --- a/Lib/idlelib/idle_test/test_config.py +++ b/Lib/idlelib/idle_test/test_config.py @@ -191,7 +191,7 @@ def setUpClass(cls): idle_dir = os.path.abspath(sys.path[0]) for ctype in conf.config_types: config_path = os.path.join(idle_dir, '../config-%s.def' % ctype) - with open(config_path, 'r') as f: + with open(config_path) as f: cls.config_string[ctype] = f.read() cls.orig_warn = config._warn diff --git a/Lib/idlelib/idle_test/test_outwin.py b/Lib/idlelib/idle_test/test_outwin.py index e347bfca7f191a..d6e85ad674417c 100644 --- a/Lib/idlelib/idle_test/test_outwin.py +++ b/Lib/idlelib/idle_test/test_outwin.py @@ -159,7 +159,7 @@ def test_file_line_helper(self, mock_open): for line, expected_output in test_lines: self.assertEqual(flh(line), expected_output) if expected_output: - mock_open.assert_called_with(expected_output[0], 'r') + mock_open.assert_called_with(expected_output[0]) if __name__ == '__main__': diff --git a/Lib/idlelib/idle_test/test_sidebar.py b/Lib/idlelib/idle_test/test_sidebar.py index 049531e66a414e..5506fd2b0e22a5 100644 --- a/Lib/idlelib/idle_test/test_sidebar.py +++ b/Lib/idlelib/idle_test/test_sidebar.py @@ -328,7 +328,7 @@ def test_scroll(self): self.assertEqual(self.linenumber.sidebar_text.index('@0,0'), '11.0') # Generate a mouse-wheel event and make sure it scrolled up or down. - # The meaning of the "delta" is OS-dependant, so this just checks for + # The meaning of the "delta" is OS-dependent, so this just checks for # any change. self.linenumber.sidebar_text.event_generate('<MouseWheel>', x=0, y=0, @@ -691,7 +691,7 @@ def test_mousewheel(self): self.assertIsNotNone(text.dlineinfo(text.index(f'{last_lineno}.0'))) # Scroll up using the <MouseWheel> event. - # The meaning delta is platform-dependant. + # The meaning of delta is platform-dependent. delta = -1 if sys.platform == 'darwin' else 120 sidebar.canvas.event_generate('<MouseWheel>', x=0, y=0, delta=delta) yield diff --git a/Lib/idlelib/multicall.py b/Lib/idlelib/multicall.py index dc02001292fc14..0200f445cc9340 100644 --- a/Lib/idlelib/multicall.py +++ b/Lib/idlelib/multicall.py @@ -52,9 +52,9 @@ _modifier_masks = (MC_CONTROL, MC_ALT, MC_SHIFT, MC_META) # a dictionary to map a modifier name into its number -_modifier_names = dict([(name, number) +_modifier_names = {name: number for number in range(len(_modifiers)) - for name in _modifiers[number]]) + for name in _modifiers[number]} # In 3.4, if no shell window is ever open, the underlying Tk widget is # destroyed before .__del__ methods here are called. 
The following @@ -134,7 +134,7 @@ def nbits(n): return nb statelist = [] for state in states: - substates = list(set(state & x for x in states)) + substates = list({state & x for x in states}) substates.sort(key=nbits, reverse=True) statelist.append(substates) return statelist @@ -258,9 +258,9 @@ def __del__(self): _binder_classes = (_ComplexBinder,) * 4 + (_SimpleBinder,) * (len(_types)-4) # A dictionary to map a type name into its number -_type_names = dict([(name, number) +_type_names = {name: number for number in range(len(_types)) - for name in _types[number]]) + for name in _types[number]} _keysym_re = re.compile(r"^\w+$") _button_re = re.compile(r"^[1-5]$") diff --git a/Lib/idlelib/outwin.py b/Lib/idlelib/outwin.py index 5ab08bbaf4bc95..610031e26f1dff 100644 --- a/Lib/idlelib/outwin.py +++ b/Lib/idlelib/outwin.py @@ -42,7 +42,7 @@ def file_line_helper(line): if match: filename, lineno = match.group(1, 2) try: - f = open(filename, "r") + f = open(filename) f.close() break except OSError: @@ -112,7 +112,7 @@ def write(self, s, tags=(), mark="insert"): assert isinstance(s, str) self.text.insert(mark, s, tags) self.text.see(mark) - self.text.update() + self.text.update_idletasks() return len(s) def writelines(self, lines): diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py index e68233a5a4131e..bdde156166171b 100755 --- a/Lib/idlelib/pyshell.py +++ b/Lib/idlelib/pyshell.py @@ -249,7 +249,7 @@ def store_file_breaks(self): breaks = self.breakpoints filename = self.io.filename try: - with open(self.breakpointPath, "r") as fp: + with open(self.breakpointPath) as fp: lines = fp.readlines() except OSError: lines = [] @@ -279,7 +279,7 @@ def restore_file_breaks(self): if filename is None: return if os.path.isfile(self.breakpointPath): - with open(self.breakpointPath, "r") as fp: + with open(self.breakpointPath) as fp: lines = fp.readlines() for line in lines: if line.startswith(filename + '='): @@ -441,7 +441,7 @@ def build_subprocess_arglist(self): # run from the IDLE source directory. 
del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc', default=False, type='bool') - command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,) + command = f"__import__('idlelib.run').run.main({del_exitf!r})" return [sys.executable] + w + ["-c", command, str(self.port)] def start_subprocess(self): @@ -574,9 +574,9 @@ def transfer_path(self, with_cwd=False): self.runcommand("""if 1: import sys as _sys - _sys.path = %r + _sys.path = {!r} del _sys - \n""" % (path,)) + \n""".format(path)) active_seq = None @@ -703,14 +703,14 @@ def stuffsource(self, source): def prepend_syspath(self, filename): "Prepend sys.path with file's directory if not already included" self.runcommand("""if 1: - _filename = %r + _filename = {!r} import sys as _sys from os.path import dirname as _dirname _dir = _dirname(_filename) if not _dir in _sys.path: _sys.path.insert(0, _dir) del _filename, _sys, _dirname, _dir - \n""" % (filename,)) + \n""".format(filename)) def showsyntaxerror(self, filename=None): """Override Interactive Interpreter method: Use Colorizing @@ -1536,7 +1536,7 @@ def main(): try: opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:") except getopt.error as msg: - print("Error: %s\n%s" % (msg, usage_msg), file=sys.stderr) + print(f"Error: {msg}\n{usage_msg}", file=sys.stderr) sys.exit(2) for o, a in opts: if o == '-c': @@ -1668,9 +1668,9 @@ def main(): if cmd or script: shell.interp.runcommand("""if 1: import sys as _sys - _sys.argv = %r + _sys.argv = {!r} del _sys - \n""" % (sys.argv,)) + \n""".format(sys.argv)) if cmd: shell.interp.execsource(cmd) elif script: diff --git a/Lib/idlelib/redirector.py b/Lib/idlelib/redirector.py index 9ab34c5acfb22c..4928340e98df68 100644 --- a/Lib/idlelib/redirector.py +++ b/Lib/idlelib/redirector.py @@ -47,9 +47,8 @@ def __init__(self, widget): tk.createcommand(w, self.dispatch) def __repr__(self): - return "%s(%s<%s>)" % (self.__class__.__name__, - self.widget.__class__.__name__, - self.widget._w) + w = self.widget + return f"{self.__class__.__name__,}({w.__class__.__name__}<{w._w}>)" def close(self): "Unregister operations and revert redirection created by .__init__." 
@@ -143,8 +142,7 @@ def __init__(self, redir, operation): self.orig_and_operation = (redir.orig, operation) def __repr__(self): - return "%s(%r, %r)" % (self.__class__.__name__, - self.redir, self.operation) + return f"{self.__class__.__name__,}({self.redir!r}, {self.operation!r})" def __call__(self, *args): return self.tk_call(self.orig_and_operation + args) diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py index 62eec84c9c8d09..b08b80c9004551 100644 --- a/Lib/idlelib/rpc.py +++ b/Lib/idlelib/rpc.py @@ -174,7 +174,7 @@ def localcall(self, seq, request): except TypeError: return ("ERROR", "Bad request format") if oid not in self.objtable: - return ("ERROR", "Unknown object id: %r" % (oid,)) + return ("ERROR", f"Unknown object id: {oid!r}") obj = self.objtable[oid] if methodname == "__methods__": methods = {} @@ -185,7 +185,7 @@ def localcall(self, seq, request): _getattributes(obj, attributes) return ("OK", attributes) if not hasattr(obj, methodname): - return ("ERROR", "Unsupported method name: %r" % (methodname,)) + return ("ERROR", f"Unsupported method name: {methodname!r}") method = getattr(obj, methodname) try: if how == 'CALL': diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py index 577c49eb67b20d..84792a82b0022c 100644 --- a/Lib/idlelib/run.py +++ b/Lib/idlelib/run.py @@ -52,13 +52,13 @@ def idle_formatwarning(message, category, filename, lineno, line=None): """Format warnings the IDLE way.""" s = "\nWarning (from warnings module):\n" - s += ' File \"%s\", line %s\n' % (filename, lineno) + s += f' File \"{filename}\", line {lineno}\n' if line is None: line = linecache.getline(filename, lineno) line = line.strip() if line: s += " %s\n" % line - s += "%s: %s\n" % (category.__name__, message) + s += f"{category.__name__}: {message}\n" return s def idle_showwarning_subproc( @@ -239,6 +239,7 @@ def print_exception(): efile = sys.stderr typ, val, tb = excinfo = sys.exc_info() sys.last_type, sys.last_value, sys.last_traceback = excinfo + sys.last_exc = val seen = set() def print_exc(typ, exc, tb): diff --git a/Lib/idlelib/textview.py b/Lib/idlelib/textview.py index a66c1a4309a617..23f0f4cb5027ec 100644 --- a/Lib/idlelib/textview.py +++ b/Lib/idlelib/textview.py @@ -169,7 +169,7 @@ def view_file(parent, title, filename, encoding, modal=True, wrap='word', with contents of the file. """ try: - with open(filename, 'r', encoding=encoding) as file: + with open(filename, encoding=encoding) as file: contents = file.read() except OSError: showerror(title='File Load Error', diff --git a/Lib/idlelib/tooltip.py b/Lib/idlelib/tooltip.py index d714318dae8ef1..3983690dd41177 100644 --- a/Lib/idlelib/tooltip.py +++ b/Lib/idlelib/tooltip.py @@ -92,7 +92,7 @@ def __init__(self, anchor_widget, hover_delay=1000): e.g. after hovering over the anchor widget with the mouse for enough time. """ - super(OnHoverTooltipBase, self).__init__(anchor_widget) + super().__init__(anchor_widget) self.hover_delay = hover_delay self._after_id = None @@ -107,7 +107,7 @@ def __del__(self): self.anchor_widget.unbind("<Button>", self._id3) # pragma: no cover except TclError: pass - super(OnHoverTooltipBase, self).__del__() + super().__del__() def _show_event(self, event=None): """event handler to display the tooltip""" @@ -139,7 +139,7 @@ def hidetip(self): self.unschedule() except TclError: # pragma: no cover pass - super(OnHoverTooltipBase, self).hidetip() + super().hidetip() class Hovertip(OnHoverTooltipBase): @@ -154,7 +154,7 @@ def __init__(self, anchor_widget, text, hover_delay=1000): e.g. 
after hovering over the anchor widget with the mouse for enough time. """ - super(Hovertip, self).__init__(anchor_widget, hover_delay=hover_delay) + super().__init__(anchor_widget, hover_delay=hover_delay) self.text = text def showcontents(self): diff --git a/Lib/idlelib/tree.py b/Lib/idlelib/tree.py index 5947268f5c35ae..5f30f0f6092bfa 100644 --- a/Lib/idlelib/tree.py +++ b/Lib/idlelib/tree.py @@ -32,7 +32,7 @@ if os.path.isdir(_icondir): ICONDIR = _icondir elif not os.path.isdir(ICONDIR): - raise RuntimeError("can't find icon directory (%r)" % (ICONDIR,)) + raise RuntimeError(f"can't find icon directory ({ICONDIR!r})") def listicons(icondir=ICONDIR): """Utility to display the available icons.""" diff --git a/Lib/idlelib/undo.py b/Lib/idlelib/undo.py index 85ecffecb4cbcb..5f10c0f05c1acb 100644 --- a/Lib/idlelib/undo.py +++ b/Lib/idlelib/undo.py @@ -309,7 +309,7 @@ def __repr__(self): s = self.__class__.__name__ strs = [] for cmd in self.cmds: - strs.append(" %r" % (cmd,)) + strs.append(f" {cmd!r}") return s + "(\n" + ",\n".join(strs) + "\n)" def __len__(self): diff --git a/Lib/imp.py b/Lib/imp.py deleted file mode 100644 index fe850f6a001814..00000000000000 --- a/Lib/imp.py +++ /dev/null @@ -1,346 +0,0 @@ -"""This module provides the components needed to build your own __import__ -function. Undocumented functions are obsolete. - -In most cases it is preferred you consider using the importlib module's -functionality over this module. - -""" -# (Probably) need to stay in _imp -from _imp import (lock_held, acquire_lock, release_lock, - get_frozen_object, is_frozen_package, - init_frozen, is_builtin, is_frozen, - _fix_co_filename, _frozen_module_names) -try: - from _imp import create_dynamic -except ImportError: - # Platform doesn't support dynamic loading. - create_dynamic = None - -from importlib._bootstrap import _ERR_MSG, _exec, _load, _builtin_from_name -from importlib._bootstrap_external import SourcelessFileLoader - -from importlib import machinery -from importlib import util -import importlib -import os -import sys -import tokenize -import types -import warnings - -warnings.warn("the imp module is deprecated in favour of importlib and slated " - "for removal in Python 3.12; " - "see the module's documentation for alternative uses", - DeprecationWarning, stacklevel=2) - -# DEPRECATED -SEARCH_ERROR = 0 -PY_SOURCE = 1 -PY_COMPILED = 2 -C_EXTENSION = 3 -PY_RESOURCE = 4 -PKG_DIRECTORY = 5 -C_BUILTIN = 6 -PY_FROZEN = 7 -PY_CODERESOURCE = 8 -IMP_HOOK = 9 - - -def new_module(name): - """**DEPRECATED** - - Create a new module. - - The module is not entered into sys.modules. - - """ - return types.ModuleType(name) - - -def get_magic(): - """**DEPRECATED** - - Return the magic number for .pyc files. - """ - return util.MAGIC_NUMBER - - -def get_tag(): - """Return the magic tag for .pyc files.""" - return sys.implementation.cache_tag - - -def cache_from_source(path, debug_override=None): - """**DEPRECATED** - - Given the path to a .py file, return the path to its .pyc file. - - The .py file does not need to exist; this simply returns the path to the - .pyc file calculated as if the .py file were imported. - - If debug_override is not None, then it must be a boolean and is used in - place of sys.flags.optimize. - - If sys.implementation.cache_tag is None then NotImplementedError is raised. 
- - """ - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - return util.cache_from_source(path, debug_override) - - -def source_from_cache(path): - """**DEPRECATED** - - Given the path to a .pyc. file, return the path to its .py file. - - The .pyc file does not need to exist; this simply returns the path to - the .py file calculated to correspond to the .pyc file. If path does - not conform to PEP 3147 format, ValueError will be raised. If - sys.implementation.cache_tag is None then NotImplementedError is raised. - - """ - return util.source_from_cache(path) - - -def get_suffixes(): - """**DEPRECATED**""" - extensions = [(s, 'rb', C_EXTENSION) for s in machinery.EXTENSION_SUFFIXES] - source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] - bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] - - return extensions + source + bytecode - - -class NullImporter: - - """**DEPRECATED** - - Null import object. - - """ - - def __init__(self, path): - if path == '': - raise ImportError('empty pathname', path='') - elif os.path.isdir(path): - raise ImportError('existing directory', path=path) - - def find_module(self, fullname): - """Always returns None.""" - return None - - -class _HackedGetData: - - """Compatibility support for 'file' arguments of various load_*() - functions.""" - - def __init__(self, fullname, path, file=None): - super().__init__(fullname, path) - self.file = file - - def get_data(self, path): - """Gross hack to contort loader to deal w/ load_*()'s bad API.""" - if self.file and path == self.path: - # The contract of get_data() requires us to return bytes. Reopen the - # file in binary mode if needed. - if not self.file.closed: - file = self.file - if 'b' not in file.mode: - file.close() - if self.file.closed: - self.file = file = open(self.path, 'rb') - - with file: - return file.read() - else: - return super().get_data(path) - - -class _LoadSourceCompatibility(_HackedGetData, machinery.SourceFileLoader): - - """Compatibility support for implementing load_source().""" - - -def load_source(name, pathname, file=None): - loader = _LoadSourceCompatibility(name, pathname, file) - spec = util.spec_from_file_location(name, pathname, loader=loader) - if name in sys.modules: - module = _exec(spec, sys.modules[name]) - else: - module = _load(spec) - # To allow reloading to potentially work, use a non-hacked loader which - # won't rely on a now-closed file object. - module.__loader__ = machinery.SourceFileLoader(name, pathname) - module.__spec__.loader = module.__loader__ - return module - - -class _LoadCompiledCompatibility(_HackedGetData, SourcelessFileLoader): - - """Compatibility support for implementing load_compiled().""" - - -def load_compiled(name, pathname, file=None): - """**DEPRECATED**""" - loader = _LoadCompiledCompatibility(name, pathname, file) - spec = util.spec_from_file_location(name, pathname, loader=loader) - if name in sys.modules: - module = _exec(spec, sys.modules[name]) - else: - module = _load(spec) - # To allow reloading to potentially work, use a non-hacked loader which - # won't rely on a now-closed file object. 
- module.__loader__ = SourcelessFileLoader(name, pathname) - module.__spec__.loader = module.__loader__ - return module - - -def load_package(name, path): - """**DEPRECATED**""" - if os.path.isdir(path): - extensions = (machinery.SOURCE_SUFFIXES[:] + - machinery.BYTECODE_SUFFIXES[:]) - for extension in extensions: - init_path = os.path.join(path, '__init__' + extension) - if os.path.exists(init_path): - path = init_path - break - else: - raise ValueError('{!r} is not a package'.format(path)) - spec = util.spec_from_file_location(name, path, - submodule_search_locations=[]) - if name in sys.modules: - return _exec(spec, sys.modules[name]) - else: - return _load(spec) - - -def load_module(name, file, filename, details): - """**DEPRECATED** - - Load a module, given information returned by find_module(). - - The module name must include the full package name, if any. - - """ - suffix, mode, type_ = details - if mode and (not mode.startswith('r') or '+' in mode): - raise ValueError('invalid file open mode {!r}'.format(mode)) - elif file is None and type_ in {PY_SOURCE, PY_COMPILED}: - msg = 'file object required for import (type code {})'.format(type_) - raise ValueError(msg) - elif type_ == PY_SOURCE: - return load_source(name, filename, file) - elif type_ == PY_COMPILED: - return load_compiled(name, filename, file) - elif type_ == C_EXTENSION and load_dynamic is not None: - if file is None: - with open(filename, 'rb') as opened_file: - return load_dynamic(name, filename, opened_file) - else: - return load_dynamic(name, filename, file) - elif type_ == PKG_DIRECTORY: - return load_package(name, filename) - elif type_ == C_BUILTIN: - return init_builtin(name) - elif type_ == PY_FROZEN: - return init_frozen(name) - else: - msg = "Don't know how to import {} (type code {})".format(name, type_) - raise ImportError(msg, name=name) - - -def find_module(name, path=None): - """**DEPRECATED** - - Search for a module. - - If path is omitted or None, search for a built-in, frozen or special - module and continue search in sys.path. The module name cannot - contain '.'; to search for a submodule of a package, pass the - submodule name and the package's __path__. - - """ - if not isinstance(name, str): - raise TypeError("'name' must be a str, not {}".format(type(name))) - elif not isinstance(path, (type(None), list)): - # Backwards-compatibility - raise RuntimeError("'path' must be None or a list, " - "not {}".format(type(path))) - - if path is None: - if is_builtin(name): - return None, None, ('', '', C_BUILTIN) - elif is_frozen(name): - return None, None, ('', '', PY_FROZEN) - else: - path = sys.path - - for entry in path: - package_directory = os.path.join(entry, name) - for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: - package_file_name = '__init__' + suffix - file_path = os.path.join(package_directory, package_file_name) - if os.path.isfile(file_path): - return None, package_directory, ('', '', PKG_DIRECTORY) - for suffix, mode, type_ in get_suffixes(): - file_name = name + suffix - file_path = os.path.join(entry, file_name) - if os.path.isfile(file_path): - break - else: - continue - break # Break out of outer loop when breaking out of inner loop. 
- else: - raise ImportError(_ERR_MSG.format(name), name=name) - - encoding = None - if 'b' not in mode: - with open(file_path, 'rb') as file: - encoding = tokenize.detect_encoding(file.readline)[0] - file = open(file_path, mode, encoding=encoding) - return file, file_path, (suffix, mode, type_) - - -def reload(module): - """**DEPRECATED** - - Reload the module and return it. - - The module must have been successfully imported before. - - """ - return importlib.reload(module) - - -def init_builtin(name): - """**DEPRECATED** - - Load and return a built-in module by name, or None is such module doesn't - exist - """ - try: - return _builtin_from_name(name) - except ImportError: - return None - - -if create_dynamic: - def load_dynamic(name, path, file=None): - """**DEPRECATED** - - Load an extension module. - """ - import importlib.machinery - loader = importlib.machinery.ExtensionFileLoader(name, path) - - # Issue #24748: Skip the sys.modules check in _load_module_shim; - # always load new extension - spec = importlib.util.spec_from_file_location( - name, path, loader=loader) - return _load(spec) - -else: - load_dynamic = None diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index bebe7e15cbce67..e4fcaa61e6de29 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -887,8 +887,6 @@ class BuiltinImporter: @classmethod def find_spec(cls, fullname, path=None, target=None): - if path is not None: - return None if _imp.is_builtin(fullname): return spec_from_loader(fullname, cls, origin=cls._ORIGIN) else: @@ -1262,7 +1260,7 @@ def _find_and_load_unlocked(name, import_): try: path = parent_module.__path__ except AttributeError: - msg = f'{_ERR_MSG_PREFIX} {name!r}; {parent!r} is not a package' + msg = f'{_ERR_MSG_PREFIX}{name!r}; {parent!r} is not a package' raise ModuleNotFoundError(msg, name=name) from None parent_spec = parent_module.__spec__ child = name.rpartition('.')[2] diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index d6b0765aa18efa..58d093184ce307 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -435,7 +435,14 @@ def _write_atomic(path, data, mode=0o666): # Python 3.12a6 3519 (Modify SEND instruction) # Python 3.12a6 3520 (Remove PREP_RERAISE_STAR, add CALL_INTRINSIC_2) # Python 3.12a7 3521 (Shrink the LOAD_GLOBAL caches) -# Python 3.12a7 3522 (Inline list/dict/set comprehensions) +# Python 3.12a7 3522 (Removed JUMP_IF_FALSE_OR_POP/JUMP_IF_TRUE_OR_POP) +# Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP) +# Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches) +# Python 3.12b1 3525 (Shrink the CALL caches) +# Python 3.12b1 3526 (Add instrumentation support) +# Python 3.12b1 3527 (Add LOAD_SUPER_ATTR) +# Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization) +# Python 3.12b1 3529 (Inline list/dict/set comprehensions) # Python 3.13 will start with 3550 @@ -452,7 +459,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. 
-MAGIC_NUMBER = (3522).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3529).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py index 40ab1a1aaac328..82e0ce1b281c54 100644 --- a/Lib/importlib/metadata/__init__.py +++ b/Lib/importlib/metadata/__init__.py @@ -12,7 +12,9 @@ import functools import itertools import posixpath +import contextlib import collections +import inspect from . import _adapters, _meta from ._collections import FreezableDefaultDict, Pair @@ -24,7 +26,7 @@ from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap -from typing import List, Mapping, Optional +from typing import List, Mapping, Optional, cast __all__ = [ @@ -341,11 +343,30 @@ def __repr__(self): return f'<FileHash mode: {self.mode} value: {self.value}>' -class Distribution: +class DeprecatedNonAbstract: + def __new__(cls, *args, **kwargs): + all_names = { + name for subclass in inspect.getmro(cls) for name in vars(subclass) + } + abstract = { + name + for name in all_names + if getattr(getattr(cls, name), '__isabstractmethod__', False) + } + if abstract: + warnings.warn( + f"Unimplemented abstract methods {abstract}", + DeprecationWarning, + stacklevel=2, + ) + return super().__new__(cls) + + +class Distribution(DeprecatedNonAbstract): """A Python distribution package.""" @abc.abstractmethod - def read_text(self, filename): + def read_text(self, filename) -> Optional[str]: """Attempt to load metadata file given by the name. :param filename: The name of the file in the distribution info. @@ -419,7 +440,7 @@ def metadata(self) -> _meta.PackageMetadata: The returned object will have keys that name the various bits of metadata. See PEP 566 for details. """ - text = ( + opt_text = ( self.read_text('METADATA') or self.read_text('PKG-INFO') # This last clause is here to support old egg-info files. Its @@ -427,6 +448,7 @@ def metadata(self) -> _meta.PackageMetadata: # (which points to the egg-info file) attribute unchanged. or self.read_text('') ) + text = cast(str, opt_text) return _adapters.Message(email.message_from_string(text)) @property @@ -455,8 +477,8 @@ def files(self): :return: List of PackagePath for this distribution or None Result is `None` if the metadata file that enumerates files - (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is - missing. + (i.e. RECORD for dist-info, or installed-files.txt or + SOURCES.txt for egg-info) is missing. Result may be empty if the metadata exists but is empty. 
""" @@ -469,9 +491,19 @@ def make_file(name, hash=None, size_str=None): @pass_none def make_files(lines): - return list(starmap(make_file, csv.reader(lines))) + return starmap(make_file, csv.reader(lines)) - return make_files(self._read_files_distinfo() or self._read_files_egginfo()) + @pass_none + def skip_missing_files(package_paths): + return list(filter(lambda path: path.locate().exists(), package_paths)) + + return skip_missing_files( + make_files( + self._read_files_distinfo() + or self._read_files_egginfo_installed() + or self._read_files_egginfo_sources() + ) + ) def _read_files_distinfo(self): """ @@ -480,10 +512,45 @@ def _read_files_distinfo(self): text = self.read_text('RECORD') return text and text.splitlines() - def _read_files_egginfo(self): + def _read_files_egginfo_installed(self): + """ + Read installed-files.txt and return lines in a similar + CSV-parsable format as RECORD: each file must be placed + relative to the site-packages directory and must also be + quoted (since file names can contain literal commas). + + This file is written when the package is installed by pip, + but it might not be written for other installation methods. + Assume the file is accurate if it exists. """ - SOURCES.txt might contain literal commas, so wrap each line - in quotes. + text = self.read_text('installed-files.txt') + # Prepend the .egg-info/ subdir to the lines in this file. + # But this subdir is only available from PathDistribution's + # self._path. + subdir = getattr(self, '_path', None) + if not text or not subdir: + return + + paths = ( + (subdir / name) + .resolve() + .relative_to(self.locate_file('').resolve()) + .as_posix() + for name in text.splitlines() + ) + return map('"{}"'.format, paths) + + def _read_files_egginfo_sources(self): + """ + Read SOURCES.txt and return lines in a similar CSV-parsable + format as RECORD: each file name must be quoted (since it + might contain literal commas). + + Note that SOURCES.txt is not a reliable source for what + files are installed by a package. This file is generated + for a source archive, and the files that are present + there (e.g. setup.py) may not correctly reflect the files + that are present after the package has been installed. """ text = self.read_text('SOURCES.txt') return text and map('"{}"'.format, text.splitlines()) @@ -886,8 +953,13 @@ def _top_level_declared(dist): def _top_level_inferred(dist): - return { - f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name + opt_names = { + f.parts[0] if len(f.parts) > 1 else inspect.getmodulename(f) for f in always_iterable(dist.files) - if f.suffix == ".py" } + + @pass_none + def importable_name(name): + return '.' 
not in name + + return filter(importable_name, opt_names) diff --git a/Lib/importlib/metadata/_adapters.py b/Lib/importlib/metadata/_adapters.py index aa460d3eda50fb..6aed69a30857e4 100644 --- a/Lib/importlib/metadata/_adapters.py +++ b/Lib/importlib/metadata/_adapters.py @@ -1,3 +1,5 @@ +import functools +import warnings import re import textwrap import email.message @@ -5,6 +7,15 @@ from ._text import FoldedCase +# Do not remove prior to 2024-01-01 or Python 3.14 +_warn = functools.partial( + warnings.warn, + "Implicit None on return values is deprecated and will raise KeyErrors.", + DeprecationWarning, + stacklevel=2, +) + + class Message(email.message.Message): multiple_use_keys = set( map( @@ -39,6 +50,16 @@ def __init__(self, *args, **kwargs): def __iter__(self): return super().__iter__() + def __getitem__(self, item): + """ + Warn users that a ``KeyError`` can be expected when a + mising key is supplied. Ref python/importlib_metadata#371. + """ + res = super().__getitem__(item) + if res is None: + _warn() + return res + def _repair_headers(self): def redent(value): "Correct for RFC822 indentation" diff --git a/Lib/importlib/metadata/_meta.py b/Lib/importlib/metadata/_meta.py index d5c0576194ece2..c9a7ef906a8a8c 100644 --- a/Lib/importlib/metadata/_meta.py +++ b/Lib/importlib/metadata/_meta.py @@ -1,4 +1,5 @@ -from typing import Any, Dict, Iterator, List, Protocol, TypeVar, Union +from typing import Protocol +from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload _T = TypeVar("_T") @@ -17,7 +18,21 @@ def __getitem__(self, key: str) -> str: def __iter__(self) -> Iterator[str]: ... # pragma: no cover - def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]: + @overload + def get(self, name: str, failobj: None = None) -> Optional[str]: + ... # pragma: no cover + + @overload + def get(self, name: str, failobj: _T) -> Union[str, _T]: + ... # pragma: no cover + + # overload per python/importlib_metadata#435 + @overload + def get_all(self, name: str, failobj: None = None) -> Optional[List[Any]]: + ... # pragma: no cover + + @overload + def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]: """ Return all values associated with a possibly multi-valued key. """ @@ -29,18 +44,19 @@ def json(self) -> Dict[str, Union[str, List[str]]]: """ -class SimplePath(Protocol): +class SimplePath(Protocol[_T]): """ A minimal subset of pathlib.Path required by PathDistribution. """ - def joinpath(self) -> 'SimplePath': + def joinpath(self) -> _T: ... # pragma: no cover - def __truediv__(self) -> 'SimplePath': + def __truediv__(self, other: Union[str, _T]) -> _T: ... # pragma: no cover - def parent(self) -> 'SimplePath': + @property + def parent(self) -> _T: ... 
# pragma: no cover def read_text(self) -> str: diff --git a/Lib/inspect.py b/Lib/inspect.py index 0eceaaf9a24f5d..92c2675cfd7d32 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -1766,15 +1766,17 @@ def stack(context=1): def trace(context=1): """Return a list of records for the stack below the current exception.""" - return getinnerframes(sys.exc_info()[2], context) + exc = sys.exception() + tb = None if exc is None else exc.__traceback__ + return getinnerframes(tb, context) # ------------------------------------------------ static version of getattr _sentinel = object() +_static_getmro = type.__dict__['__mro__'].__get__ +_get_dunder_dict_of_class = type.__dict__["__dict__"].__get__ -def _static_getmro(klass): - return type.__dict__['__mro__'].__get__(klass) def _check_instance(obj, attr): instance_dict = {} @@ -1787,28 +1789,15 @@ def _check_instance(obj, attr): def _check_class(klass, attr): for entry in _static_getmro(klass): - if _shadowed_dict(type(entry)) is _sentinel: - try: - return entry.__dict__[attr] - except KeyError: - pass + if _shadowed_dict(type(entry)) is _sentinel and attr in entry.__dict__: + return entry.__dict__[attr] return _sentinel -def _is_type(obj): - try: - _static_getmro(obj) - except TypeError: - return False - return True - def _shadowed_dict(klass): - dict_attr = type.__dict__["__dict__"] for entry in _static_getmro(klass): - try: - class_dict = dict_attr.__get__(entry)["__dict__"] - except KeyError: - pass - else: + dunder_dict = _get_dunder_dict_of_class(entry) + if '__dict__' in dunder_dict: + class_dict = dunder_dict['__dict__'] if not (type(class_dict) is types.GetSetDescriptorType and class_dict.__name__ == "__dict__" and class_dict.__objclass__ is entry): @@ -1827,8 +1816,10 @@ def getattr_static(obj, attr, default=_sentinel): documentation for details. """ instance_result = _sentinel - if not _is_type(obj): - klass = type(obj) + + objtype = type(obj) + if type not in _static_getmro(objtype): + klass = objtype dict_attr = _shadowed_dict(klass) if (dict_attr is _sentinel or type(dict_attr) is types.MemberDescriptorType): @@ -1851,11 +1842,11 @@ def getattr_static(obj, attr, default=_sentinel): if obj is klass: # for types we check the metaclass too for entry in _static_getmro(type(klass)): - if _shadowed_dict(type(entry)) is _sentinel: - try: - return entry.__dict__[attr] - except KeyError: - pass + if ( + _shadowed_dict(type(entry)) is _sentinel + and attr in entry.__dict__ + ): + return entry.__dict__[attr] if default is not _sentinel: return default raise AttributeError(attr) @@ -3017,7 +3008,7 @@ def __init__(self, parameters=None, *, return_annotation=_empty, if __validate_parameters__: params = OrderedDict() top_kind = _POSITIONAL_ONLY - kind_defaults = False + seen_default = False for param in parameters: kind = param.kind @@ -3032,21 +3023,19 @@ def __init__(self, parameters=None, *, return_annotation=_empty, kind.description) raise ValueError(msg) elif kind > top_kind: - kind_defaults = False top_kind = kind if kind in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD): if param.default is _empty: - if kind_defaults: + if seen_default: # No default for this parameter, but the - # previous parameter of the same kind had - # a default + # previous parameter of had a default msg = 'non-default argument follows default ' \ 'argument' raise ValueError(msg) else: # There is a default for this parameter. 
- kind_defaults = True + seen_default = True if name in params: msg = 'duplicate parameter name: {!r}'.format(name) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 1cb71d8032e173..af1d5c4800cce8 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1821,9 +1821,6 @@ def _string_from_ip_int(cls, ip_int=None): def _explode_shorthand_ip_string(self): """Expand a shortened IPv6 address. - Args: - ip_str: A string, the IPv6 address. - Returns: A string, the expanded IPv6 address. diff --git a/Lib/locale.py b/Lib/locale.py index c2c7a04b280708..e94f0d1acbaa7d 100644 --- a/Lib/locale.py +++ b/Lib/locale.py @@ -545,7 +545,9 @@ def getdefaultlocale(envvars=('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE')): "Use setlocale(), getencoding() and getlocale() instead", DeprecationWarning, stacklevel=2 ) + return _getdefaultlocale(envvars) +def _getdefaultlocale(envvars=('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE')): try: # check if it's supported by the _locale module import _locale @@ -639,7 +641,7 @@ def getencoding(): # On Android langinfo.h and CODESET are missing, and UTF-8 is # always used in mbstowcs() and wcstombs(). return 'utf-8' - encoding = getdefaultlocale()[1] + encoding = _getdefaultlocale()[1] if encoding is None: # LANG not set, default to UTF-8 encoding = 'utf-8' @@ -960,7 +962,7 @@ def getpreferredencoding(do_setlocale=True): 'c.ascii': 'C', 'c.en': 'C', 'c.iso88591': 'en_US.ISO8859-1', - 'c.utf8': 'en_US.UTF-8', + 'c.utf8': 'C.UTF-8', 'c_c': 'C', 'c_c.c': 'C', 'ca': 'ca_ES.ISO8859-1', diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 9241d73d0fd03c..056380fb2287af 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -173,8 +173,8 @@ def currentframe(): """Return the frame object for the caller's stack frame.""" try: raise Exception - except Exception: - return sys.exc_info()[2].tb_frame.f_back + except Exception as exc: + return exc.__traceback__.tb_frame.f_back # # _srcfile is used when walking the stack to check when we've got the first diff --git a/Lib/logging/config.py b/Lib/logging/config.py index 7cd16c643e9dad..16c54a6a4f7a2f 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -114,11 +114,18 @@ def _create_formatters(cp): fs = cp.get(sectname, "format", raw=True, fallback=None) dfs = cp.get(sectname, "datefmt", raw=True, fallback=None) stl = cp.get(sectname, "style", raw=True, fallback='%') + defaults = cp.get(sectname, "defaults", raw=True, fallback=None) + c = logging.Formatter class_name = cp[sectname].get("class") if class_name: c = _resolve(class_name) - f = c(fs, dfs, stl) + + if defaults is not None: + defaults = eval(defaults, vars(logging)) + f = c(fs, dfs, stl, defaults=defaults) + else: + f = c(fs, dfs, stl) formatters[form] = f return formatters @@ -668,18 +675,27 @@ def configure_formatter(self, config): dfmt = config.get('datefmt', None) style = config.get('style', '%') cname = config.get('class', None) + defaults = config.get('defaults', None) if not cname: c = logging.Formatter else: c = _resolve(cname) + kwargs = {} + + # Add defaults only if it exists. + # Prevents TypeError in custom formatter callables that do not + # accept it. 
+ if defaults is not None: + kwargs['defaults'] = defaults + # A TypeError would be raised if "validate" key is passed in with a formatter callable # that does not accept "validate" as a parameter if 'validate' in config: # if user hasn't mentioned it, the default will be fine - result = c(fmt, dfmt, style, config['validate']) + result = c(fmt, dfmt, style, config['validate'], **kwargs) else: - result = c(fmt, dfmt, style) + result = c(fmt, dfmt, style, **kwargs) return result diff --git a/Lib/ntpath.py b/Lib/ntpath.py index e93a5e69600973..0f3674fe11eecd 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -142,7 +142,7 @@ def join(path, *paths): result_path = result_path + p_path ## add separator between UNC and non-absolute path if (result_path and not result_root and - result_drive and result_drive[-1:] != colon): + result_drive and result_drive[-1:] not in colon + seps): return result_drive + sep + result_path return result_drive + result_root + result_path except (TypeError, AttributeError, BytesWarning): @@ -670,7 +670,7 @@ def _getfinalpathname_nonstrict(path): # Non-strict algorithm is to find as much of the target directory # as we can and join the rest. - tail = '' + tail = path[:0] while path: try: path = _getfinalpathname(path) diff --git a/Lib/opcode.py b/Lib/opcode.py index 0f93cf279c7d68..b4a107fce6d082 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -83,6 +83,7 @@ def pseudo_op(name, op, real_ops): def_op('INTERPRETER_EXIT', 3) def_op('END_FOR', 4) +def_op('END_SEND', 5) def_op('NOP', 9) @@ -91,6 +92,10 @@ def pseudo_op(name, op, real_ops): def_op('UNARY_INVERT', 15) +# We reserve 17 as it is the initial value for the specializing counter +# This helps us catch cases where we attempt to execute a cache. +def_op('RESERVED', 17) + def_op('BINARY_SUBSCR', 25) def_op('BINARY_SLICE', 26) def_op('STORE_SLICE', 27) @@ -154,8 +159,6 @@ def pseudo_op(name, op, real_ops): name_op('IMPORT_NAME', 108) # Index in name list name_op('IMPORT_FROM', 109) # Index in name list jrel_op('JUMP_FORWARD', 110) # Number of words to skip -jrel_op('JUMP_IF_FALSE_OR_POP', 111) # Number of words to skip -jrel_op('JUMP_IF_TRUE_OR_POP', 112) # "" jrel_op('POP_JUMP_IF_FALSE', 114) jrel_op('POP_JUMP_IF_TRUE', 115) name_op('LOAD_GLOBAL', 116) # Index in name list @@ -193,9 +196,7 @@ def pseudo_op(name, op, real_ops): def_op('DELETE_DEREF', 139) hasfree.append(139) jrel_op('JUMP_BACKWARD', 140) # Number of words to skip (backwards) -def_op('COMPARE_AND_BRANCH', 141) # Comparison and jump -hascompare.append(141) - +name_op('LOAD_SUPER_ATTR', 141) def_op('CALL_FUNCTION_EX', 142) # Flags def_op('LOAD_FAST_AND_CLEAR', 143) # Local variable number haslocal.append(143) @@ -227,6 +228,28 @@ def pseudo_op(name, op, real_ops): def_op('CALL_INTRINSIC_1', 173) def_op('CALL_INTRINSIC_2', 174) +# Instrumented instructions +MIN_INSTRUMENTED_OPCODE = 238 + +def_op('INSTRUMENTED_POP_JUMP_IF_NONE', 238) +def_op('INSTRUMENTED_POP_JUMP_IF_NOT_NONE', 239) +def_op('INSTRUMENTED_RESUME', 240) +def_op('INSTRUMENTED_CALL', 241) +def_op('INSTRUMENTED_RETURN_VALUE', 242) +def_op('INSTRUMENTED_YIELD_VALUE', 243) +def_op('INSTRUMENTED_CALL_FUNCTION_EX', 244) +def_op('INSTRUMENTED_JUMP_FORWARD', 245) +def_op('INSTRUMENTED_JUMP_BACKWARD', 246) +def_op('INSTRUMENTED_RETURN_CONST', 247) +def_op('INSTRUMENTED_FOR_ITER', 248) +def_op('INSTRUMENTED_POP_JUMP_IF_FALSE', 249) +def_op('INSTRUMENTED_POP_JUMP_IF_TRUE', 250) +def_op('INSTRUMENTED_END_FOR', 251) +def_op('INSTRUMENTED_END_SEND', 252) +def_op('INSTRUMENTED_INSTRUCTION', 253) 
+def_op('INSTRUMENTED_LINE', 254) +# 255 is reserved + hasarg.extend([op for op in opmap.values() if op >= HAVE_ARGUMENT]) MIN_PSEUDO_OPCODE = 256 @@ -243,8 +266,11 @@ def pseudo_op(name, op, real_ops): pseudo_op('JUMP_NO_INTERRUPT', 261, ['JUMP_FORWARD', 'JUMP_BACKWARD_NO_INTERRUPT']) pseudo_op('LOAD_METHOD', 262, ['LOAD_ATTR']) +pseudo_op('LOAD_SUPER_METHOD', 263, ['LOAD_SUPER_ATTR']) +pseudo_op('LOAD_ZERO_SUPER_METHOD', 264, ['LOAD_SUPER_ATTR']) +pseudo_op('LOAD_ZERO_SUPER_ATTR', 265, ['LOAD_SUPER_ATTR']) -pseudo_op('STORE_FAST_MAYBE_NULL', 263, ['STORE_FAST']) +pseudo_op('STORE_FAST_MAYBE_NULL', 266, ['STORE_FAST']) MAX_PSEUDO_OPCODE = MIN_PSEUDO_OPCODE + len(_pseudo_ops) - 1 @@ -320,10 +346,10 @@ def pseudo_op(name, op, real_ops): "CALL_NO_KW_TUPLE_1", "CALL_NO_KW_TYPE_1", ], - "COMPARE_AND_BRANCH": [ - "COMPARE_AND_BRANCH_FLOAT", - "COMPARE_AND_BRANCH_INT", - "COMPARE_AND_BRANCH_STR", + "COMPARE_OP": [ + "COMPARE_OP_FLOAT", + "COMPARE_OP_INT", + "COMPARE_OP_STR", ], "FOR_ITER": [ "FOR_ITER_LIST", @@ -331,6 +357,9 @@ def pseudo_op(name, op, real_ops): "FOR_ITER_RANGE", "FOR_ITER_GEN", ], + "LOAD_SUPER_ATTR": [ + "LOAD_SUPER_ATTR_METHOD", + ], "LOAD_ATTR": [ # These potentially push [NULL, bound method] onto the stack. "LOAD_ATTR_CLASS", @@ -398,17 +427,18 @@ def pseudo_op(name, op, real_ops): "COMPARE_OP": { "counter": 1, }, - "COMPARE_AND_BRANCH": { - "counter": 1, - }, "BINARY_SUBSCR": { "counter": 1, - "type_version": 2, - "func_version": 1, }, "FOR_ITER": { "counter": 1, }, + "LOAD_SUPER_ATTR": { + "counter": 1, + "class_version": 2, + "self_type_version": 2, + "method": 4, + }, "LOAD_ATTR": { "counter": 1, "version": 2, @@ -423,7 +453,6 @@ def pseudo_op(name, op, real_ops): "CALL": { "counter": 1, "func_version": 2, - "min_args": 1, }, "STORE_SUBSCR": { "counter": 1, diff --git a/Lib/pathlib.py b/Lib/pathlib.py index 55c44f12e5a2fb..f43f01ef41a97f 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -16,7 +16,6 @@ import warnings from _collections_abc import Sequence from errno import ENOENT, ENOTDIR, EBADF, ELOOP -from operator import attrgetter from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO from urllib.parse import quote_from_bytes as urlquote_from_bytes @@ -211,20 +210,17 @@ def _select_from(self, parent_path, is_dir, exists, scandir, normcase): class _PathParents(Sequence): """This object provides sequence-like access to the logical ancestors of a path. Don't try to construct it yourself.""" - __slots__ = ('_pathcls', '_drv', '_root', '_parts') + __slots__ = ('_pathcls', '_drv', '_root', '_tail') def __init__(self, path): # We don't store the instance to avoid reference cycles self._pathcls = type(path) - self._drv = path._drv - self._root = path._root - self._parts = path._parts + self._drv = path.drive + self._root = path.root + self._tail = path._tail def __len__(self): - if self._drv or self._root: - return len(self._parts) - 1 - else: - return len(self._parts) + return len(self._tail) def __getitem__(self, idx): if isinstance(idx, slice): @@ -235,7 +231,7 @@ def __getitem__(self, idx): if idx < 0: idx += len(self) return self._pathcls._from_parsed_parts(self._drv, self._root, - self._parts[:-idx - 1]) + self._tail[:-idx - 1]) def __repr__(self): return "<{}.parents>".format(self._pathcls.__name__) @@ -250,13 +246,45 @@ class PurePath(object): PureWindowsPath object. You can also instantiate either of these classes directly, regardless of your system. 
""" + __slots__ = ( - '_drv', '_root', '_parts', - '_str', '_hash', '_parts_tuple', '_parts_normcase_cached', + # The `_raw_path` slot stores an unnormalized string path. This is set + # in the `__init__()` method. + '_raw_path', + + # The `_drv`, `_root` and `_tail_cached` slots store parsed and + # normalized parts of the path. They are set when any of the `drive`, + # `root` or `_tail` properties are accessed for the first time. The + # three-part division corresponds to the result of + # `os.path.splitroot()`, except that the tail is further split on path + # separators (i.e. it is a list of strings), and that the root and + # tail are normalized. + '_drv', '_root', '_tail_cached', + + # The `_str` slot stores the string representation of the path, + # computed from the drive, root and tail when `__str__()` is called + # for the first time. It's used to implement `_str_normcase` + '_str', + + # The `_str_normcase_cached` slot stores the string path with + # normalized case. It is set when the `_str_normcase` property is + # accessed for the first time. It's used to implement `__eq__()` + # `__hash__()`, and `_parts_normcase` + '_str_normcase_cached', + + # The `_parts_normcase_cached` slot stores the case-normalized + # string path after splitting on path separators. It's set when the + # `_parts_normcase` property is accessed for the first time. It's used + # to implement comparison methods like `__lt__()`. + '_parts_normcase_cached', + + # The `_hash` slot stores the hash of the case-normalized string + # path. It's set when `__hash__()` is called for the first time. + '_hash', ) _flavour = os.path - def __new__(cls, *args): + def __new__(cls, *args, **kwargs): """Construct a PurePath from one or several strings and or existing PurePath objects. The strings and path objects are combined so as to yield a canonicalized path, which is incorporated into the @@ -264,65 +292,70 @@ def __new__(cls, *args): """ if cls is PurePath: cls = PureWindowsPath if os.name == 'nt' else PurePosixPath - return cls._from_parts(args) + return object.__new__(cls) def __reduce__(self): # Using the parts tuple helps share interned path parts # when pickling related paths. - return (self.__class__, tuple(self._parts)) + return (self.__class__, self.parts) - @classmethod - def _parse_parts(cls, parts): - if not parts: - return '', '', [] - elif len(parts) == 1: - path = os.fspath(parts[0]) - else: - path = cls._flavour.join(*parts) - sep = cls._flavour.sep - altsep = cls._flavour.altsep - if isinstance(path, str): - # Force-cast str subclasses to str (issue #21127) - path = str(path) + def __init__(self, *args): + if not args: + path = '' + elif len(args) == 1: + path = os.fspath(args[0]) else: + path = self._flavour.join(*args) + if not isinstance(path, str): raise TypeError( "argument should be a str or an os.PathLike " "object where __fspath__ returns a str, " f"not {type(path).__name__!r}") + self._raw_path = path + + @classmethod + def _parse_path(cls, path): + if not path: + return '', '', [] + sep = cls._flavour.sep + altsep = cls._flavour.altsep if altsep: path = path.replace(altsep, sep) drv, root, rel = cls._flavour.splitroot(path) - if drv.startswith(sep): - # pathlib assumes that UNC paths always have a root. - root = sep - unfiltered_parsed = [drv + root] + rel.split(sep) - parsed = [sys.intern(x) for x in unfiltered_parsed if x and x != '.'] + if not root and drv.startswith(sep) and not drv.endswith(sep): + drv_parts = drv.split(sep) + if len(drv_parts) == 4 and drv_parts[2] not in '?.': + # e.g. 
//server/share + root = sep + elif len(drv_parts) == 6: + # e.g. //?/unc/server/share + root = sep + parsed = [sys.intern(str(x)) for x in rel.split(sep) if x and x != '.'] return drv, root, parsed - @classmethod - def _from_parts(cls, args): - self = object.__new__(cls) - drv, root, parts = self._parse_parts(args) + def _load_parts(self): + drv, root, tail = self._parse_path(self._raw_path) self._drv = drv self._root = root - self._parts = parts - return self + self._tail_cached = tail @classmethod - def _from_parsed_parts(cls, drv, root, parts): - self = object.__new__(cls) + def _from_parsed_parts(cls, drv, root, tail): + path = cls._format_parsed_parts(drv, root, tail) + self = cls(path) + self._str = path or '.' self._drv = drv self._root = root - self._parts = parts + self._tail_cached = tail return self @classmethod - def _format_parsed_parts(cls, drv, root, parts): + def _format_parsed_parts(cls, drv, root, tail): if drv or root: - return drv + root + cls._flavour.sep.join(parts[1:]) - elif parts and cls._flavour.splitdrive(parts[0])[0]: - parts = ['.'] + parts - return cls._flavour.sep.join(parts) + return drv + root + cls._flavour.sep.join(tail) + elif tail and cls._flavour.splitdrive(tail[0])[0]: + tail = ['.'] + tail + return cls._flavour.sep.join(tail) def __str__(self): """Return the string representation of the path, suitable for @@ -330,8 +363,8 @@ def __str__(self): try: return self._str except AttributeError: - self._str = self._format_parsed_parts(self._drv, self._root, - self._parts) or '.' + self._str = self._format_parsed_parts(self.drive, self.root, + self._tail) or '.' return self._str def __fspath__(self): @@ -356,7 +389,7 @@ def as_uri(self): if not self.is_absolute(): raise ValueError("relative path can't be expressed as a file URI") - drive = self._drv + drive = self.drive if len(drive) == 2 and drive[1] == ':': # It's a path on a local drive => 'file:///c:/a/b' prefix = 'file:///' + drive @@ -371,25 +404,34 @@ def as_uri(self): path = str(self) return prefix + urlquote_from_bytes(os.fsencode(path)) + @property + def _str_normcase(self): + # String with normalized case, for hashing and equality checks + try: + return self._str_normcase_cached + except AttributeError: + self._str_normcase_cached = self._flavour.normcase(str(self)) + return self._str_normcase_cached + @property def _parts_normcase(self): - # Cached parts with normalized case, for hashing and comparison. + # Cached parts with normalized case, for comparisons. 
try: return self._parts_normcase_cached except AttributeError: - self._parts_normcase_cached = [self._flavour.normcase(p) for p in self._parts] + self._parts_normcase_cached = self._str_normcase.split(self._flavour.sep) return self._parts_normcase_cached def __eq__(self, other): if not isinstance(other, PurePath): return NotImplemented - return self._parts_normcase == other._parts_normcase and self._flavour is other._flavour + return self._str_normcase == other._str_normcase and self._flavour is other._flavour def __hash__(self): try: return self._hash except AttributeError: - self._hash = hash(tuple(self._parts_normcase)) + self._hash = hash(self._str_normcase) return self._hash def __lt__(self, other): @@ -412,25 +454,45 @@ def __ge__(self, other): return NotImplemented return self._parts_normcase >= other._parts_normcase - drive = property(attrgetter('_drv'), - doc="""The drive prefix (letter or UNC path), if any.""") + @property + def drive(self): + """The drive prefix (letter or UNC path), if any.""" + try: + return self._drv + except AttributeError: + self._load_parts() + return self._drv - root = property(attrgetter('_root'), - doc="""The root of the path, if any.""") + @property + def root(self): + """The root of the path, if any.""" + try: + return self._root + except AttributeError: + self._load_parts() + return self._root + + @property + def _tail(self): + try: + return self._tail_cached + except AttributeError: + self._load_parts() + return self._tail_cached @property def anchor(self): """The concatenation of the drive and root, or ''.""" - anchor = self._drv + self._root + anchor = self.drive + self.root return anchor @property def name(self): """The final path component, if any.""" - parts = self._parts - if len(parts) == (1 if (self._drv or self._root) else 0): + tail = self._tail + if not tail: return '' - return parts[-1] + return tail[-1] @property def suffix(self): @@ -477,8 +539,8 @@ def with_name(self, name): drv, root, tail = f.splitroot(name) if drv or root or not tail or f.sep in tail or (f.altsep and f.altsep in tail): raise ValueError("Invalid name %r" % (name)) - return self._from_parsed_parts(self._drv, self._root, - self._parts[:-1] + [name]) + return self._from_parsed_parts(self.drive, self.root, + self._tail[:-1] + [name]) def with_stem(self, stem): """Return a new path with the stem changed.""" @@ -502,8 +564,8 @@ def with_suffix(self, suffix): name = name + suffix else: name = name[:-len(old_suffix)] + suffix - return self._from_parsed_parts(self._drv, self._root, - self._parts[:-1] + [name]) + return self._from_parsed_parts(self.drive, self.root, + self._tail[:-1] + [name]) def relative_to(self, other, /, *_deprecated, walk_up=False): """Return the relative path to another path identified by the passed @@ -528,7 +590,7 @@ def relative_to(self, other, /, *_deprecated, walk_up=False): raise ValueError(f"{str(self)!r} and {str(other)!r} have different anchors") if step and not walk_up: raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") - parts = ('..',) * step + self.parts[len(path.parts):] + parts = ['..'] * step + self._tail[len(path._tail):] return path_cls(*parts) def is_relative_to(self, other, /, *_deprecated): @@ -547,13 +609,10 @@ def is_relative_to(self, other, /, *_deprecated): def parts(self): """An object providing sequence-like access to the components in the filesystem path.""" - # We cache the tuple to avoid building a new one each time .parts - # is accessed. XXX is this necessary? 
- try: - return self._parts_tuple - except AttributeError: - self._parts_tuple = tuple(self._parts) - return self._parts_tuple + if self.drive or self.root: + return (self.drive + self.root,) + tuple(self._tail) + else: + return tuple(self._tail) def joinpath(self, *args): """Combine this path with one or several arguments, and return a @@ -561,22 +620,7 @@ def joinpath(self, *args): paths) or a totally different path (if one of the arguments is anchored). """ - drv1, root1, parts1 = self._drv, self._root, self._parts - drv2, root2, parts2 = self._parse_parts(args) - if root2: - if not drv2 and drv1: - return self._from_parsed_parts(drv1, root2, [drv1 + root2] + parts2[1:]) - else: - return self._from_parsed_parts(drv2, root2, parts2) - elif drv2: - if drv2 == drv1 or self._flavour.normcase(drv2) == self._flavour.normcase(drv1): - # Same drive => second path is relative to the first. - return self._from_parsed_parts(drv1, root1, parts1 + parts2[1:]) - else: - return self._from_parsed_parts(drv2, root2, parts2) - else: - # Second path is non-anchored (common case). - return self._from_parsed_parts(drv1, root1, parts1 + parts2) + return self.__class__(self._raw_path, *args) def __truediv__(self, key): try: @@ -586,19 +630,19 @@ def __truediv__(self, key): def __rtruediv__(self, key): try: - return self._from_parts([key] + self._parts) + return type(self)(key, self._raw_path) except TypeError: return NotImplemented @property def parent(self): """The logical parent of the path.""" - drv = self._drv - root = self._root - parts = self._parts - if len(parts) == 1 and (drv or root): + drv = self.drive + root = self.root + tail = self._tail + if not tail: return self - return self._from_parsed_parts(drv, root, parts[:-1]) + return self._from_parsed_parts(drv, root, tail[:-1]) @property def parents(self): @@ -610,35 +654,35 @@ def is_absolute(self): a drive).""" # ntpath.isabs() is defective - see GH-44626 . if self._flavour is ntpath: - return bool(self._drv and self._root) + return bool(self.drive and self.root) return self._flavour.isabs(self) def is_reserved(self): """Return True if the path contains one of the special names reserved by the system, if any.""" - if self._flavour is posixpath or not self._parts: + if self._flavour is posixpath or not self._tail: return False # NOTE: the rules for reserved names seem somewhat complicated # (e.g. r"..\NUL" is reserved but not r"foo\NUL" if "foo" does not # exist). We err on the side of caution and return True for paths # which are not considered reserved by Windows. - if self._parts[0].startswith('\\\\'): + if self.drive.startswith('\\\\'): # UNC paths are never reserved. return False - name = self._parts[-1].partition('.')[0].partition(':')[0].rstrip(' ') + name = self._tail[-1].partition('.')[0].partition(':')[0].rstrip(' ') return name.upper() in _WIN_RESERVED_NAMES def match(self, path_pattern): """ Return True if this path matches the given pattern. 
""" - path_pattern = self._flavour.normcase(path_pattern) - drv, root, pat_parts = self._parse_parts((path_pattern,)) - if not pat_parts: + pat = type(self)(path_pattern) + if not pat.parts: raise ValueError("empty pattern") + pat_parts = pat._parts_normcase parts = self._parts_normcase - if drv or root: + if pat.drive or pat.root: if len(pat_parts) != len(parts): return False elif len(pat_parts) > len(parts): @@ -687,20 +731,33 @@ class Path(PurePath): """ __slots__ = () - def __new__(cls, *args, **kwargs): + def __init__(self, *args, **kwargs): if kwargs: msg = ("support for supplying keyword arguments to pathlib.PurePath " "is deprecated and scheduled for removal in Python {remove}") warnings._deprecated("pathlib.PurePath(**kwargs)", msg, remove=(3, 14)) + super().__init__(*args) + + def __new__(cls, *args, **kwargs): if cls is Path: cls = WindowsPath if os.name == 'nt' else PosixPath - return cls._from_parts(args) - - def _make_child_relpath(self, part): - # This is an optimization used for dir walking. `part` must be - # a single part relative to this path. - parts = self._parts + [part] - return self._from_parsed_parts(self._drv, self._root, parts) + return object.__new__(cls) + + def _make_child_relpath(self, name): + path_str = str(self) + tail = self._tail + if tail: + path_str = f'{path_str}{self._flavour.sep}{name}' + elif path_str != '.': + path_str = f'{path_str}{name}' + else: + path_str = name + path = type(self)(path_str) + path._str = path_str + path._drv = self.drive + path._root = self.root + path._tail_cached = tail + [name] + return path def __enter__(self): # In previous versions of pathlib, __exit__() marked this path as @@ -770,7 +827,7 @@ def glob(self, pattern): sys.audit("pathlib.Path.glob", self, pattern) if not pattern: raise ValueError("Unacceptable pattern: {!r}".format(pattern)) - drv, root, pattern_parts = self._parse_parts((pattern,)) + drv, root, pattern_parts = self._parse_path(pattern) if drv or root: raise NotImplementedError("Non-relative patterns are unsupported") if pattern[-1] in (self._flavour.sep, self._flavour.altsep): @@ -785,7 +842,7 @@ def rglob(self, pattern): this subtree. """ sys.audit("pathlib.Path.rglob", self, pattern) - drv, root, pattern_parts = self._parse_parts((pattern,)) + drv, root, pattern_parts = self._parse_path(pattern) if drv or root: raise NotImplementedError("Non-relative patterns are unsupported") if pattern and pattern[-1] in (self._flavour.sep, self._flavour.altsep): @@ -802,12 +859,12 @@ def absolute(self): """ if self.is_absolute(): return self - elif self._drv: + elif self.drive: # There is a CWD on each drive-letter drive. - cwd = self._flavour.abspath(self._drv) + cwd = self._flavour.abspath(self.drive) else: cwd = os.getcwd() - return self._from_parts([cwd] + self._parts) + return type(self)(cwd, self._raw_path) def resolve(self, strict=False): """ @@ -825,7 +882,7 @@ def check_eloop(e): except OSError as e: check_eloop(e) raise - p = self._from_parts((s,)) + p = type(self)(s) # In non-strict mode, realpath() doesn't raise on symlink loops. 
# Ensure we get an exception by calling stat() @@ -915,7 +972,7 @@ def readlink(self): """ if not hasattr(os, "readlink"): raise NotImplementedError("os.readlink() not available on this system") - return self._from_parts((os.readlink(self),)) + return type(self)(os.readlink(self)) def touch(self, mode=0o666, exist_ok=True): """ @@ -1184,58 +1241,60 @@ def expanduser(self): """ Return a new path with expanded ~ and ~user constructs (as returned by os.path.expanduser) """ - if (not (self._drv or self._root) and - self._parts and self._parts[0][:1] == '~'): - homedir = self._flavour.expanduser(self._parts[0]) + if (not (self.drive or self.root) and + self._tail and self._tail[0][:1] == '~'): + homedir = self._flavour.expanduser(self._tail[0]) if homedir[:1] == "~": raise RuntimeError("Could not determine home directory.") - drv, root, parts = self._parse_parts((homedir,)) - return self._from_parsed_parts(drv, root, parts + self._parts[1:]) + drv, root, tail = self._parse_path(homedir) + return self._from_parsed_parts(drv, root, tail + self._tail[1:]) return self def walk(self, top_down=True, on_error=None, follow_symlinks=False): """Walk the directory tree from this directory, similar to os.walk().""" sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks) - return self._walk(top_down, on_error, follow_symlinks) - - def _walk(self, top_down, on_error, follow_symlinks): - # We may not have read permission for self, in which case we can't - # get a list of the files the directory contains. os.walk - # always suppressed the exception then, rather than blow up for a - # minor reason when (say) a thousand readable directories are still - # left to visit. That logic is copied here. - try: - scandir_it = self._scandir() - except OSError as error: - if on_error is not None: - on_error(error) - return - - with scandir_it: - dirnames = [] - filenames = [] - for entry in scandir_it: - try: - is_dir = entry.is_dir(follow_symlinks=follow_symlinks) - except OSError: - # Carried over from os.path.isdir(). - is_dir = False - - if is_dir: - dirnames.append(entry.name) - else: - filenames.append(entry.name) - - if top_down: - yield self, dirnames, filenames - - for dirname in dirnames: - dirpath = self._make_child_relpath(dirname) - yield from dirpath._walk(top_down, on_error, follow_symlinks) + paths = [self] + + while paths: + path = paths.pop() + if isinstance(path, tuple): + yield path + continue + + # We may not have read permission for self, in which case we can't + # get a list of the files the directory contains. os.walk() + # always suppressed the exception in that instance, rather than + # blow up for a minor reason when (say) a thousand readable + # directories are still left to visit. That logic is copied here. + try: + scandir_it = path._scandir() + except OSError as error: + if on_error is not None: + on_error(error) + continue + + with scandir_it: + dirnames = [] + filenames = [] + for entry in scandir_it: + try: + is_dir = entry.is_dir(follow_symlinks=follow_symlinks) + except OSError: + # Carried over from os.path.isdir(). 
+ is_dir = False + + if is_dir: + dirnames.append(entry.name) + else: + filenames.append(entry.name) + + if top_down: + yield path, dirnames, filenames + else: + paths.append((path, dirnames, filenames)) - if not top_down: - yield self, dirnames, filenames + paths += [path._make_child_relpath(d) for d in reversed(dirnames)] class PosixPath(Path, PurePosixPath): diff --git a/Lib/pdb.py b/Lib/pdb.py index f11fc55536810f..645cbf518e58e3 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -154,7 +154,7 @@ def namespace(self): @property def code(self): - with io.open(self) as fp: + with io.open_code(self) as fp: return f"exec(compile({fp.read()!r}, {self!r}, 'exec'))" @@ -377,8 +377,7 @@ def user_exception(self, frame, exc_info): # stop when the debuggee is returning from such generators. prefix = 'Internal ' if (not exc_traceback and exc_type is StopIteration) else '' - self.message('%s%s' % (prefix, - traceback.format_exception_only(exc_type, exc_value)[-1].strip())) + self.message('%s%s' % (prefix, self._format_exc(exc_value))) self.interaction(frame, exc_traceback) # General interaction function @@ -587,7 +586,7 @@ def _complete_expression(self, text, line, begidx, endidx): # Return true to exit from the command loop def do_commands(self, arg): - """commands [bpnumber] + """(Pdb) commands [bpnumber] (com) ... (com) end (Pdb) @@ -673,6 +672,7 @@ def do_commands(self, arg): def do_break(self, arg, temporary = 0): """b(reak) [ ([filename:]lineno | function) [, condition] ] + Without argument, list all breaks. With a line number argument, set a break at this line in the @@ -702,6 +702,9 @@ def do_break(self, arg, temporary = 0): if comma > 0: # parse stuff after comma: "condition" cond = arg[comma+1:].lstrip() + if err := self._compile_error_message(cond): + self.error('Invalid condition %s: %r' % (cond, err)) + return arg = arg[:comma].rstrip() # parse stuff before comma: [filename:]lineno | function colon = arg.rfind(':') @@ -778,6 +781,7 @@ def defaultFile(self): def do_tbreak(self, arg): """tbreak [ ([filename:]lineno | function) [, condition] ] + Same arguments as break, but sets a temporary breakpoint: it is automatically deleted when first hit. """ @@ -842,6 +846,7 @@ def checkline(self, filename, lineno): def do_enable(self, arg): """enable bpnumber [bpnumber ...] + Enables the breakpoints given as a space separated list of breakpoint numbers. """ @@ -859,6 +864,7 @@ def do_enable(self, arg): def do_disable(self, arg): """disable bpnumber [bpnumber ...] + Disables the breakpoints given as a space separated list of breakpoint numbers. Disabling a breakpoint means it cannot cause the program to stop execution, but unlike clearing a @@ -879,6 +885,7 @@ def do_disable(self, arg): def do_condition(self, arg): """condition bpnumber [condition] + Set a new condition for the breakpoint, an expression which must evaluate to true before the breakpoint is honored. If condition is absent, any existing condition is removed; i.e., @@ -887,6 +894,9 @@ def do_condition(self, arg): args = arg.split(' ', 1) try: cond = args[1] + if err := self._compile_error_message(cond): + self.error('Invalid condition %s: %r' % (cond, err)) + return except IndexError: cond = None try: @@ -906,6 +916,7 @@ def do_condition(self, arg): def do_ignore(self, arg): """ignore bpnumber [count] + Set the ignore count for the given breakpoint number. If count is omitted, the ignore count is set to 0. A breakpoint becomes active when the ignore count is zero. 
When non-zero, @@ -940,7 +951,8 @@ def do_ignore(self, arg): complete_ignore = _complete_bpnumber def do_clear(self, arg): - """cl(ear) filename:lineno\ncl(ear) [bpnumber [bpnumber...]] + """cl(ear) [filename:lineno | bpnumber ...] + With a space separated list of breakpoint numbers, clear those breakpoints. Without argument, clear all breaks (but first ask confirmation). With a filename:lineno argument, @@ -992,6 +1004,7 @@ def do_clear(self, arg): def do_where(self, arg): """w(here) + Print a stack trace, with the most recent frame at the bottom. An arrow indicates the "current frame", which determines the context of most commands. 'bt' is an alias for this command. @@ -1010,6 +1023,7 @@ def _select_frame(self, number): def do_up(self, arg): """u(p) [count] + Move the current frame count (default one) levels up in the stack trace (to an older frame). """ @@ -1030,6 +1044,7 @@ def do_up(self, arg): def do_down(self, arg): """d(own) [count] + Move the current frame count (default one) levels down in the stack trace (to a newer frame). """ @@ -1050,6 +1065,7 @@ def do_down(self, arg): def do_until(self, arg): """unt(il) [lineno] + Without argument, continue execution until the line with a number greater than the current one is reached. With a line number, continue execution until a line with a number greater @@ -1074,6 +1090,7 @@ def do_until(self, arg): def do_step(self, arg): """s(tep) + Execute the current line, stop at the first possible occasion (either in a function that is called or in the current function). @@ -1084,6 +1101,7 @@ def do_step(self, arg): def do_next(self, arg): """n(ext) + Continue execution until the next line in the current function is reached or it returns. """ @@ -1093,6 +1111,7 @@ def do_next(self, arg): def do_run(self, arg): """run [args...] + Restart the debugged python program. If a string is supplied it is split with "shlex", and the result is used as the new sys.argv. History, breakpoints, actions and debugger options @@ -1114,6 +1133,7 @@ def do_run(self, arg): def do_return(self, arg): """r(eturn) + Continue execution until the current function returns. """ self.set_return(self.curframe) @@ -1122,6 +1142,7 @@ def do_return(self, arg): def do_continue(self, arg): """c(ont(inue)) + Continue execution, only stop when a breakpoint is encountered. """ if not self.nosigint: @@ -1140,6 +1161,7 @@ def do_continue(self, arg): def do_jump(self, arg): """j(ump) lineno + Set the next line that will be executed. Only available in the bottom-most frame. This lets you jump back and execute code again, or jump forward to skip code that you don't want @@ -1169,6 +1191,7 @@ def do_jump(self, arg): def do_debug(self, arg): """debug code + Enter a recursive debugger that steps through the code argument (which is an arbitrary expression or statement to be executed in the current environment). @@ -1190,7 +1213,8 @@ def do_debug(self, arg): complete_debug = _complete_expression def do_quit(self, arg): - """q(uit)\nexit + """q(uit) | exit + Quit from the debugger. The program being executed is aborted. """ self._user_requested_quit = True @@ -1202,6 +1226,7 @@ def do_quit(self, arg): def do_EOF(self, arg): """EOF + Handles the receipt of EOF as a command. """ self.message('') @@ -1211,6 +1236,7 @@ def do_EOF(self, arg): def do_args(self, arg): """a(rgs) + Print the argument list of the current function. """ co = self.curframe.f_code @@ -1228,6 +1254,7 @@ def do_args(self, arg): def do_retval(self, arg): """retval + Print the return value for the last return of a function. 
""" if '__return__' in self.curframe_locals: @@ -1249,14 +1276,12 @@ def _getval_except(self, arg, frame=None): return eval(arg, self.curframe.f_globals, self.curframe_locals) else: return eval(arg, frame.f_globals, frame.f_locals) - except: - exc_info = sys.exc_info()[:2] - err = traceback.format_exception_only(*exc_info)[-1].strip() - return _rstr('** raised %s **' % err) + except BaseException as exc: + return _rstr('** raised %s **' % self._format_exc(exc)) def _error_exc(self): - exc_info = sys.exc_info()[:2] - self.error(traceback.format_exception_only(*exc_info)[-1].strip()) + exc = sys.exception() + self.error(self._format_exc(exc)) def _msg_val_func(self, arg, func): try: @@ -1270,12 +1295,14 @@ def _msg_val_func(self, arg, func): def do_p(self, arg): """p expression + Print the value of the expression. """ self._msg_val_func(arg, repr) def do_pp(self, arg): """pp expression + Pretty-print the value of the expression. """ self._msg_val_func(arg, pprint.pformat) @@ -1285,7 +1312,7 @@ def do_pp(self, arg): complete_pp = _complete_expression def do_list(self, arg): - """l(ist) [first [,last] | .] + """l(ist) [first[, last] | .] List source code for the current file. Without arguments, list 11 lines around the current line or continue the previous @@ -1342,13 +1369,14 @@ def do_list(self, arg): do_l = do_list def do_longlist(self, arg): - """longlist | ll + """ll | longlist + List the whole source code for the current function or frame. """ filename = self.curframe.f_code.co_filename breaklist = self.get_file_breaks(filename) try: - lines, lineno = inspect.getsourcelines(self.curframe) + lines, lineno = self._getsourcelines(self.curframe) except OSError as err: self.error(err) return @@ -1357,6 +1385,7 @@ def do_longlist(self, arg): def do_source(self, arg): """source expression + Try to get source code for the given object and display it. """ try: @@ -1364,7 +1393,7 @@ def do_source(self, arg): except: return try: - lines, lineno = inspect.getsourcelines(obj) + lines, lineno = self._getsourcelines(obj) except (OSError, TypeError) as err: self.error(err) return @@ -1394,7 +1423,8 @@ def _print_lines(self, lines, start, breaks=(), frame=None): self.message(s + '\t' + line.rstrip()) def do_whatis(self, arg): - """whatis arg + """whatis expression + Print the type of the argument. """ try: @@ -1437,13 +1467,19 @@ def do_display(self, arg): Without expression, list all display expressions for the current frame. """ if not arg: - self.message('Currently displaying:') - for item in self.displaying.get(self.curframe, {}).items(): - self.message('%s: %r' % item) + if self.displaying: + self.message('Currently displaying:') + for item in self.displaying.get(self.curframe, {}).items(): + self.message('%s: %r' % item) + else: + self.message('No expression is being displayed') else: - val = self._getval_except(arg) - self.displaying.setdefault(self.curframe, {})[arg] = val - self.message('display %s: %r' % (arg, val)) + if err := self._compile_error_message(arg): + self.error('Unable to display %s: %r' % (arg, err)) + else: + val = self._getval_except(arg) + self.displaying.setdefault(self.curframe, {})[arg] = val + self.message('display %s: %r' % (arg, val)) complete_display = _complete_expression @@ -1476,7 +1512,8 @@ def do_interact(self, arg): code.interact("*interactive*", local=ns) def do_alias(self, arg): - """alias [name [command [parameter parameter ...] ]] + """alias [name [command]] + Create an alias called 'name' that executes 'command'. The command must *not* be enclosed in quotes. 
Replaceable parameters can be indicated by %1, %2, and so on, while %* is @@ -1512,6 +1549,7 @@ def do_alias(self, arg): def do_unalias(self, arg): """unalias name + Delete the specified alias. """ args = arg.split() @@ -1554,6 +1592,7 @@ def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix): def do_help(self, arg): """h(elp) + Without argument, print the list of available commands. With a command name as argument, print help about that command. "help pdb" shows the full pdb documentation. @@ -1577,12 +1616,13 @@ def do_help(self, arg): if command.__doc__ is None: self.error('No help for %r; __doc__ string missing' % arg) return - self.message(command.__doc__.rstrip()) + self.message(self._help_message_from_doc(command.__doc__)) do_h = do_help def help_exec(self): """(!) statement + Execute the (one-line) statement in the context of the current stack frame. The exclamation point can be omitted unless the first word of the statement resembles a debugger command. To @@ -1642,6 +1682,46 @@ def _run(self, target: Union[_ModuleTarget, _ScriptTarget]): self.run(target.code) + def _format_exc(self, exc: BaseException): + return traceback.format_exception_only(exc)[-1].strip() + + def _compile_error_message(self, expr): + """Return the error message as string if compiling `expr` fails.""" + try: + compile(expr, "<stdin>", "eval") + except SyntaxError as exc: + return _rstr(self._format_exc(exc)) + return "" + + def _getsourcelines(self, obj): + # GH-103319 + # inspect.getsourcelines() returns lineno = 0 for + # module-level frame which breaks our code print line number + # This method should be replaced by inspect.getsourcelines(obj) + # once this bug is fixed in inspect + lines, lineno = inspect.getsourcelines(obj) + lineno = max(1, lineno) + return lines, lineno + + def _help_message_from_doc(self, doc): + lines = [line.strip() for line in doc.rstrip().splitlines()] + if not lines: + return "No help message found." + if "" in lines: + usage_end = lines.index("") + else: + usage_end = 1 + formatted = [] + indent = " " * len(self.prompt) + for i, line in enumerate(lines): + if i == 0: + prefix = "Usage: " + elif i < usage_end: + prefix = " " + else: + prefix = "" + formatted.append(indent + prefix + line) + return "\n".join(formatted) # Collect all command help into docstring, if not run with -OO @@ -1726,9 +1806,10 @@ def post_mortem(t=None): """ # handling the default if t is None: - # sys.exc_info() returns (type, value, traceback) if an exception is - # being handled, otherwise it returns None - t = sys.exc_info()[2] + exc = sys.exception() + if exc is not None: + t = exc.__traceback__ + if t is None: raise ValueError("A valid traceback must be passed if no " "exception is being handled") @@ -1739,7 +1820,11 @@ def post_mortem(t=None): def pm(): """Enter post-mortem debugging of the traceback found in sys.last_traceback.""" - post_mortem(sys.last_traceback) + if hasattr(sys, 'last_exc'): + tb = sys.last_exc.__traceback__ + else: + tb = sys.last_traceback + post_mortem(tb) # Main program for testing @@ -1808,18 +1893,18 @@ def main(): except Restart: print("Restarting", target, "with arguments:") print("\t" + " ".join(sys.argv[1:])) - except SystemExit: + except SystemExit as e: # In most cases SystemExit does not warrant a post-mortem session. print("The program exited via sys.exit(). 
Exit status:", end=' ') - print(sys.exc_info()[1]) + print(e) except SyntaxError: traceback.print_exc() sys.exit(1) - except: + except BaseException as e: traceback.print_exc() print("Uncaught exception. Entering post mortem debugging") print("Running 'cont' or 'step' will restart the program") - t = sys.exc_info()[2] + t = e.__traceback__ pdb.interaction(None, t) print("Post mortem debugger finished. The " + target + " will be restarted") diff --git a/Lib/pickle.py b/Lib/pickle.py index 15fa5f6e579932..fe86f80f51d3b9 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -1481,7 +1481,7 @@ def _instantiate(self, klass, args): value = klass(*args) except TypeError as err: raise TypeError("in constructor for %s: %s" % - (klass.__name__, str(err)), sys.exc_info()[2]) + (klass.__name__, str(err)), err.__traceback__) else: value = klass.__new__(klass) self.append(value) diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index bdebfd2fc8ac32..fb977eaaa05767 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -14,7 +14,7 @@ __all__ = [ 'get_importer', 'iter_importers', 'get_loader', 'find_loader', 'walk_packages', 'iter_modules', 'get_data', - 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path', + 'read_code', 'extend_path', 'ModuleInfo', ] @@ -185,187 +185,6 @@ def _iter_file_finder_modules(importer, prefix=''): importlib.machinery.FileFinder, _iter_file_finder_modules) -def _import_imp(): - global imp - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - imp = importlib.import_module('imp') - -class ImpImporter: - """PEP 302 Finder that wraps Python's "classic" import algorithm - - ImpImporter(dirname) produces a PEP 302 finder that searches that - directory. ImpImporter(None) produces a PEP 302 finder that searches - the current sys.path, plus any modules that are frozen or built-in. - - Note that ImpImporter does not currently support being used by placement - on sys.meta_path. - """ - - def __init__(self, path=None): - global imp - warnings.warn("This emulation is deprecated and slated for removal " - "in Python 3.12; use 'importlib' instead", - DeprecationWarning) - _import_imp() - self.path = path - - def find_module(self, fullname, path=None): - # Note: we ignore 'path' argument since it is only used via meta_path - subname = fullname.split(".")[-1] - if subname != fullname and self.path is None: - return None - if self.path is None: - path = None - else: - path = [os.path.realpath(self.path)] - try: - file, filename, etc = imp.find_module(subname, path) - except ImportError: - return None - return ImpLoader(fullname, file, filename, etc) - - def iter_modules(self, prefix=''): - if self.path is None or not os.path.isdir(self.path): - return - - yielded = {} - import inspect - try: - filenames = os.listdir(self.path) - except OSError: - # ignore unreadable directories like import does - filenames = [] - filenames.sort() # handle packages before same-named modules - - for fn in filenames: - modname = inspect.getmodulename(fn) - if modname=='__init__' or modname in yielded: - continue - - path = os.path.join(self.path, fn) - ispkg = False - - if not modname and os.path.isdir(path) and '.' not in fn: - modname = fn - try: - dircontents = os.listdir(path) - except OSError: - # ignore unreadable directories like import does - dircontents = [] - for fn in dircontents: - subname = inspect.getmodulename(fn) - if subname=='__init__': - ispkg = True - break - else: - continue # not a package - - if modname and '.' 
not in modname: - yielded[modname] = 1 - yield prefix + modname, ispkg - - -class ImpLoader: - """PEP 302 Loader that wraps Python's "classic" import algorithm - """ - code = source = None - - def __init__(self, fullname, file, filename, etc): - warnings.warn("This emulation is deprecated and slated for removal in " - "Python 3.12; use 'importlib' instead", - DeprecationWarning) - _import_imp() - self.file = file - self.filename = filename - self.fullname = fullname - self.etc = etc - - def load_module(self, fullname): - self._reopen() - try: - mod = imp.load_module(fullname, self.file, self.filename, self.etc) - finally: - if self.file: - self.file.close() - # Note: we don't set __loader__ because we want the module to look - # normal; i.e. this is just a wrapper for standard import machinery - return mod - - def get_data(self, pathname): - with open(pathname, "rb") as file: - return file.read() - - def _reopen(self): - if self.file and self.file.closed: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - self.file = open(self.filename, 'r') - elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION): - self.file = open(self.filename, 'rb') - - def _fix_name(self, fullname): - if fullname is None: - fullname = self.fullname - elif fullname != self.fullname: - raise ImportError("Loader for module %s cannot handle " - "module %s" % (self.fullname, fullname)) - return fullname - - def is_package(self, fullname): - fullname = self._fix_name(fullname) - return self.etc[2]==imp.PKG_DIRECTORY - - def get_code(self, fullname=None): - fullname = self._fix_name(fullname) - if self.code is None: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - source = self.get_source(fullname) - self.code = compile(source, self.filename, 'exec') - elif mod_type==imp.PY_COMPILED: - self._reopen() - try: - self.code = read_code(self.file) - finally: - self.file.close() - elif mod_type==imp.PKG_DIRECTORY: - self.code = self._get_delegate().get_code() - return self.code - - def get_source(self, fullname=None): - fullname = self._fix_name(fullname) - if self.source is None: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - self._reopen() - try: - self.source = self.file.read() - finally: - self.file.close() - elif mod_type==imp.PY_COMPILED: - if os.path.exists(self.filename[:-1]): - with open(self.filename[:-1], 'r') as f: - self.source = f.read() - elif mod_type==imp.PKG_DIRECTORY: - self.source = self._get_delegate().get_source() - return self.source - - def _get_delegate(self): - finder = ImpImporter(self.filename) - spec = _get_spec(finder, '__init__') - return spec.loader - - def get_filename(self, fullname=None): - fullname = self._fix_name(fullname) - mod_type = self.etc[2] - if mod_type==imp.PKG_DIRECTORY: - return self._get_delegate().get_filename() - elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION): - return self.filename - return None - - try: import zipimport from zipimport import zipimporter @@ -511,10 +330,10 @@ def extend_path(path, name): from pkgutil import extend_path __path__ = extend_path(__path__, __name__) - This will add to the package's __path__ all subdirectories of - directories on sys.path named after the package. This is useful - if one wants to distribute different parts of a single logical - package as multiple directories. + For each directory on sys.path that has a subdirectory that + matches the package name, add the subdirectory to the package's + __path__. 
This is useful if one wants to distribute different + parts of a single logical package as multiple directories. It also looks for *.pkg files beginning where * matches the name argument. This feature is similar to *.pth files (see site.py), diff --git a/Lib/platform.py b/Lib/platform.py index f2b0d1d1bd3f5d..7bb222088d5061 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -136,11 +136,11 @@ 'pl': 200, 'p': 200, } -_component_re = re.compile(r'([0-9]+|[._+-])') def _comparable_version(version): + component_re = re.compile(r'([0-9]+|[._+-])') result = [] - for v in _component_re.split(version): + for v in component_re.split(version): if v not in '._+-': try: v = int(v, 10) @@ -152,11 +152,6 @@ def _comparable_version(version): ### Platform specific APIs -_libc_search = re.compile(b'(__libc_init)' - b'|' - b'(GLIBC_([0-9.]+))' - b'|' - br'(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII) def libc_ver(executable=None, lib='', version='', chunksize=16384): @@ -190,6 +185,12 @@ def libc_ver(executable=None, lib='', version='', chunksize=16384): # sys.executable is not set. return lib, version + libc_search = re.compile(b'(__libc_init)' + b'|' + b'(GLIBC_([0-9.]+))' + b'|' + br'(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII) + V = _comparable_version # We use os.path.realpath() # here to work around problems with Cygwin not being @@ -200,7 +201,7 @@ def libc_ver(executable=None, lib='', version='', chunksize=16384): pos = 0 while pos < len(binary): if b'libc' in binary or b'GLIBC' in binary: - m = _libc_search.search(binary, pos) + m = libc_search.search(binary, pos) else: m = None if not m or m.end() == len(binary): @@ -247,9 +248,6 @@ def _norm_version(version, build=''): version = '.'.join(strings[:3]) return version -_ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) ' - r'.*' - r'\[.* ([\d.]+)\])') # Examples of VER command output: # @@ -295,9 +293,13 @@ def _syscmd_ver(system='', release='', version='', else: return system, release, version + ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) ' + r'.*' + r'\[.* ([\d.]+)\])') + # Parse the output info = info.strip() - m = _ver_output.match(info) + m = ver_output.match(info) if m is not None: system, release, version = m.groups() # Strip trailing dots from version and release @@ -1033,32 +1035,6 @@ def processor(): ### Various APIs for extracting information from sys.version -_sys_version_parser = re.compile( - r'([\w.+]+)\s*' # "version<space>" - r'\(#?([^,]+)' # "(#buildno" - r'(?:,\s*([\w ]*)' # ", builddate" - r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)<space>" - r'\[([^\]]+)\]?', re.ASCII) # "[compiler]" - -_ironpython_sys_version_parser = re.compile( - r'IronPython\s*' - r'([\d\.]+)' - r'(?: \(([\d\.]+)\))?' 
- r' on (.NET [\d\.]+)', re.ASCII) - -# IronPython covering 2.6 and 2.7 -_ironpython26_sys_version_parser = re.compile( - r'([\d.]+)\s*' - r'\(IronPython\s*' - r'[\d.]+\s*' - r'\(([\d.]+)\) on ([\w.]+ [\d.]+(?: \(\d+-bit\))?)\)' -) - -_pypy_sys_version_parser = re.compile( - r'([\w.+]+)\s*' - r'\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*' - r'\[PyPy [^\]]+\]?') - _sys_version_cache = {} def _sys_version(sys_version=None): @@ -1090,28 +1066,17 @@ def _sys_version(sys_version=None): if result is not None: return result - # Parse it - if 'IronPython' in sys_version: - # IronPython - name = 'IronPython' - if sys_version.startswith('IronPython'): - match = _ironpython_sys_version_parser.match(sys_version) - else: - match = _ironpython26_sys_version_parser.match(sys_version) - - if match is None: - raise ValueError( - 'failed to parse IronPython sys.version: %s' % - repr(sys_version)) - - version, alt_version, compiler = match.groups() - buildno = '' - builddate = '' + sys_version_parser = re.compile( + r'([\w.+]+)\s*' # "version<space>" + r'\(#?([^,]+)' # "(#buildno" + r'(?:,\s*([\w ]*)' # ", builddate" + r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)<space>" + r'\[([^\]]+)\]?', re.ASCII) # "[compiler]" - elif sys.platform.startswith('java'): + if sys.platform.startswith('java'): # Jython name = 'Jython' - match = _sys_version_parser.match(sys_version) + match = sys_version_parser.match(sys_version) if match is None: raise ValueError( 'failed to parse Jython sys.version: %s' % @@ -1123,8 +1088,13 @@ def _sys_version(sys_version=None): elif "PyPy" in sys_version: # PyPy + pypy_sys_version_parser = re.compile( + r'([\w.+]+)\s*' + r'\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*' + r'\[PyPy [^\]]+\]?') + name = "PyPy" - match = _pypy_sys_version_parser.match(sys_version) + match = pypy_sys_version_parser.match(sys_version) if match is None: raise ValueError("failed to parse PyPy sys.version: %s" % repr(sys_version)) @@ -1133,7 +1103,7 @@ def _sys_version(sys_version=None): else: # CPython - match = _sys_version_parser.match(sys_version) + match = sys_version_parser.match(sys_version) if match is None: raise ValueError( 'failed to parse CPython sys.version: %s' % @@ -1171,7 +1141,6 @@ def python_implementation(): Currently, the following implementations are identified: 'CPython' (C implementation of Python), - 'IronPython' (.NET implementation of Python), 'Jython' (Java implementation of Python), 'PyPy' (Python implementation of Python). @@ -1323,13 +1292,6 @@ def platform(aliased=False, terse=False): ### freedesktop.org os-release standard # https://www.freedesktop.org/software/systemd/man/os-release.html -# NAME=value with optional quotes (' or "). The regular expression is less -# strict than shell lexer, but that's ok. -_os_release_line = re.compile( - "^(?P<name>[a-zA-Z0-9_]+)=(?P<quote>[\"\']?)(?P<value>.*)(?P=quote)$" -) -# unescape five special characters mentioned in the standard -_os_release_unescape = re.compile(r"\\([\\\$\"\'`])") # /etc takes precedence over /usr/lib _os_release_candidates = ("/etc/os-release", "/usr/lib/os-release") _os_release_cache = None @@ -1344,10 +1306,18 @@ def _parse_os_release(lines): "PRETTY_NAME": "Linux", } + # NAME=value with optional quotes (' or "). The regular expression is less + # strict than shell lexer, but that's ok. 
+ os_release_line = re.compile( + "^(?P<name>[a-zA-Z0-9_]+)=(?P<quote>[\"\']?)(?P<value>.*)(?P=quote)$" + ) + # unescape five special characters mentioned in the standard + os_release_unescape = re.compile(r"\\([\\\$\"\'`])") + for line in lines: - mo = _os_release_line.match(line) + mo = os_release_line.match(line) if mo is not None: - info[mo.group('name')] = _os_release_unescape.sub( + info[mo.group('name')] = os_release_unescape.sub( r"\1", mo.group('value') ) diff --git a/Lib/profile.py b/Lib/profile.py index 453e56285c510c..4b82523b03d64b 100755 --- a/Lib/profile.py +++ b/Lib/profile.py @@ -25,6 +25,7 @@ import importlib.machinery +import io import sys import time import marshal @@ -588,7 +589,7 @@ def main(): else: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) - with open(progname, 'rb') as fp: + with io.open_code(progname) as fp: code = compile(fp.read(), progname, 'exec') spec = importlib.machinery.ModuleSpec(name='__main__', loader=None, origin=progname) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 0a693f45230c93..1c3443fa8469f7 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -389,8 +389,17 @@ def synopsis(filename, cache={}): class ErrorDuringImport(Exception): """Errors that occurred while trying to import something to document it.""" def __init__(self, filename, exc_info): + if not isinstance(exc_info, tuple): + assert isinstance(exc_info, BaseException) + self.exc = type(exc_info) + self.value = exc_info + self.tb = exc_info.__traceback__ + else: + warnings.warn("A tuple value for exc_info is deprecated, use an exception instance", + DeprecationWarning) + + self.exc, self.value, self.tb = exc_info self.filename = filename - self.exc, self.value, self.tb = exc_info def __str__(self): exc = self.exc.__name__ @@ -411,8 +420,8 @@ def importfile(path): spec = importlib.util.spec_from_file_location(name, path, loader=loader) try: return importlib._bootstrap._load(spec) - except: - raise ErrorDuringImport(path, sys.exc_info()) + except BaseException as err: + raise ErrorDuringImport(path, err) def safeimport(path, forceload=0, cache={}): """Import a module; handle errors; return None if the module isn't found. @@ -440,21 +449,20 @@ def safeimport(path, forceload=0, cache={}): cache[key] = sys.modules[key] del sys.modules[key] module = __import__(path) - except: + except BaseException as err: # Did the error occur before or after the module was found? - (exc, value, tb) = info = sys.exc_info() if path in sys.modules: # An error occurred while executing the imported module. - raise ErrorDuringImport(sys.modules[path].__file__, info) - elif exc is SyntaxError: + raise ErrorDuringImport(sys.modules[path].__file__, err) + elif type(err) is SyntaxError: # A SyntaxError occurred before we could execute the module. - raise ErrorDuringImport(value.filename, info) - elif issubclass(exc, ImportError) and value.name == path: + raise ErrorDuringImport(err.filename, err) + elif isinstance(err, ImportError) and err.name == path: # No such module in the path. return None else: # Some other error occurred during the importing process. 
- raise ErrorDuringImport(path, sys.exc_info()) + raise ErrorDuringImport(path, err) for part in path.split('.')[1:]: try: module = getattr(module, part) except AttributeError: return None @@ -504,7 +512,7 @@ def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')): basedir = os.path.normcase(basedir) if (isinstance(object, type(os)) and - (object.__name__ in ('errno', 'exceptions', 'gc', 'imp', + (object.__name__ in ('errno', 'exceptions', 'gc', 'marshal', 'posix', 'signal', 'sys', '_thread', 'zipimport') or (file.startswith(basedir) and @@ -1997,8 +2005,8 @@ def __call__(self, request=_GoInteractive): if request is not self._GoInteractive: try: self.help(request) - except ImportError as e: - self.output.write(f'{e}\n') + except ImportError as err: + self.output.write(f'{err}\n') else: self.intro() self.interact() @@ -2405,8 +2413,8 @@ def run(self): docsvr = DocServer(self.host, self.port, self.ready) self.docserver = docsvr docsvr.serve_until_quit() - except Exception as e: - self.error = e + except Exception as err: + self.error = err def ready(self, server): self.serving = True diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 573065b4b714d9..1babb5ce9476c9 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Mar 7 22:42:28 2023 +# Autogenerated by Sphinx on Tue Apr 4 17:52:21 2023 topics = {'assert': 'The "assert" statement\n' '**********************\n' '\n' @@ -1134,10 +1134,11 @@ 'future, a\n' ' check may be added to prevent this.\n' '\n' - '* Nonempty *__slots__* does not work for classes derived ' - 'from\n' - ' “variable-length” built-in types such as "int", ' - '"bytes" and "tuple".\n' + '* "TypeError" will be raised if nonempty *__slots__* are ' + 'defined for a\n' + ' class derived from a ""variable-length" built-in type" ' + 'such as\n' + ' "int", "bytes", and "tuple".\n' '\n' '* Any non-string *iterable* may be assigned to ' '*__slots__*.\n' @@ -3072,7 +3073,7 @@ 'AS\n' 'pattern binds the subject to the name on the right of the as ' 'keyword\n' - 'and succeeds. "capture_pattern" cannot be a a "_".\n' + 'and succeeds. "capture_pattern" cannot be a "_".\n' '\n' 'In simple terms "P as NAME" will match with "P", and on success ' 'it\n' @@ -4675,7 +4676,7 @@ 'scripts. For\n' 'example:\n' '\n' - ' python3 -m pdb myscript.py\n' + ' python -m pdb myscript.py\n' '\n' 'When invoked as a script, pdb will automatically enter ' 'post-mortem\n' @@ -4695,7 +4696,7 @@ '\n' 'New in version 3.7: "pdb.py" now accepts a "-m" option that ' 'execute\n' - 'modules similar to the way "python3 -m" does. As with a script, ' + 'modules similar to the way "python -m" does. As with a script, ' 'the\n' 'debugger will pause execution just before the first line of the\n' 'module.\n' @@ -4759,8 +4760,8 @@ 'object)\n' ' under debugger control. When "runeval()" returns, it returns ' 'the\n' - ' value of the expression. Otherwise this function is similar ' - 'to\n' + ' value of the *expression*. Otherwise this function is ' + 'similar to\n' ' "run()".\n' '\n' 'pdb.runcall(function, *args, **kwds)\n' @@ -5022,14 +5023,15 @@ 'ignore bpnumber [count]\n' '\n' ' Set the ignore count for the given breakpoint number. If ' - 'count is\n' - ' omitted, the ignore count is set to 0. A breakpoint becomes ' - 'active\n' - ' when the ignore count is zero. 
When non-zero, the count is\n' - ' decremented each time the breakpoint is reached and the ' - 'breakpoint\n' - ' is not disabled and any associated condition evaluates to ' - 'true.\n' + '*count*\n' + ' is omitted, the ignore count is set to 0. A breakpoint ' + 'becomes\n' + ' active when the ignore count is zero. When non-zero, the ' + '*count*\n' + ' is decremented each time the breakpoint is reached and the\n' + ' breakpoint is not disabled and any associated condition ' + 'evaluates\n' + ' to true.\n' '\n' 'condition bpnumber [condition]\n' '\n' @@ -5079,7 +5081,7 @@ ' breakpoint—which could have its own command list, leading to\n' ' ambiguities about which list to execute.\n' '\n' - ' If you use the ‘silent’ command in the command list, the ' + ' If you use the "silent" command in the command list, the ' 'usual\n' ' message about stopping at a breakpoint is not printed. This ' 'may be\n' @@ -5114,11 +5116,10 @@ 'number\n' ' greater than the current one is reached.\n' '\n' - ' With a line number, continue execution until a line with a ' - 'number\n' - ' greater or equal to that is reached. In both cases, also ' - 'stop when\n' - ' the current frame returns.\n' + ' With *lineno*, continue execution until a line with a number\n' + ' greater or equal to *lineno* is reached. In both cases, also ' + 'stop\n' + ' when the current frame returns.\n' '\n' ' Changed in version 3.2: Allow giving an explicit line ' 'number.\n' @@ -5182,9 +5183,8 @@ '\n' 'p expression\n' '\n' - ' Evaluate the *expression* in the current context and print ' - 'its\n' - ' value.\n' + ' Evaluate *expression* in the current context and print its ' + 'value.\n' '\n' ' Note:\n' '\n' @@ -5194,26 +5194,26 @@ '\n' 'pp expression\n' '\n' - ' Like the "p" command, except the value of the expression is ' + ' Like the "p" command, except the value of *expression* is ' 'pretty-\n' ' printed using the "pprint" module.\n' '\n' 'whatis expression\n' '\n' - ' Print the type of the *expression*.\n' + ' Print the type of *expression*.\n' '\n' 'source expression\n' '\n' - ' Try to get source code for the given object and display it.\n' + ' Try to get source code of *expression* and display it.\n' '\n' ' New in version 3.2.\n' '\n' 'display [expression]\n' '\n' - ' Display the value of the expression if it changed, each time\n' + ' Display the value of *expression* if it changed, each time\n' ' execution stops in the current frame.\n' '\n' - ' Without expression, list all display expressions for the ' + ' Without *expression*, list all display expressions for the ' 'current\n' ' frame.\n' '\n' @@ -5221,10 +5221,10 @@ '\n' 'undisplay [expression]\n' '\n' - ' Do not display the expression any more in the current frame.\n' - ' Without expression, clear all display expressions for the ' - 'current\n' - ' frame.\n' + ' Do not display *expression* anymore in the current frame. ' + 'Without\n' + ' *expression*, clear all display expressions for the current ' + 'frame.\n' '\n' ' New in version 3.2.\n' '\n' @@ -5240,16 +5240,16 @@ '\n' 'alias [name [command]]\n' '\n' - ' Create an alias called *name* that executes *command*. The ' - 'command\n' - ' must *not* be enclosed in quotes. Replaceable parameters can ' - 'be\n' - ' indicated by "%1", "%2", and so on, while "%*" is replaced by ' - 'all\n' - ' the parameters. If no command is given, the current alias ' - 'for\n' - ' *name* is shown. If no arguments are given, all aliases are ' - 'listed.\n' + ' Create an alias called *name* that executes *command*. 
The\n' + ' *command* must *not* be enclosed in quotes. Replaceable ' + 'parameters\n' + ' can be indicated by "%1", "%2", and so on, while "%*" is ' + 'replaced\n' + ' by all the parameters. If *command* is omitted, the current ' + 'alias\n' + ' for *name* is shown. If no arguments are given, all aliases ' + 'are\n' + ' listed.\n' '\n' ' Aliases may be nested and can contain anything that can be ' 'legally\n' @@ -5268,14 +5268,14 @@ ' in the ".pdbrc" file):\n' '\n' ' # Print instance variables (usage "pi classInst")\n' - ' alias pi for k in %1.__dict__.keys(): ' - 'print("%1.",k,"=",%1.__dict__[k])\n' + ' alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = ' + '{%1.__dict__[k]}")\n' ' # Print instance variables in self\n' ' alias ps pi self\n' '\n' 'unalias name\n' '\n' - ' Delete the specified alias.\n' + ' Delete the specified alias *name*.\n' '\n' '! statement\n' '\n' @@ -5295,12 +5295,13 @@ 'run [args ...]\n' 'restart [args ...]\n' '\n' - ' Restart the debugged Python program. If an argument is ' - 'supplied,\n' - ' it is split with "shlex" and the result is used as the new\n' - ' "sys.argv". History, breakpoints, actions and debugger ' - 'options are\n' - ' preserved. "restart" is an alias for "run".\n' + ' Restart the debugged Python program. If *args* is supplied, ' + 'it is\n' + ' split with "shlex" and the result is used as the new ' + '"sys.argv".\n' + ' History, breakpoints, actions and debugger options are ' + 'preserved.\n' + ' "restart" is an alias for "run".\n' '\n' 'q(uit)\n' '\n' @@ -5309,11 +5310,11 @@ '\n' 'debug code\n' '\n' - ' Enter a recursive debugger that steps through the code ' - 'argument\n' - ' (which is an arbitrary expression or statement to be executed ' - 'in\n' - ' the current environment).\n' + ' Enter a recursive debugger that steps through *code* (which ' + 'is an\n' + ' arbitrary expression or statement to be executed in the ' + 'current\n' + ' environment).\n' '\n' 'retval\n' '\n' @@ -6170,7 +6171,8 @@ 'The general form of a *standard format specifier* is:\n' '\n' ' format_spec ::= ' - '[[fill]align][sign][z][#][0][width][grouping_option][.precision][type]\n' + '[[fill]align][sign]["z"]["#"]["0"][width][grouping_option]["." 
' + 'precision][type]\n' ' fill ::= <any character>\n' ' align ::= "<" | ">" | "=" | "^"\n' ' sign ::= "+" | "-" | " "\n' @@ -9981,10 +9983,11 @@ 'future, a\n' ' check may be added to prevent this.\n' '\n' - '* Nonempty *__slots__* does not work for classes derived ' - 'from\n' - ' “variable-length” built-in types such as "int", "bytes" ' - 'and "tuple".\n' + '* "TypeError" will be raised if nonempty *__slots__* are ' + 'defined for a\n' + ' class derived from a ""variable-length" built-in type" ' + 'such as\n' + ' "int", "bytes", and "tuple".\n' '\n' '* Any non-string *iterable* may be assigned to *__slots__*.\n' '\n' @@ -13691,11 +13694,10 @@ ' compiled; "co_firstlineno" is the first line number of the\n' ' function; "co_lnotab" is a string encoding the mapping from\n' ' bytecode offsets to line numbers (for details see the source\n' - ' code of the interpreter); "co_stacksize" is the required ' - 'stack\n' - ' size; "co_flags" is an integer encoding a number of flags ' - 'for\n' - ' the interpreter.\n' + ' code of the interpreter, is deprecated since 3.12 and may be\n' + ' removed in 3.14); "co_stacksize" is the required stack size;\n' + ' "co_flags" is an integer encoding a number of flags for the\n' + ' interpreter.\n' '\n' ' The following flag bits are defined for "co_flags": bit ' '"0x04"\n' diff --git a/Lib/random.py b/Lib/random.py index 3c4291f6a652a0..586c3f7f9da938 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -24,7 +24,6 @@ negative exponential gamma beta - binomial pareto Weibull @@ -33,6 +32,11 @@ circular uniform von Mises + discrete distributions + ---------------------- + binomial + + General notes on the underlying Mersenne Twister core generator: * The period is 2**19937-1. @@ -731,6 +735,26 @@ def betavariate(self, alpha, beta): return y / (y + self.gammavariate(beta, 1.0)) return 0.0 + def paretovariate(self, alpha): + """Pareto distribution. alpha is the shape parameter.""" + # Jain, pg. 495 + + u = 1.0 - self.random() + return u ** (-1.0 / alpha) + + def weibullvariate(self, alpha, beta): + """Weibull distribution. + + alpha is the scale parameter and beta is the shape parameter. + + """ + # Jain, pg. 499; bug fix courtesy Bill Arms + + u = 1.0 - self.random() + return alpha * (-_log(u)) ** (1.0 / beta) + + + ## -------------------- discrete distributions --------------------- def binomialvariate(self, n=1, p=0.5): """Binomial random variable. @@ -816,25 +840,6 @@ def binomialvariate(self, n=1, p=0.5): return k - def paretovariate(self, alpha): - """Pareto distribution. alpha is the shape parameter.""" - # Jain, pg. 495 - - u = 1.0 - self.random() - return u ** (-1.0 / alpha) - - def weibullvariate(self, alpha, beta): - """Weibull distribution. - - alpha is the scale parameter and beta is the shape parameter. - - """ - # Jain, pg. 499; bug fix courtesy Bill Arms - - u = 1.0 - self.random() - return alpha * (-_log(u)) ** (1.0 / beta) - - ## ------------------------------------------------------------------ ## --------------- Operating System Random Source ------------------ diff --git a/Lib/runpy.py b/Lib/runpy.py index 54fc136d4074f2..42f896c9cd5094 100644 --- a/Lib/runpy.py +++ b/Lib/runpy.py @@ -279,12 +279,7 @@ def run_path(path_name, init_globals=None, run_name=None): pkg_name = run_name.rpartition(".")[0] from pkgutil import get_importer importer = get_importer(path_name) - # Trying to avoid importing imp so as to not consume the deprecation warning. 
- is_NullImporter = False - if type(importer).__module__ == 'imp': - if type(importer).__name__ == 'NullImporter': - is_NullImporter = True - if isinstance(importer, type(None)) or is_NullImporter: + if isinstance(importer, type(None)): # Not a valid sys.path entry, so run the code directly # execfile() doesn't help as we want to allow compiled files code, fname = _get_code_from_file(run_name, path_name) diff --git a/Lib/shutil.py b/Lib/shutil.py index 867925aa10cc04..7d1a3d00011f37 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -10,6 +10,7 @@ import fnmatch import collections import errno +import warnings try: import zlib @@ -39,6 +40,9 @@ elif _WINDOWS: import nt +if sys.platform == 'win32': + import _winapi + COPY_BUFSIZE = 1024 * 1024 if _WINDOWS else 64 * 1024 # This should never be removed, see rationale in: # https://bugs.python.org/issue43743#msg393429 @@ -328,7 +332,7 @@ def _copyxattr(src, dst, *, follow_symlinks=True): os.setxattr(dst, name, value, follow_symlinks=follow_symlinks) except OSError as e: if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA, - errno.EINVAL): + errno.EINVAL, errno.EACCES): raise else: def _copyxattr(*args, **kwargs): @@ -575,12 +579,12 @@ def _rmtree_islink(path): return os.path.islink(path) # version vulnerable to race conditions -def _rmtree_unsafe(path, onerror): +def _rmtree_unsafe(path, onexc): try: with os.scandir(path) as scandir_it: entries = list(scandir_it) - except OSError: - onerror(os.scandir, path, sys.exc_info()) + except OSError as err: + onexc(os.scandir, path, err) entries = [] for entry in entries: fullname = entry.path @@ -596,28 +600,28 @@ def _rmtree_unsafe(path, onerror): # a directory with a symlink after the call to # os.scandir or entry.is_dir above. raise OSError("Cannot call rmtree on a symbolic link") - except OSError: - onerror(os.path.islink, fullname, sys.exc_info()) + except OSError as err: + onexc(os.path.islink, fullname, err) continue - _rmtree_unsafe(fullname, onerror) + _rmtree_unsafe(fullname, onexc) else: try: os.unlink(fullname) - except OSError: - onerror(os.unlink, fullname, sys.exc_info()) + except OSError as err: + onexc(os.unlink, fullname, err) try: os.rmdir(path) - except OSError: - onerror(os.rmdir, path, sys.exc_info()) + except OSError as err: + onexc(os.rmdir, path, err) # Version using fd-based APIs to protect against races -def _rmtree_safe_fd(topfd, path, onerror): +def _rmtree_safe_fd(topfd, path, onexc): try: with os.scandir(topfd) as scandir_it: entries = list(scandir_it) except OSError as err: err.filename = path - onerror(os.scandir, path, sys.exc_info()) + onexc(os.scandir, path, err) return for entry in entries: fullname = os.path.join(path, entry.name) @@ -630,25 +634,25 @@ def _rmtree_safe_fd(topfd, path, onerror): try: orig_st = entry.stat(follow_symlinks=False) is_dir = stat.S_ISDIR(orig_st.st_mode) - except OSError: - onerror(os.lstat, fullname, sys.exc_info()) + except OSError as err: + onexc(os.lstat, fullname, err) continue if is_dir: try: dirfd = os.open(entry.name, os.O_RDONLY, dir_fd=topfd) dirfd_closed = False - except OSError: - onerror(os.open, fullname, sys.exc_info()) + except OSError as err: + onexc(os.open, fullname, err) else: try: if os.path.samestat(orig_st, os.fstat(dirfd)): - _rmtree_safe_fd(dirfd, fullname, onerror) + _rmtree_safe_fd(dirfd, fullname, onexc) try: os.close(dirfd) dirfd_closed = True os.rmdir(entry.name, dir_fd=topfd) - except OSError: - onerror(os.rmdir, fullname, sys.exc_info()) + except OSError as err: + onexc(os.rmdir, fullname, err) 
else: try: # This can only happen if someone replaces @@ -656,23 +660,23 @@ def _rmtree_safe_fd(topfd, path, onerror): # os.scandir or stat.S_ISDIR above. raise OSError("Cannot call rmtree on a symbolic " "link") - except OSError: - onerror(os.path.islink, fullname, sys.exc_info()) + except OSError as err: + onexc(os.path.islink, fullname, err) finally: if not dirfd_closed: os.close(dirfd) else: try: os.unlink(entry.name, dir_fd=topfd) - except OSError: - onerror(os.unlink, fullname, sys.exc_info()) + except OSError as err: + onexc(os.unlink, fullname, err) _use_fd_functions = ({os.open, os.stat, os.unlink, os.rmdir} <= os.supports_dir_fd and os.scandir in os.supports_fd and os.stat in os.supports_follow_symlinks) -def rmtree(path, ignore_errors=False, onerror=None, *, dir_fd=None): +def rmtree(path, ignore_errors=False, onerror=None, *, onexc=None, dir_fd=None): """Recursively delete a directory tree. If dir_fd is not None, it should be a file descriptor open to a directory; @@ -680,21 +684,44 @@ def rmtree(path, ignore_errors=False, onerror=None, *, dir_fd=None): dir_fd may not be implemented on your platform. If it is unavailable, using it will raise a NotImplementedError. - If ignore_errors is set, errors are ignored; otherwise, if onerror - is set, it is called to handle the error with arguments (func, + If ignore_errors is set, errors are ignored; otherwise, if onexc or + onerror is set, it is called to handle the error with arguments (func, path, exc_info) where func is platform and implementation dependent; path is the argument to that function that caused it to fail; and - exc_info is a tuple returned by sys.exc_info(). If ignore_errors - is false and onerror is None, an exception is raised. + the value of exc_info describes the exception. For onexc it is the + exception instance, and for onerror it is a tuple as returned by + sys.exc_info(). If ignore_errors is false and both onexc and + onerror are None, the exception is reraised. + onerror is deprecated and only remains for backwards compatibility. + If both onerror and onexc are set, onerror is ignored and onexc is used. """ + + if onerror is not None: + warnings.warn("onerror argument is deprecated, use onexc instead", + DeprecationWarning, stacklevel=2) + sys.audit("shutil.rmtree", path, dir_fd) if ignore_errors: - def onerror(*args): + def onexc(*args): pass - elif onerror is None: - def onerror(*args): + elif onerror is None and onexc is None: + def onexc(*args): raise + elif onexc is None: + if onerror is None: + def onexc(*args): + raise + else: + # delegate to onerror + def onexc(*args): + func, path, exc = args + if exc is None: + exc_info = None, None, None + else: + exc_info = type(exc), exc, exc.__traceback__ + return onerror(func, path, exc_info) + if _use_fd_functions: # While the unsafe rmtree works fine on bytes, the fd based does not. if isinstance(path, bytes): @@ -703,30 +730,30 @@ def onerror(*args): # lstat()/open()/fstat() trick. 
try: orig_st = os.lstat(path, dir_fd=dir_fd) - except Exception: - onerror(os.lstat, path, sys.exc_info()) + except Exception as err: + onexc(os.lstat, path, err) return try: fd = os.open(path, os.O_RDONLY, dir_fd=dir_fd) fd_closed = False - except Exception: - onerror(os.open, path, sys.exc_info()) + except Exception as err: + onexc(os.open, path, err) return try: if os.path.samestat(orig_st, os.fstat(fd)): - _rmtree_safe_fd(fd, path, onerror) + _rmtree_safe_fd(fd, path, onexc) try: os.close(fd) fd_closed = True os.rmdir(path, dir_fd=dir_fd) - except OSError: - onerror(os.rmdir, path, sys.exc_info()) + except OSError as err: + onexc(os.rmdir, path, err) else: try: # symlinks to directories are forbidden, see bug #1669 raise OSError("Cannot call rmtree on a symbolic link") - except OSError: - onerror(os.path.islink, path, sys.exc_info()) + except OSError as err: + onexc(os.path.islink, path, err) finally: if not fd_closed: os.close(fd) @@ -737,11 +764,11 @@ def onerror(*args): if _rmtree_islink(path): # symlinks to directories are forbidden, see bug #1669 raise OSError("Cannot call rmtree on a symbolic link") - except OSError: - onerror(os.path.islink, path, sys.exc_info()) - # can't continue even if onerror hook returns + except OSError as err: + onexc(os.path.islink, path, err) + # can't continue even if onexc hook returns return - return _rmtree_unsafe(path, onerror) + return _rmtree_unsafe(path, onexc) # Allow introspection of whether or not the hardening against symlink # attacks is supported on the current platform @@ -1218,7 +1245,7 @@ def _unpack_zipfile(filename, extract_dir): finally: zip.close() -def _unpack_tarfile(filename, extract_dir): +def _unpack_tarfile(filename, extract_dir, *, filter=None): """Unpack tar/tar.gz/tar.bz2/tar.xz `filename` to `extract_dir` """ import tarfile # late import for breaking circular dependency @@ -1228,7 +1255,7 @@ def _unpack_tarfile(filename, extract_dir): raise ReadError( "%s is not a compressed or uncompressed tar file" % filename) try: - tarobj.extractall(extract_dir) + tarobj.extractall(extract_dir, filter=filter) finally: tarobj.close() @@ -1261,7 +1288,7 @@ def _find_unpack_format(filename): return name return None -def unpack_archive(filename, extract_dir=None, format=None): +def unpack_archive(filename, extract_dir=None, format=None, *, filter=None): """Unpack an archive. `filename` is the name of the archive. @@ -1275,6 +1302,9 @@ def unpack_archive(filename, extract_dir=None, format=None): was registered for that extension. In case none is found, a ValueError is raised. + + If `filter` is given, it is passed to the underlying + extraction function. 
""" sys.audit("shutil.unpack_archive", filename, extract_dir, format) @@ -1284,6 +1314,10 @@ def unpack_archive(filename, extract_dir=None, format=None): extract_dir = os.fspath(extract_dir) filename = os.fspath(filename) + if filter is None: + filter_kwargs = {} + else: + filter_kwargs = {'filter': filter} if format is not None: try: format_info = _UNPACK_FORMATS[format] @@ -1291,7 +1325,7 @@ def unpack_archive(filename, extract_dir=None, format=None): raise ValueError("Unknown unpack format '{0}'".format(format)) from None func = format_info[1] - func(filename, extract_dir, **dict(format_info[2])) + func(filename, extract_dir, **dict(format_info[2]), **filter_kwargs) else: # we need to look at the registered unpackers supported extensions format = _find_unpack_format(filename) @@ -1299,7 +1333,7 @@ def unpack_archive(filename, extract_dir=None, format=None): raise ReadError("Unknown archive format '{0}'".format(filename)) func = _UNPACK_FORMATS[format][1] - kwargs = dict(_UNPACK_FORMATS[format][2]) + kwargs = dict(_UNPACK_FORMATS[format][2]) | filter_kwargs func(filename, extract_dir, **kwargs) @@ -1425,6 +1459,16 @@ def _access_check(fn, mode): and not os.path.isdir(fn)) +def _win_path_needs_curdir(cmd, mode): + """ + On Windows, we can use NeedCurrentDirectoryForExePath to figure out + if we should add the cwd to PATH when searching for executables if + the mode is executable. + """ + return (not (mode & os.X_OK)) or _winapi.NeedCurrentDirectoryForExePath( + os.fsdecode(cmd)) + + def which(cmd, mode=os.F_OK | os.X_OK, path=None): """Given a command, mode, and a PATH string, return the path which conforms to the given mode on the PATH, or None if there is no such @@ -1435,60 +1479,54 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): path. """ - # If we're given a path with a directory part, look it up directly rather - # than referring to PATH directories. This includes checking relative to the - # current directory, e.g. ./script - if os.path.dirname(cmd): - if _access_check(cmd, mode): - return cmd - return None - use_bytes = isinstance(cmd, bytes) - if path is None: - path = os.environ.get("PATH", None) - if path is None: - try: - path = os.confstr("CS_PATH") - except (AttributeError, ValueError): - # os.confstr() or CS_PATH is not available - path = os.defpath - # bpo-35755: Don't use os.defpath if the PATH environment variable is - # set to an empty string - - # PATH='' doesn't match, whereas PATH=':' looks in the current directory - if not path: - return None - - if use_bytes: - path = os.fsencode(path) - path = path.split(os.fsencode(os.pathsep)) + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to + # the current directory, e.g. ./script + dirname, cmd = os.path.split(cmd) + if dirname: + path = [dirname] else: - path = os.fsdecode(path) - path = path.split(os.pathsep) + if path is None: + path = os.environ.get("PATH", None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable + # is set to an empty string + + # PATH='' doesn't match, whereas PATH=':' looks in the current + # directory + if not path: + return None - if sys.platform == "win32": - # The current directory takes precedence on Windows. 
- curdir = os.curdir if use_bytes: - curdir = os.fsencode(curdir) - if curdir not in path: + path = os.fsencode(path) + path = path.split(os.fsencode(os.pathsep)) + else: + path = os.fsdecode(path) + path = path.split(os.pathsep) + + if sys.platform == "win32" and _win_path_needs_curdir(cmd, mode): + curdir = os.curdir + if use_bytes: + curdir = os.fsencode(curdir) path.insert(0, curdir) + if sys.platform == "win32": # PATHEXT is necessary to check on Windows. pathext_source = os.getenv("PATHEXT") or _WIN_DEFAULT_PATHEXT pathext = [ext for ext in pathext_source.split(os.pathsep) if ext] if use_bytes: pathext = [os.fsencode(ext) for ext in pathext] - # See if the given file matches any of the expected path extensions. - # This will allow us to short circuit when given "python.exe". - # If it does match, only test that one, otherwise we have to try - # others. - if any(cmd.lower().endswith(ext.lower()) for ext in pathext): - files = [cmd] - else: - files = [cmd + ext for ext in pathext] + + # Always try checking the originally given cmd, if it doesn't match, try pathext + files = [cmd] + [cmd + ext for ext in pathext] else: # On other platforms you don't have things like PATHEXT to tell you # what file suffixes are executable, so just pass on cmd as-is. diff --git a/Lib/site.py b/Lib/site.py index 7faf1c6f6af223..672fa7b000ad02 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -190,11 +190,11 @@ def addpackage(sitedir, name, known_paths): if not dircase in known_paths and os.path.exists(dir): sys.path.append(dir) known_paths.add(dircase) - except Exception: + except Exception as exc: print("Error processing line {:d} of {}:\n".format(n+1, fullname), file=sys.stderr) import traceback - for record in traceback.format_exception(*sys.exc_info()): + for record in traceback.format_exception(exc): for line in record.splitlines(): print(' '+line, file=sys.stderr) print("\nRemainder of file ignored", file=sys.stderr) @@ -492,20 +492,23 @@ def venv(known_paths): executable = sys._base_executable = os.environ['__PYVENV_LAUNCHER__'] else: executable = sys.executable - exe_dir, _ = os.path.split(os.path.abspath(executable)) + exe_dir = os.path.dirname(os.path.abspath(executable)) site_prefix = os.path.dirname(exe_dir) sys._home = None conf_basename = 'pyvenv.cfg' - candidate_confs = [ - conffile for conffile in ( - os.path.join(exe_dir, conf_basename), - os.path.join(site_prefix, conf_basename) + candidate_conf = next( + ( + conffile for conffile in ( + os.path.join(exe_dir, conf_basename), + os.path.join(site_prefix, conf_basename) ) - if os.path.isfile(conffile) - ] + if os.path.isfile(conffile) + ), + None + ) - if candidate_confs: - virtual_conf = candidate_confs[0] + if candidate_conf: + virtual_conf = candidate_conf system_site = "true" # Issue 25185: Use UTF-8, as that's what the venv module uses when # writing the file. diff --git a/Lib/socketserver.py b/Lib/socketserver.py index 842d526b011911..cd028ef1c63b85 100644 --- a/Lib/socketserver.py +++ b/Lib/socketserver.py @@ -141,6 +141,8 @@ class will essentially render the service "deaf" while one request is __all__.extend(["UnixStreamServer","UnixDatagramServer", "ThreadingUnixStreamServer", "ThreadingUnixDatagramServer"]) + if hasattr(os, "fork"): + __all__.extend(["ForkingUnixStreamServer", "ForkingUnixDatagramServer"]) # poll/select have the advantage of not requiring any extra file descriptor, # contrarily to epoll/kqueue (also, they require a single syscall). 
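The socketserver hunk above only extends __all__ when os.fork exists; the matching ForkingUnixStreamServer and ForkingUnixDatagramServer classes are added in the next hunk. A minimal usage sketch, not part of the patch; the socket path and echo handler are invented for illustration:

import socketserver

class EchoHandler(socketserver.StreamRequestHandler):
    def handle(self):
        # Each connection is served in a forked child; echo lines back.
        for line in self.rfile:
            self.wfile.write(line)

if hasattr(socketserver, "ForkingUnixStreamServer"):
    # UnixStreamServer takes a filesystem path as the server address.
    with socketserver.ForkingUnixStreamServer("/tmp/echo.sock", EchoHandler) as srv:
        srv.serve_forever()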
@@ -727,6 +729,11 @@ class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass + if hasattr(os, "fork"): + class ForkingUnixStreamServer(ForkingMixIn, UnixStreamServer): pass + + class ForkingUnixDatagramServer(ForkingMixIn, UnixDatagramServer): pass + class BaseRequestHandler: """Base class for request handler classes. diff --git a/Lib/sqlite3/__main__.py b/Lib/sqlite3/__main__.py index f8a5cca24e56af..3228dbc09d502a 100644 --- a/Lib/sqlite3/__main__.py +++ b/Lib/sqlite3/__main__.py @@ -94,12 +94,16 @@ def main(): db_name = repr(args.filename) # Prepare REPL banner and prompts. + if sys.platform == "win32" and "idlelib.run" not in sys.modules: + eofkey = "CTRL-Z" + else: + eofkey = "CTRL-D" banner = dedent(f""" sqlite3 shell, running on SQLite version {sqlite3.sqlite_version} Connected to {db_name} Each command will be run using execute() on the cursor. - Type ".help" for more information; type ".quit" or CTRL-D to quit. + Type ".help" for more information; type ".quit" or {eofkey} to quit. """).strip() sys.ps1 = "sqlite> " sys.ps2 = " ... " diff --git a/Lib/tarfile.py b/Lib/tarfile.py index d686435d90ad1b..7781a430839ea5 100755 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -46,6 +46,7 @@ import struct import copy import re +import warnings try: import pwd @@ -65,7 +66,11 @@ __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError", "CompressionError", "StreamError", "ExtractError", "HeaderError", "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT", - "DEFAULT_FORMAT", "open"] + "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter", + "tar_filter", "FilterError", "AbsoluteLinkError", + "OutsideDestinationError", "SpecialFileError", "AbsolutePathError", + "LinkOutsideDestinationError"] + #--------------------------------------------------------- # tar constants @@ -154,6 +159,8 @@ def stn(s, length, encoding, errors): """Convert a string to a null-terminated bytes object. """ + if s is None: + raise ValueError("metadata cannot contain None") s = s.encode(encoding, errors) return s[:length] + (length - len(s)) * NUL @@ -601,12 +608,12 @@ class _FileInFile(object): object. 
""" - def __init__(self, fileobj, offset, size, blockinfo=None): + def __init__(self, fileobj, offset, size, name, blockinfo=None): self.fileobj = fileobj self.offset = offset self.size = size self.position = 0 - self.name = getattr(fileobj, "name", None) + self.name = name self.closed = False if blockinfo is None: @@ -703,13 +710,131 @@ class ExFileObject(io.BufferedReader): def __init__(self, tarfile, tarinfo): fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data, - tarinfo.size, tarinfo.sparse) + tarinfo.size, tarinfo.name, tarinfo.sparse) super().__init__(fileobj) #class ExFileObject + +#----------------------------- +# extraction filters (PEP 706) +#----------------------------- + +class FilterError(TarError): + pass + +class AbsolutePathError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'member {tarinfo.name!r} has an absolute path') + +class OutsideDestinationError(FilterError): + def __init__(self, tarinfo, path): + self.tarinfo = tarinfo + self._path = path + super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, ' + + 'which is outside the destination') + +class SpecialFileError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'{tarinfo.name!r} is a special file') + +class AbsoluteLinkError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'{tarinfo.name!r} is a symlink to an absolute path') + +class LinkOutsideDestinationError(FilterError): + def __init__(self, tarinfo, path): + self.tarinfo = tarinfo + self._path = path + super().__init__(f'{tarinfo.name!r} would link to {path!r}, ' + + 'which is outside the destination') + +def _get_filtered_attrs(member, dest_path, for_data=True): + new_attrs = {} + name = member.name + dest_path = os.path.realpath(dest_path) + # Strip leading / (tar's directory separator) from filenames. + # Include os.sep (target OS directory separator) as well. + if name.startswith(('/', os.sep)): + name = new_attrs['name'] = member.path.lstrip('/' + os.sep) + if os.path.isabs(name): + # Path is absolute even after stripping. + # For example, 'C:/foo' on Windows. 
+ raise AbsolutePathError(member) + # Ensure we stay in the destination + target_path = os.path.realpath(os.path.join(dest_path, name)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise OutsideDestinationError(member, target_path) + # Limit permissions (no high bits, and go-w) + mode = member.mode + if mode is not None: + # Strip high bits & group/other write bits + mode = mode & 0o755 + if for_data: + # For data, handle permissions & file types + if member.isreg() or member.islnk(): + if not mode & 0o100: + # Clear executable bits if not executable by user + mode &= ~0o111 + # Ensure owner can read & write + mode |= 0o600 + elif member.isdir() or member.issym(): + # Ignore mode for directories & symlinks + mode = None + else: + # Reject special files + raise SpecialFileError(member) + if mode != member.mode: + new_attrs['mode'] = mode + if for_data: + # Ignore ownership for 'data' + if member.uid is not None: + new_attrs['uid'] = None + if member.gid is not None: + new_attrs['gid'] = None + if member.uname is not None: + new_attrs['uname'] = None + if member.gname is not None: + new_attrs['gname'] = None + # Check link destination for 'data' + if member.islnk() or member.issym(): + if os.path.isabs(member.linkname): + raise AbsoluteLinkError(member) + target_path = os.path.realpath(os.path.join(dest_path, member.linkname)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise LinkOutsideDestinationError(member, target_path) + return new_attrs + +def fully_trusted_filter(member, dest_path): + return member + +def tar_filter(member, dest_path): + new_attrs = _get_filtered_attrs(member, dest_path, False) + if new_attrs: + return member.replace(**new_attrs, deep=False) + return member + +def data_filter(member, dest_path): + new_attrs = _get_filtered_attrs(member, dest_path, True) + if new_attrs: + return member.replace(**new_attrs, deep=False) + return member + +_NAMED_FILTERS = { + "fully_trusted": fully_trusted_filter, + "tar": tar_filter, + "data": data_filter, +} + #------------------ # Exported Classes #------------------ + +# Sentinel for replace() defaults, meaning "don't change the attribute" +_KEEP = object() + class TarInfo(object): """Informational class which holds the details about an archive member given by a tar header block. @@ -790,12 +915,44 @@ def linkpath(self, linkname): def __repr__(self): return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + def replace(self, *, + name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP, + uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP, + deep=True, _KEEP=_KEEP): + """Return a deep copy of self with the given attributes replaced. + """ + if deep: + result = copy.deepcopy(self) + else: + result = copy.copy(self) + if name is not _KEEP: + result.name = name + if mtime is not _KEEP: + result.mtime = mtime + if mode is not _KEEP: + result.mode = mode + if linkname is not _KEEP: + result.linkname = linkname + if uid is not _KEEP: + result.uid = uid + if gid is not _KEEP: + result.gid = gid + if uname is not _KEEP: + result.uname = uname + if gname is not _KEEP: + result.gname = gname + return result + def get_info(self): """Return the TarInfo's attributes as a dictionary. 
""" + if self.mode is None: + mode = None + else: + mode = self.mode & 0o7777 info = { "name": self.name, - "mode": self.mode & 0o7777, + "mode": mode, "uid": self.uid, "gid": self.gid, "size": self.size, @@ -818,6 +975,9 @@ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescap """Return a tar header as a string of 512 byte blocks. """ info = self.get_info() + for name, value in info.items(): + if value is None: + raise ValueError("%s may not be None" % name) if format == USTAR_FORMAT: return self.create_ustar_header(info, encoding, errors) @@ -948,6 +1108,12 @@ def _create_header(info, format, encoding, errors): devmajor = stn("", 8, encoding, errors) devminor = stn("", 8, encoding, errors) + # None values in metadata should cause ValueError. + # itn()/stn() do this for all fields except type. + filetype = info.get("type", REGTYPE) + if filetype is None: + raise ValueError("TarInfo.type must not be None") + parts = [ stn(info.get("name", ""), 100, encoding, errors), itn(info.get("mode", 0) & 0o7777, 8, format), @@ -956,7 +1122,7 @@ def _create_header(info, format, encoding, errors): itn(info.get("size", 0), 12, format), itn(info.get("mtime", 0), 12, format), b" ", # checksum field - info.get("type", REGTYPE), + filetype, stn(info.get("linkname", ""), 100, encoding, errors), info.get("magic", POSIX_MAGIC), stn(info.get("uname", ""), 32, encoding, errors), @@ -1462,6 +1628,8 @@ class TarFile(object): fileobject = ExFileObject # The file-object for extractfile(). + extraction_filter = None # The default filter for extraction. + def __init__(self, name=None, mode="r", fileobj=None, format=None, tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, errors="surrogateescape", pax_headers=None, debug=None, @@ -1936,7 +2104,10 @@ def list(self, verbose=True, *, members=None): members = self for tarinfo in members: if verbose: - _safe_print(stat.filemode(tarinfo.mode)) + if tarinfo.mode is None: + _safe_print("??????????") + else: + _safe_print(stat.filemode(tarinfo.mode)) _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid, tarinfo.gname or tarinfo.gid)) if tarinfo.ischr() or tarinfo.isblk(): @@ -1944,8 +2115,11 @@ def list(self, verbose=True, *, members=None): ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor))) else: _safe_print("%10d" % tarinfo.size) - _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ - % time.localtime(tarinfo.mtime)[:6]) + if tarinfo.mtime is None: + _safe_print("????-??-?? ??:??:??") + else: + _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6]) _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else "")) @@ -2032,32 +2206,63 @@ def addfile(self, tarinfo, fileobj=None): self.members.append(tarinfo) - def extractall(self, path=".", members=None, *, numeric_owner=False): + def _get_filter_function(self, filter): + if filter is None: + filter = self.extraction_filter + if filter is None: + warnings.warn( + 'Python 3.14 will, by default, filter extracted tar ' + + 'archives and reject files or modify their metadata. ' + + 'Use the filter argument to control this behavior.', + DeprecationWarning) + return fully_trusted_filter + if isinstance(filter, str): + raise TypeError( + 'String names are not supported for ' + + 'TarFile.extraction_filter. 
Use a function such as ' + + 'tarfile.data_filter directly.') + return filter + if callable(filter): + return filter + try: + return _NAMED_FILTERS[filter] + except KeyError: + raise ValueError(f"filter {filter!r} not found") from None + + def extractall(self, path=".", members=None, *, numeric_owner=False, + filter=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). If `numeric_owner` is True, only the numbers for user/group names are used and not the names. + + The `filter` function will be called on each member just + before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. """ directories = [] + filter_function = self._get_filter_function(filter) if members is None: members = self - for tarinfo in members: + for member in members: + tarinfo = self._get_extract_tarinfo(member, filter_function, path) + if tarinfo is None: + continue if tarinfo.isdir(): - # Extract directories with a safe mode. + # For directories, delay setting attributes until later, + # since permissions can interfere with extraction and + # extracting contents can reset mtime. directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 0o700 - # Do not set_attrs directories, as we will do that further down - self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(), - numeric_owner=numeric_owner) + self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(), + numeric_owner=numeric_owner) # Reverse sort directories. - directories.sort(key=lambda a: a.name) - directories.reverse() + directories.sort(key=lambda a: a.name, reverse=True) # Set correct owner, mtime and filemode on directories. for tarinfo in directories: @@ -2067,12 +2272,10 @@ def extractall(self, path=".", members=None, *, numeric_owner=False): self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError as e: - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) + self._handle_nonfatal_error(e) - def extract(self, member, path="", set_attrs=True, *, numeric_owner=False): + def extract(self, member, path="", set_attrs=True, *, numeric_owner=False, + filter=None): """Extract a member from the archive to the current working directory, using its full name. Its file information is extracted as accurately as possible. `member' may be a filename or a TarInfo object. You can @@ -2080,35 +2283,70 @@ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False): mtime, mode) are set unless `set_attrs' is False. If `numeric_owner` is True, only the numbers for user/group names are used and not the names. + + The `filter` function will be called before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. 
""" - self._check("r") + filter_function = self._get_filter_function(filter) + tarinfo = self._get_extract_tarinfo(member, filter_function, path) + if tarinfo is not None: + self._extract_one(tarinfo, path, set_attrs, numeric_owner) + def _get_extract_tarinfo(self, member, filter_function, path): + """Get filtered TarInfo (or None) from member, which might be a str""" if isinstance(member, str): tarinfo = self.getmember(member) else: tarinfo = member + unfiltered = tarinfo + try: + tarinfo = filter_function(tarinfo, path) + except (OSError, FilterError) as e: + self._handle_fatal_error(e) + except ExtractError as e: + self._handle_nonfatal_error(e) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % unfiltered.name) + return None # Prepare the link target for makelink(). if tarinfo.islnk(): + tarinfo = copy.copy(tarinfo) tarinfo._link_target = os.path.join(path, tarinfo.linkname) + return tarinfo + + def _extract_one(self, tarinfo, path, set_attrs, numeric_owner): + """Extract from filtered tarinfo to disk""" + self._check("r") try: self._extract_member(tarinfo, os.path.join(path, tarinfo.name), set_attrs=set_attrs, numeric_owner=numeric_owner) except OSError as e: - if self.errorlevel > 0: - raise - else: - if e.filename is None: - self._dbg(1, "tarfile: %s" % e.strerror) - else: - self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + self._handle_fatal_error(e) except ExtractError as e: - if self.errorlevel > 1: - raise + self._handle_nonfatal_error(e) + + def _handle_nonfatal_error(self, e): + """Handle non-fatal error (ExtractError) according to errorlevel""" + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def _handle_fatal_error(self, e): + """Handle "fatal" error according to self.errorlevel""" + if self.errorlevel > 0: + raise + elif isinstance(e, OSError): + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) else: - self._dbg(1, "tarfile: %s" % e) + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + else: + self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e)) def extractfile(self, member): """Extract a member from the archive as a file object. `member' may be @@ -2195,9 +2433,13 @@ def makedir(self, tarinfo, targetpath): """Make a directory called targetpath. """ try: - # Use a safe mode for the directory, the real mode is set - # later in _extract_member(). - os.mkdir(targetpath, 0o700) + if tarinfo.mode is None: + # Use the system's default mode + os.mkdir(targetpath) + else: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) except FileExistsError: pass @@ -2240,6 +2482,9 @@ def makedev(self, tarinfo, targetpath): raise ExtractError("special devices not supported by system") mode = tarinfo.mode + if mode is None: + # Use mknod's default + mode = 0o600 if tarinfo.isblk(): mode |= stat.S_IFBLK else: @@ -2261,7 +2506,6 @@ def makelink(self, tarinfo, targetpath): os.unlink(targetpath) os.symlink(tarinfo.linkname, targetpath) else: - # See extract(). 
if os.path.exists(tarinfo._link_target): os.link(tarinfo._link_target, targetpath) else: @@ -2286,15 +2530,19 @@ def chown(self, tarinfo, targetpath, numeric_owner): u = tarinfo.uid if not numeric_owner: try: - if grp: + if grp and tarinfo.gname: g = grp.getgrnam(tarinfo.gname)[2] except KeyError: pass try: - if pwd: + if pwd and tarinfo.uname: u = pwd.getpwnam(tarinfo.uname)[2] except KeyError: pass + if g is None: + g = -1 + if u is None: + u = -1 try: if tarinfo.issym() and hasattr(os, "lchown"): os.lchown(targetpath, u, g) @@ -2306,6 +2554,8 @@ def chown(self, tarinfo, targetpath, numeric_owner): def chmod(self, tarinfo, targetpath): """Set file permissions of targetpath according to tarinfo. """ + if tarinfo.mode is None: + return try: os.chmod(targetpath, tarinfo.mode) except OSError as e: @@ -2314,10 +2564,13 @@ def chmod(self, tarinfo, targetpath): def utime(self, tarinfo, targetpath): """Set modification time of targetpath according to tarinfo. """ + mtime = tarinfo.mtime + if mtime is None: + return if not hasattr(os, 'utime'): return try: - os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + os.utime(targetpath, (mtime, mtime)) except OSError as e: raise ExtractError("could not change modification time") from e @@ -2395,13 +2648,26 @@ def _getmember(self, name, tarinfo=None, normalize=False): members = self.getmembers() # Limit the member search list up to tarinfo. + skipping = False if tarinfo is not None: - members = members[:members.index(tarinfo)] + try: + index = members.index(tarinfo) + except ValueError: + # The given starting point might be a (modified) copy. + # We'll later skip members until we find an equivalent. + skipping = True + else: + # Happy fast path + members = members[:index] if normalize: name = os.path.normpath(name) for member in reversed(members): + if skipping: + if tarinfo.offset == member.offset: + skipping = False + continue if normalize: member_name = os.path.normpath(member.name) else: @@ -2410,6 +2676,10 @@ def _getmember(self, name, tarinfo=None, normalize=False): if name == member_name: return member + if skipping: + # Starting point was not found + raise ValueError(tarinfo) + def _load(self): """Read through the entire archive file and look for readable members. @@ -2500,6 +2770,7 @@ def __exit__(self, type, value, traceback): #-------------------- # exported functions #-------------------- + def is_tarfile(name): """Return True if name points to a tar archive that we are able to handle, else return False. 
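The extraction-filter API introduced above (PEP 706) can be selected per call by name or installed as a default on TarFile. A short sketch, not part of the patch; the archive name and destination directory are hypothetical:

import tarfile

with tarfile.open("example.tar.gz") as tf:
    # The "data" filter strips leading "/" from member names, refuses members
    # or link targets that would land outside the destination, rejects special
    # files, and limits mode bits.
    tf.extractall(path="dest", filter="data")

# A process-wide default takes a callable, not a name; wrapping it in
# staticmethod() keeps it from being bound as a method on TarFile instances.
tarfile.TarFile.extraction_filter = staticmethod(tarfile.data_filter)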
@@ -2528,6 +2799,10 @@ def main(): parser = argparse.ArgumentParser(description=description) parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Verbose output') + parser.add_argument('--filter', metavar='<filtername>', + choices=_NAMED_FILTERS, + help='Filter for extraction') + group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-l', '--list', metavar='<tarfile>', help='Show listing of a tarfile') @@ -2539,8 +2814,12 @@ def main(): help='Create tarfile from sources') group.add_argument('-t', '--test', metavar='<tarfile>', help='Test if a tarfile is valid') + args = parser.parse_args() + if args.filter and args.extract is None: + parser.exit(1, '--filter is only valid for extraction\n') + if args.test is not None: src = args.test if is_tarfile(src): @@ -2571,7 +2850,7 @@ def main(): if is_tarfile(src): with TarFile.open(src, 'r:*') as tf: - tf.extractall(path=curdir) + tf.extractall(path=curdir, filter=args.filter) if args.verbose: if curdir == '.': msg = '{!r} file is extracted.'.format(src) diff --git a/Lib/tempfile.py b/Lib/tempfile.py index bb18d60db0d919..2b4f4313247128 100644 --- a/Lib/tempfile.py +++ b/Lib/tempfile.py @@ -376,7 +376,7 @@ def mkdtemp(suffix=None, prefix=None, dir=None): continue else: raise - return file + return _os.path.abspath(file) raise FileExistsError(_errno.EEXIST, "No usable temporary directory name found") @@ -850,22 +850,31 @@ class TemporaryDirectory: ... Upon exiting the context, the directory and everything contained - in it are removed. + in it are removed (unless delete=False is passed or an exception + is raised during cleanup and ignore_cleanup_errors is not True). + + Optional Arguments: + suffix - A str suffix for the directory name. (see mkdtemp) + prefix - A str prefix for the directory name. (see mkdtemp) + dir - A directory to create this temp dir in. (see mkdtemp) + ignore_cleanup_errors - False; ignore exceptions during cleanup? + delete - True; whether the directory is automatically deleted. 
""" def __init__(self, suffix=None, prefix=None, dir=None, - ignore_cleanup_errors=False): + ignore_cleanup_errors=False, *, delete=True): self.name = mkdtemp(suffix, prefix, dir) self._ignore_cleanup_errors = ignore_cleanup_errors + self._delete = delete self._finalizer = _weakref.finalize( self, self._cleanup, self.name, warn_message="Implicitly cleaning up {!r}".format(self), - ignore_errors=self._ignore_cleanup_errors) + ignore_errors=self._ignore_cleanup_errors, delete=self._delete) @classmethod def _rmtree(cls, name, ignore_errors=False): - def onerror(func, path, exc_info): - if issubclass(exc_info[0], PermissionError): + def onexc(func, path, exc): + if isinstance(exc, PermissionError): def resetperms(path): try: _os.chflags(path, 0) @@ -885,18 +894,19 @@ def resetperms(path): cls._rmtree(path, ignore_errors=ignore_errors) except FileNotFoundError: pass - elif issubclass(exc_info[0], FileNotFoundError): + elif isinstance(exc, FileNotFoundError): pass else: if not ignore_errors: raise - _shutil.rmtree(name, onerror=onerror) + _shutil.rmtree(name, onexc=onexc) @classmethod - def _cleanup(cls, name, warn_message, ignore_errors=False): - cls._rmtree(name, ignore_errors=ignore_errors) - _warnings.warn(warn_message, ResourceWarning) + def _cleanup(cls, name, warn_message, ignore_errors=False, delete=True): + if delete: + cls._rmtree(name, ignore_errors=ignore_errors) + _warnings.warn(warn_message, ResourceWarning) def __repr__(self): return "<{} {!r}>".format(self.__class__.__name__, self.name) @@ -905,7 +915,8 @@ def __enter__(self): return self.name def __exit__(self, exc, value, tb): - self.cleanup() + if self._delete: + self.cleanup() def cleanup(self): if self._finalizer.detach() or _os.path.exists(self.name): diff --git a/Lib/test/_test_embed_structseq.py b/Lib/test/_test_embed_structseq.py index 868f9f83e8be77..834daa4df55fec 100644 --- a/Lib/test/_test_embed_structseq.py +++ b/Lib/test/_test_embed_structseq.py @@ -1,27 +1,31 @@ import sys import types -import unittest +# Note: This test file can't import `unittest` since the runtime can't +# currently guarantee that it will not leak memory. Doing so will mark +# the test as passing but with reference leaks. This can safely import +# the `unittest` library once there's a strict guarantee of no leaks +# during runtime shutdown. # bpo-46417: Test that structseq types used by the sys module are still # valid when Py_Finalize()/Py_Initialize() are called multiple times. 
-class TestStructSeq(unittest.TestCase): +class TestStructSeq: # test PyTypeObject members - def check_structseq(self, obj_type): + def _check_structseq(self, obj_type): # ob_refcnt - self.assertGreaterEqual(sys.getrefcount(obj_type), 1) + assert sys.getrefcount(obj_type) > 1 # tp_base - self.assertTrue(issubclass(obj_type, tuple)) + assert issubclass(obj_type, tuple) # tp_bases - self.assertEqual(obj_type.__bases__, (tuple,)) + assert obj_type.__bases__ == (tuple,) # tp_dict - self.assertIsInstance(obj_type.__dict__, types.MappingProxyType) + assert isinstance(obj_type.__dict__, types.MappingProxyType) # tp_mro - self.assertEqual(obj_type.__mro__, (obj_type, tuple, object)) + assert obj_type.__mro__ == (obj_type, tuple, object) # tp_name - self.assertIsInstance(type.__name__, str) + assert isinstance(type.__name__, str) # tp_subclasses - self.assertEqual(obj_type.__subclasses__(), []) + assert obj_type.__subclasses__() == [] def test_sys_attrs(self): for attr_name in ( @@ -32,23 +36,23 @@ def test_sys_attrs(self): 'thread_info', # ThreadInfoType 'version_info', # VersionInfoType ): - with self.subTest(attr=attr_name): - attr = getattr(sys, attr_name) - self.check_structseq(type(attr)) + attr = getattr(sys, attr_name) + self._check_structseq(type(attr)) def test_sys_funcs(self): func_names = ['get_asyncgen_hooks'] # AsyncGenHooksType if hasattr(sys, 'getwindowsversion'): func_names.append('getwindowsversion') # WindowsVersionType for func_name in func_names: - with self.subTest(func=func_name): - func = getattr(sys, func_name) - obj = func() - self.check_structseq(type(obj)) + func = getattr(sys, func_name) + obj = func() + self._check_structseq(type(obj)) try: - unittest.main() + tests = TestStructSeq() + tests.test_sys_attrs() + tests.test_sys_funcs() except SystemExit as exc: if exc.args[0] != 0: raise diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index 570f803918c1ef..c5eb6e7f1643ee 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -2437,7 +2437,8 @@ def test_utcfromtimestamp(self): ts = time.time() expected = time.gmtime(ts) - got = self.theclass.utcfromtimestamp(ts) + with self.assertWarns(DeprecationWarning): + got = self.theclass.utcfromtimestamp(ts) self.verify_field_equality(expected, got) # Run with US-style DST rules: DST begins 2 a.m. 
on second Sunday in @@ -2483,8 +2484,12 @@ def test_timestamp_aware(self): @support.run_with_tz('MSK-03') # Something east of Greenwich def test_microsecond_rounding(self): + def utcfromtimestamp(*args, **kwargs): + with self.assertWarns(DeprecationWarning): + return self.theclass.utcfromtimestamp(*args, **kwargs) + for fts in [self.theclass.fromtimestamp, - self.theclass.utcfromtimestamp]: + utcfromtimestamp]: zero = fts(0) self.assertEqual(zero.second, 0) self.assertEqual(zero.microsecond, 0) @@ -2581,10 +2586,11 @@ def test_fromtimestamp_limits(self): self.theclass.fromtimestamp(ts) def test_utcfromtimestamp_limits(self): - try: - self.theclass.utcfromtimestamp(-2**32 - 1) - except (OSError, OverflowError): - self.skipTest("Test not valid on this platform") + with self.assertWarns(DeprecationWarning): + try: + self.theclass.utcfromtimestamp(-2**32 - 1) + except (OSError, OverflowError): + self.skipTest("Test not valid on this platform") min_dt = self.theclass.min.replace(tzinfo=timezone.utc) min_ts = min_dt.timestamp() @@ -2597,10 +2603,11 @@ def test_utcfromtimestamp_limits(self): ("maximum", max_ts, max_dt.replace(tzinfo=None)), ]: with self.subTest(test_name, ts=ts, expected=expected): - try: - actual = self.theclass.utcfromtimestamp(ts) - except (OSError, OverflowError) as exc: - self.skipTest(str(exc)) + with self.assertWarns(DeprecationWarning): + try: + actual = self.theclass.utcfromtimestamp(ts) + except (OSError, OverflowError) as exc: + self.skipTest(str(exc)) self.assertEqual(actual, expected) @@ -2645,7 +2652,8 @@ def test_negative_float_fromtimestamp(self): @unittest.skipIf(sys.platform == "win32", "Windows doesn't accept negative timestamps") def test_negative_float_utcfromtimestamp(self): - d = self.theclass.utcfromtimestamp(-1.05) + with self.assertWarns(DeprecationWarning): + d = self.theclass.utcfromtimestamp(-1.05) self.assertEqual(d, self.theclass(1969, 12, 31, 23, 59, 58, 950000)) def test_utcnow(self): @@ -2655,8 +2663,11 @@ def test_utcnow(self): # a second of each other. tolerance = timedelta(seconds=1) for dummy in range(3): - from_now = self.theclass.utcnow() - from_timestamp = self.theclass.utcfromtimestamp(time.time()) + with self.assertWarns(DeprecationWarning): + from_now = self.theclass.utcnow() + + with self.assertWarns(DeprecationWarning): + from_timestamp = self.theclass.utcfromtimestamp(time.time()) if abs(from_timestamp - from_now) <= tolerance: break # Else try again a few times. 
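These test changes track the DeprecationWarning now raised by datetime.utcnow() and datetime.utcfromtimestamp(). Outside the test suite the replacements are the timezone-aware constructors, the same pattern this patch applies elsewhere (for example in testresult.py); a sketch, not taken from the patch:

from datetime import datetime, timezone

aware_now = datetime.now(timezone.utc)                 # replaces datetime.utcnow()
aware_epoch = datetime.fromtimestamp(0, timezone.utc)  # replaces utcfromtimestamp(0)

# Where a naive UTC value is still required, drop the tzinfo explicitly.
naive_now = aware_now.replace(tzinfo=None)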
@@ -2956,7 +2967,11 @@ def __new__(cls, *args, **kwargs): constr_name=constr_name): constructor = getattr(base_obj, constr_name) - dt = constructor(*constr_args) + if constr_name == "utcfromtimestamp": + with self.assertWarns(DeprecationWarning): + dt = constructor(*constr_args) + else: + dt = constructor(*constr_args) # Test that it creates the right subclass self.assertIsInstance(dt, DateTimeSubclass) @@ -2986,7 +3001,11 @@ def __new__(cls, *args, **kwargs): for name, meth_name, kwargs in test_cases: with self.subTest(name): constr = getattr(DateTimeSubclass, meth_name) - dt = constr(**kwargs) + if constr == "utcnow": + with self.assertWarns(DeprecationWarning): + dt = constr(**kwargs) + else: + dt = constr(**kwargs) self.assertIsInstance(dt, DateTimeSubclass) self.assertEqual(dt.extra, 7) @@ -4642,7 +4661,8 @@ def test_tzinfo_now(self): for dummy in range(3): now = datetime.now(weirdtz) self.assertIs(now.tzinfo, weirdtz) - utcnow = datetime.utcnow().replace(tzinfo=utc) + with self.assertWarns(DeprecationWarning): + utcnow = datetime.utcnow().replace(tzinfo=utc) now2 = utcnow.astimezone(weirdtz) if abs(now - now2) < timedelta(seconds=30): break @@ -4676,7 +4696,8 @@ def test_tzinfo_fromtimestamp(self): # Try to make sure tz= actually does some conversion. timestamp = 1000000000 - utcdatetime = datetime.utcfromtimestamp(timestamp) + with self.assertWarns(DeprecationWarning): + utcdatetime = datetime.utcfromtimestamp(timestamp) # In POSIX (epoch 1970), that's 2001-09-09 01:46:40 UTC, give or take. # But on some flavor of Mac, it's nowhere near that. So we can't have # any idea here what time that actually is, we can only test that @@ -4690,7 +4711,8 @@ def test_tzinfo_fromtimestamp(self): def test_tzinfo_utcnow(self): meth = self.theclass.utcnow # Ensure it doesn't require tzinfo (i.e., that this doesn't blow up). - base = meth() + with self.assertWarns(DeprecationWarning): + base = meth() # Try with and without naming the keyword; for whatever reason, # utcnow() doesn't accept a tzinfo argument. off42 = FixedOffset(42, "42") @@ -4702,7 +4724,8 @@ def test_tzinfo_utcfromtimestamp(self): meth = self.theclass.utcfromtimestamp ts = time.time() # Ensure it doesn't require tzinfo (i.e., that this doesn't blow up). - base = meth(ts) + with self.assertWarns(DeprecationWarning): + base = meth(ts) # Try with and without naming the keyword; for whatever reason, # utcfromtimestamp() doesn't accept a tzinfo argument. 
off42 = FixedOffset(42, "42") @@ -5309,7 +5332,7 @@ def dst(self, dt): def test_fromutc(self): self.assertRaises(TypeError, Eastern.fromutc) # not enough args - now = datetime.utcnow().replace(tzinfo=utc_real) + now = datetime.now(tz=utc_real) self.assertRaises(ValueError, Eastern.fromutc, now) # wrong tzinfo now = now.replace(tzinfo=Eastern) # insert correct tzinfo enow = Eastern.fromutc(now) # doesn't blow up @@ -5411,9 +5434,11 @@ def test_bug_1028306(self): self.assertEqual(datetime_sc, as_datetime) def test_extra_attributes(self): + with self.assertWarns(DeprecationWarning): + utcnow = datetime.utcnow() for x in [date.today(), time(), - datetime.utcnow(), + utcnow, timedelta(), tzinfo(), timezone(timedelta())]: @@ -6073,6 +6098,7 @@ def stats(cls, start_year=1): def transitions(self): for (_, prev_ti), (t, ti) in pairs(zip(self.ut, self.ti)): shift = ti[0] - prev_ti[0] + # TODO: Remove this use of utcfromtimestamp yield datetime.utcfromtimestamp(t), shift def nondst_folds(self): @@ -6212,6 +6238,10 @@ def test_system_transitions(self): ts1 = dt.replace(fold=1).timestamp() self.assertEqual(ts0, s0 + ss / 2) self.assertEqual(ts1, s0 - ss / 2) + # gh-83861 + utc0 = dt.astimezone(timezone.utc) + utc1 = dt.replace(fold=1).astimezone(timezone.utc) + self.assertEqual(utc0, utc1 + timedelta(0, ss)) finally: if TZ is None: del os.environ['TZ'] diff --git a/Lib/test/inspect_fodder.py b/Lib/test/inspect_fodder.py index e1287a315901cf..567dfbab804867 100644 --- a/Lib/test/inspect_fodder.py +++ b/Lib/test/inspect_fodder.py @@ -1,7 +1,7 @@ # line 1 'A module docstring.' -import sys, inspect +import inspect # line 5 # line 7 @@ -41,8 +41,8 @@ def abuse(self, a, b, c): def argue(self, a, b, c): try: spam(a, b, c) - except: - self.ex = sys.exc_info() + except BaseException as e: + self.ex = e self.tr = inspect.trace() @property @@ -78,8 +78,8 @@ async def lobbest(grenade): currentframe = inspect.currentframe() try: raise Exception() -except: - tb = sys.exc_info()[2] +except BaseException as e: + tb = e.__traceback__ class Callable: def __call__(self, *args): diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 19ccf2db5e7f06..3c3509d0303371 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -29,6 +29,14 @@ # Must be smaller than buildbot "1200 seconds without output" limit. EXIT_TIMEOUT = 120.0 +# gh-90681: When rerunning tests, we might need to rerun the whole +# class or module suite if some its life-cycle hooks fail. +# Test level hooks are not affected. 
+_TEST_LIFECYCLE_HOOKS = frozenset(( + 'setUpClass', 'tearDownClass', + 'setUpModule', 'tearDownModule', +)) + EXITCODE_BAD_TEST = 2 EXITCODE_INTERRUPTED = 130 EXITCODE_ENV_CHANGED = 3 @@ -337,8 +345,12 @@ def rerun_failed_tests(self): errors = result.errors or [] failures = result.failures or [] - error_names = [test_full_name.split(" ")[0] for (test_full_name, *_) in errors] - failure_names = [test_full_name.split(" ")[0] for (test_full_name, *_) in failures] + error_names = [ + self.normalize_test_name(test_full_name, is_error=True) + for (test_full_name, *_) in errors] + failure_names = [ + self.normalize_test_name(test_full_name) + for (test_full_name, *_) in failures] self.ns.verbose = True orig_match_tests = self.ns.match_tests if errors or failures: @@ -364,6 +376,21 @@ def rerun_failed_tests(self): self.display_result() + def normalize_test_name(self, test_full_name, *, is_error=False): + short_name = test_full_name.split(" ")[0] + if is_error and short_name in _TEST_LIFECYCLE_HOOKS: + # This means that we have a failure in a life-cycle hook, + # we need to rerun the whole module or class suite. + # Basically the error looks like this: + # ERROR: setUpClass (test.test_reg_ex.RegTest) + # or + # ERROR: setUpModule (test.test_reg_ex) + # So, we need to parse the class / module name. + lpar = test_full_name.index('(') + rpar = test_full_name.index(')') + return test_full_name[lpar + 1: rpar].split('.')[-1] + return short_name + def display_result(self): # If running the test suite for PGO then no one cares about results. if self.ns.pgo: diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index 4298fa806e1065..2de8c6cfbc61a1 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -73,9 +73,10 @@ def get_pooled_int(value): fd_deltas = [0] * repcount getallocatedblocks = sys.getallocatedblocks gettotalrefcount = sys.gettotalrefcount + getunicodeinternedsize = sys.getunicodeinternedsize fd_count = os_helper.fd_count # initialize variables to make pyflakes quiet - rc_before = alloc_before = fd_before = 0 + rc_before = alloc_before = fd_before = interned_before = 0 if not ns.quiet: print("beginning", repcount, "repetitions", file=sys.stderr) @@ -91,9 +92,13 @@ def get_pooled_int(value): dash_R_cleanup(fs, ps, pic, zdc, abcs) support.gc_collect() - # Read memory statistics immediately after the garbage collection - alloc_after = getallocatedblocks() - rc_after = gettotalrefcount() + # Read memory statistics immediately after the garbage collection. + # Also, readjust the reference counts and alloc blocks by ignoring + # any strings that might have been interned during test_func. 
These + # strings will be deallocated at runtime shutdown + interned_after = getunicodeinternedsize() + alloc_after = getallocatedblocks() - interned_after + rc_after = gettotalrefcount() - interned_after * 2 fd_after = fd_count() if not ns.quiet: @@ -106,6 +111,7 @@ def get_pooled_int(value): alloc_before = alloc_after rc_before = rc_after fd_before = fd_after + interned_before = interned_after if not ns.quiet: print(file=sys.stderr) diff --git a/Lib/test/libregrtest/runtest.py b/Lib/test/libregrtest/runtest.py index e9bb72a7d77ee1..61595277ed6d5a 100644 --- a/Lib/test/libregrtest/runtest.py +++ b/Lib/test/libregrtest/runtest.py @@ -143,6 +143,14 @@ def __str__(self) -> str: # set of tests that we don't want to be executed when using regrtest NOTTESTS = set() +#If these test directories are encountered recurse into them and treat each +# test_ .py or dir as a separate test module. This can increase parallelism. +# Beware this can't generally be done for any directory with sub-tests as the +# __init__.py may do things which alter what tests are to be run. + +SPLITTESTDIRS = { + "test_asyncio", +} # Storage of uncollectable objects FOUND_GARBAGE = [] @@ -158,7 +166,7 @@ def findtestdir(path=None): return path or os.path.dirname(os.path.dirname(__file__)) or os.curdir -def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS): +def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS, *, split_test_dirs=SPLITTESTDIRS, base_mod=""): """Return a list of all applicable test modules.""" testdir = findtestdir(testdir) names = os.listdir(testdir) @@ -166,8 +174,13 @@ def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS): others = set(stdtests) | nottests for name in names: mod, ext = os.path.splitext(name) - if mod[:5] == "test_" and ext in (".py", "") and mod not in others: - tests.append(mod) + if mod[:5] == "test_" and mod not in others: + if mod in split_test_dirs: + subdir = os.path.join(testdir, mod) + mod = f"{base_mod or 'test'}.{mod}" + tests.extend(findtests(subdir, [], nottests, split_test_dirs=split_test_dirs, base_mod=mod)) + elif ext in (".py", ""): + tests.append(f"{base_mod}.{mod}" if base_mod else mod) return stdtests + sorted(tests) diff --git a/Lib/test/profilee.py b/Lib/test/profilee.py index 6ad2c8395634fd..b6a090a2e34613 100644 --- a/Lib/test/profilee.py +++ b/Lib/test/profilee.py @@ -79,7 +79,7 @@ def helper1(): TICKS += 19 lst = [] lst.append(42) # 0 - sys.exc_info() # 0 + sys.exception() # 0 def helper2_indirect(): helper2() # 50 diff --git a/Lib/test/setup_testcppext.py b/Lib/test/setup_testcppext.py index c6b68104d1333c..22fe750085fd70 100644 --- a/Lib/test/setup_testcppext.py +++ b/Lib/test/setup_testcppext.py @@ -1,5 +1,6 @@ # gh-91321: Build a basic C++ test extension to check that the Python C API is # compatible with C++ and does not emit C++ compiler warnings. 
+import os import sys from test import support @@ -25,14 +26,8 @@ def main(): cppflags = list(CPPFLAGS) - if '-std=c++03' in sys.argv: - sys.argv.remove('-std=c++03') - std = 'c++03' - name = '_testcpp03ext' - else: - # Python currently targets C++11 - std = 'c++11' - name = '_testcpp11ext' + std = os.environ["CPYTHON_TEST_CPP_STD"] + name = os.environ["CPYTHON_TEST_EXT_NAME"] cppflags = [*CPPFLAGS, f'-std={std}'] diff --git a/Lib/test/setuptools-67.6.1-py3-none-any.whl b/Lib/test/setuptools-67.6.1-py3-none-any.whl new file mode 100644 index 00000000000000..4b7ffd2e49e155 Binary files /dev/null and b/Lib/test/setuptools-67.6.1-py3-none-any.whl differ diff --git a/Lib/test/shadowed_super.py b/Lib/test/shadowed_super.py new file mode 100644 index 00000000000000..2a62f667e93818 --- /dev/null +++ b/Lib/test/shadowed_super.py @@ -0,0 +1,7 @@ +class super: + msg = "truly super" + + +class C: + def method(self): + return super().msg diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index c309fd7910e0e6..d063837baee2de 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1108,7 +1108,7 @@ def _run_suite(suite): if junit_xml_list is not None: junit_xml_list.append(result.get_xml_element()) - if not result.testsRun and not result.skipped: + if not result.testsRun and not result.skipped and not result.errors: raise TestDidNotRun if not result.wasSuccessful(): if len(result.errors) == 1 and not result.failures: diff --git a/Lib/test/support/asyncore.py b/Lib/test/support/asyncore.py index 401fa60bcf35f2..b397aca5568079 100644 --- a/Lib/test/support/asyncore.py +++ b/Lib/test/support/asyncore.py @@ -537,10 +537,11 @@ def send(self, data): # --------------------------------------------------------------------------- def compact_traceback(): - t, v, tb = sys.exc_info() - tbinfo = [] + exc = sys.exception() + tb = exc.__traceback__ if not tb: # Must have a traceback raise AssertionError("traceback does not exist") + tbinfo = [] while tb: tbinfo.append(( tb.tb_frame.f_code.co_filename, @@ -554,7 +555,7 @@ def compact_traceback(): file, function, line = tbinfo[-1] info = ' '.join(['[%s|%s|%s]' % x for x in tbinfo]) - return (file, function, line), t, v, info + return (file, function, line), type(exc), exc, info def close_all(map=None, ignore_all=False): if map is None: diff --git a/Lib/test/support/bytecode_helper.py b/Lib/test/support/bytecode_helper.py index 1d9b889c920986..357ec44dbc218d 100644 --- a/Lib/test/support/bytecode_helper.py +++ b/Lib/test/support/bytecode_helper.py @@ -3,7 +3,7 @@ import unittest import dis import io -from _testinternalcapi import compiler_codegen, optimize_cfg +from _testinternalcapi import compiler_codegen, optimize_cfg, assemble_code_object _UNSPECIFIED = object() @@ -108,6 +108,18 @@ def normalize_insts(self, insts): res.append((opcode, arg, *loc)) return res + def complete_insts_info(self, insts): + # fill in omitted fields in location, and oparg 0 for ops with no arg. + res = [] + for item in insts: + assert isinstance(item, tuple) + inst = list(item) + opcode = dis.opmap[inst[0]] + oparg = inst[1] + loc = inst[2:] + [-1] * (6 - len(inst)) + res.append((opcode, oparg, *loc)) + return res + class CodegenTestCase(CompilationStepTestCase): @@ -118,20 +130,14 @@ def generate_code(self, ast): class CfgOptimizationTestCase(CompilationStepTestCase): - def complete_insts_info(self, insts): - # fill in omitted fields in location, and oparg 0 for ops with no arg. 
- res = [] - for item in insts: - assert isinstance(item, tuple) - inst = list(reversed(item)) - opcode = dis.opmap[inst.pop()] - oparg = inst.pop() - loc = inst + [-1] * (4 - len(inst)) - res.append((opcode, oparg, *loc)) - return res - def get_optimized(self, insts, consts): insts = self.normalize_insts(insts) insts = self.complete_insts_info(insts) insts = optimize_cfg(insts, consts) return insts, consts + +class AssemblerTestCase(CompilationStepTestCase): + + def get_code_object(self, filename, insts, metadata): + co = assemble_code_object(filename, insts, metadata) + return co diff --git a/Lib/test/support/testcase.py b/Lib/test/support/testcase.py new file mode 100644 index 00000000000000..1e4363b15783eb --- /dev/null +++ b/Lib/test/support/testcase.py @@ -0,0 +1,25 @@ +class ExceptionIsLikeMixin: + def assertExceptionIsLike(self, exc, template): + """ + Passes when the provided `exc` matches the structure of `template`. + Individual exceptions don't have to be the same objects or even pass + an equality test: they only need to be the same type and contain equal + `exc_obj.args`. + """ + if exc is None and template is None: + return + + if template is None: + self.fail(f"unexpected exception: {exc}") + + if exc is None: + self.fail(f"expected an exception like {template!r}, got None") + + if not isinstance(exc, ExceptionGroup): + self.assertEqual(exc.__class__, template.__class__) + self.assertEqual(exc.args[0], template.args[0]) + else: + self.assertEqual(exc.message, template.message) + self.assertEqual(len(exc.exceptions), len(template.exceptions)) + for e, t in zip(exc.exceptions, template.exceptions): + self.assertExceptionIsLike(e, t) diff --git a/Lib/test/support/testresult.py b/Lib/test/support/testresult.py index 2cd1366cd8a9e1..14474be222dc4b 100644 --- a/Lib/test/support/testresult.py +++ b/Lib/test/support/testresult.py @@ -18,10 +18,13 @@ def __init__(self, stream, descriptions, verbosity): self.buffer = True if self.USE_XML: from xml.etree import ElementTree as ET - from datetime import datetime + from datetime import datetime, UTC self.__ET = ET self.__suite = ET.Element('testsuite') - self.__suite.set('start', datetime.utcnow().isoformat(' ')) + self.__suite.set('start', + datetime.now(UTC) + .replace(tzinfo=None) + .isoformat(' ')) self.__e = None self.__start_time = None diff --git a/Lib/test/support/warnings_helper.py b/Lib/test/support/warnings_helper.py index 28e96f88b24441..c1bf0562300678 100644 --- a/Lib/test/support/warnings_helper.py +++ b/Lib/test/support/warnings_helper.py @@ -44,7 +44,7 @@ def check_syntax_warning(testcase, statement, errtext='', def ignore_warnings(*, category): - """Decorator to suppress deprecation warnings. + """Decorator to suppress warnings. Use of context managers to hide warnings make diffs more noisy and tools like 'git blame' less useful. 
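The ignore_warnings docstring above points tests at the decorator form rather than a with-block. A minimal sketch of that usage, assuming a CPython checkout where test.support.warnings_helper is importable; the test class and method names are invented for illustration, and only the ignore_warnings(*, category) signature comes from the code above:

    import unittest
    import warnings

    from test.support.warnings_helper import ignore_warnings

    class ExampleTests(unittest.TestCase):
        # Hypothetical test: the decorator silences the warning for the whole
        # method, so no context manager clutters the test body (or its diff).
        @ignore_warnings(category=DeprecationWarning)
        def test_calls_deprecated_api(self):
            warnings.warn("old API", DeprecationWarning)

    if __name__ == "__main__":
        unittest.main()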
diff --git a/Lib/test/test__opcode.py b/Lib/test/test__opcode.py index fb4ab15f7041ed..7640c6fb57d4f3 100644 --- a/Lib/test/test__opcode.py +++ b/Lib/test/test__opcode.py @@ -20,6 +20,8 @@ def test_stack_effect(self): # All defined opcodes has_arg = dis.hasarg for name, code in filter(lambda item: item[0] not in dis.deoptmap, dis.opmap.items()): + if code >= opcode.MIN_INSTRUMENTED_OPCODE: + continue with self.subTest(opname=name): if code not in has_arg: stack_effect(code) @@ -34,10 +36,6 @@ def test_stack_effect(self): self.assertRaises(ValueError, stack_effect, code, 0) def test_stack_effect_jump(self): - JUMP_IF_TRUE_OR_POP = dis.opmap['JUMP_IF_TRUE_OR_POP'] - self.assertEqual(stack_effect(JUMP_IF_TRUE_OR_POP, 0), 0) - self.assertEqual(stack_effect(JUMP_IF_TRUE_OR_POP, 0, jump=True), 0) - self.assertEqual(stack_effect(JUMP_IF_TRUE_OR_POP, 0, jump=False), -1) FOR_ITER = dis.opmap['FOR_ITER'] self.assertEqual(stack_effect(FOR_ITER, 0), 1) self.assertEqual(stack_effect(FOR_ITER, 0, jump=True), 1) @@ -51,6 +49,8 @@ def test_stack_effect_jump(self): has_exc = dis.hasexc has_jump = dis.hasjabs + dis.hasjrel for name, code in filter(lambda item: item[0] not in dis.deoptmap, dis.opmap.items()): + if code >= opcode.MIN_INSTRUMENTED_OPCODE: + continue with self.subTest(opname=name): if code not in has_arg: common = stack_effect(code) diff --git a/Lib/test/test__xxinterpchannels.py b/Lib/test/test__xxinterpchannels.py index 69bda89a1688f5..750cd99b85e7a6 100644 --- a/Lib/test/test__xxinterpchannels.py +++ b/Lib/test/test__xxinterpchannels.py @@ -550,6 +550,7 @@ def test_channel_list_interpreters_closed_send_end(self): import _xxinterpchannels as _channels _channels.close({cid}, force=True) """)) + return # Both ends should raise an error. with self.assertRaises(channels.ChannelClosedError): channels.list_interpreters(cid, send=True) @@ -673,17 +674,34 @@ def test_recv_default(self): self.assertIs(obj6, default) def test_recv_sending_interp_destroyed(self): - cid = channels.create() - interp = interpreters.create() - interpreters.run_string(interp, dedent(f""" - import _xxinterpchannels as _channels - _channels.send({cid}, b'spam') - """)) - interpreters.destroy(interp) + with self.subTest('closed'): + cid1 = channels.create() + interp = interpreters.create() + interpreters.run_string(interp, dedent(f""" + import _xxinterpchannels as _channels + _channels.send({cid1}, b'spam') + """)) + interpreters.destroy(interp) + + with self.assertRaisesRegex(RuntimeError, + f'channel {cid1} is closed'): + channels.recv(cid1) + del cid1 + with self.subTest('still open'): + cid2 = channels.create() + interp = interpreters.create() + interpreters.run_string(interp, dedent(f""" + import _xxinterpchannels as _channels + _channels.send({cid2}, b'spam') + """)) + channels.send(cid2, b'eggs') + interpreters.destroy(interp) - with self.assertRaisesRegex(RuntimeError, - 'unrecognized interpreter ID'): - channels.recv(cid) + channels.recv(cid2) + with self.assertRaisesRegex(RuntimeError, + f'channel {cid2} is empty'): + channels.recv(cid2) + del cid2 def test_allowed_types(self): cid = channels.create() @@ -1451,19 +1469,19 @@ def _assert_closed_in_interp(self, fix, interp=None): with self.assertRaises(channels.ChannelClosedError): channels.close(fix.cid, force=True) else: - run_interp(interp.id, f""" + run_interp(interp.id, """ with helpers.expect_channel_closed(): channels.recv(cid) """) - run_interp(interp.id, f""" + run_interp(interp.id, """ with helpers.expect_channel_closed(): channels.send(cid, b'spam') """) - 
run_interp(interp.id, f""" + run_interp(interp.id, """ with helpers.expect_channel_closed(): channels.close(cid) """) - run_interp(interp.id, f""" + run_interp(interp.id, """ with helpers.expect_channel_closed(): channels.close(cid, force=True) """) diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 965967e3f2734b..1ee18774d17209 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -798,7 +798,7 @@ def test_shared_overwrites(self): """)) shared = {'spam': b'ham'} - script = dedent(f""" + script = dedent(""" ns2 = dict(vars()) del ns2['__builtins__'] """) @@ -902,7 +902,7 @@ def test_execution_namespace_is_main(self): # XXX Fix this test! @unittest.skip('blocking forever') def test_still_running_at_exit(self): - script = dedent(f""" + script = dedent(""" from textwrap import dedent import threading import _xxsubinterpreters as _interpreters diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 6c932e1305e1dd..8eef7baec70118 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -774,11 +774,6 @@ def test_parenthesized_with_feature_version(self): ast.parse('with (CtxManager() as example): ...', feature_version=(3, 8)) ast.parse('with CtxManager() as example: ...', feature_version=(3, 8)) - def test_debug_f_string_feature_version(self): - ast.parse('f"{x=}"', feature_version=(3, 8)) - with self.assertRaises(SyntaxError): - ast.parse('f"{x=}"', feature_version=(3, 7)) - def test_assignment_expression_feature_version(self): ast.parse('(x := 0)', feature_version=(3, 8)) with self.assertRaises(SyntaxError): @@ -2298,6 +2293,17 @@ class C: cdef = ast.parse(s).body[0] self.assertEqual(ast.get_source_segment(s, cdef.body[0], padded=True), s_method) + def test_source_segment_newlines(self): + s = 'def f():\n pass\ndef g():\r pass\r\ndef h():\r\n pass\r\n' + f, g, h = ast.parse(s).body + self._check_content(s, f, 'def f():\n pass') + self._check_content(s, g, 'def g():\r pass') + self._check_content(s, h, 'def h():\r\n pass') + + s = 'def f():\n a = 1\r b = 2\r\n c = 3\n' + f = ast.parse(s).body[0] + self._check_content(s, f, s.rstrip()) + def test_source_segment_missing_info(self): s = 'v = 1\r\nw = 1\nx = 1\n\ry = 1\r\n' v, w, x, y = ast.parse(s).body diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py new file mode 100644 index 00000000000000..fe690934292a86 --- /dev/null +++ b/Lib/test/test_asyncio/test_eager_task_factory.py @@ -0,0 +1,344 @@ +"""Tests for base_events.py""" + +import asyncio +import contextvars +import gc +import time +import unittest + +from types import GenericAlias +from unittest import mock +from asyncio import base_events +from asyncio import tasks +from test.test_asyncio import utils as test_utils +from test.test_asyncio.test_tasks import get_innermost_context +from test import support + +MOCK_ANY = mock.ANY + + +def tearDownModule(): + asyncio.set_event_loop_policy(None) + + +class EagerTaskFactoryLoopTests: + + Task = None + + def run_coro(self, coro): + """ + Helper method to run the `coro` coroutine in the test event loop. + It helps with making sure the event loop is running before starting + to execute `coro`. This is important for testing the eager step + functionality, since an eager step is taken only if the event loop + is already running. 
+ """ + + async def coro_runner(): + self.assertTrue(asyncio.get_event_loop().is_running()) + return await coro + + return self.loop.run_until_complete(coro) + + def setUp(self): + super().setUp() + self.loop = asyncio.new_event_loop() + self.eager_task_factory = asyncio.create_eager_task_factory(self.Task) + self.loop.set_task_factory(self.eager_task_factory) + self.set_event_loop(self.loop) + + def test_eager_task_factory_set(self): + self.assertIsNotNone(self.eager_task_factory) + self.assertIs(self.loop.get_task_factory(), self.eager_task_factory) + + async def noop(): pass + + async def run(): + t = self.loop.create_task(noop()) + self.assertIsInstance(t, self.Task) + await t + + self.run_coro(run()) + + def test_await_future_during_eager_step(self): + + async def set_result(fut, val): + fut.set_result(val) + + async def run(): + fut = self.loop.create_future() + t = self.loop.create_task(set_result(fut, 'my message')) + # assert the eager step completed the task + self.assertTrue(t.done()) + return await fut + + self.assertEqual(self.run_coro(run()), 'my message') + + def test_eager_completion(self): + + async def coro(): + return 'hello' + + async def run(): + t = self.loop.create_task(coro()) + # assert the eager step completed the task + self.assertTrue(t.done()) + return await t + + self.assertEqual(self.run_coro(run()), 'hello') + + def test_block_after_eager_step(self): + + async def coro(): + await asyncio.sleep(0.1) + return 'finished after blocking' + + async def run(): + t = self.loop.create_task(coro()) + self.assertFalse(t.done()) + result = await t + self.assertTrue(t.done()) + return result + + self.assertEqual(self.run_coro(run()), 'finished after blocking') + + def test_cancellation_after_eager_completion(self): + + async def coro(): + return 'finished without blocking' + + async def run(): + t = self.loop.create_task(coro()) + t.cancel() + result = await t + # finished task can't be cancelled + self.assertFalse(t.cancelled()) + return result + + self.assertEqual(self.run_coro(run()), 'finished without blocking') + + def test_cancellation_after_eager_step_blocks(self): + + async def coro(): + await asyncio.sleep(0.1) + return 'finished after blocking' + + async def run(): + t = self.loop.create_task(coro()) + t.cancel('cancellation message') + self.assertGreater(t.cancelling(), 0) + result = await t + + with self.assertRaises(asyncio.CancelledError) as cm: + self.run_coro(run()) + + self.assertEqual('cancellation message', cm.exception.args[0]) + + def test_current_task(self): + captured_current_task = None + + async def coro(): + nonlocal captured_current_task + captured_current_task = asyncio.current_task() + # verify the task before and after blocking is identical + await asyncio.sleep(0.1) + self.assertIs(asyncio.current_task(), captured_current_task) + + async def run(): + t = self.loop.create_task(coro()) + self.assertIs(captured_current_task, t) + await t + + self.run_coro(run()) + captured_current_task = None + + def test_all_tasks_with_eager_completion(self): + captured_all_tasks = None + + async def coro(): + nonlocal captured_all_tasks + captured_all_tasks = asyncio.all_tasks() + + async def run(): + t = self.loop.create_task(coro()) + self.assertIn(t, captured_all_tasks) + self.assertNotIn(t, asyncio.all_tasks()) + + self.run_coro(run()) + + def test_all_tasks_with_blocking(self): + captured_eager_all_tasks = None + + async def coro(fut1, fut2): + nonlocal captured_eager_all_tasks + captured_eager_all_tasks = asyncio.all_tasks() + await fut1 + 
fut2.set_result(None) + + async def run(): + fut1 = self.loop.create_future() + fut2 = self.loop.create_future() + t = self.loop.create_task(coro(fut1, fut2)) + self.assertIn(t, captured_eager_all_tasks) + self.assertIn(t, asyncio.all_tasks()) + fut1.set_result(None) + await fut2 + self.assertNotIn(t, asyncio.all_tasks()) + + self.run_coro(run()) + + def test_context_vars(self): + cv = contextvars.ContextVar('cv', default=0) + + coro_first_step_ran = False + coro_second_step_ran = False + + async def coro(): + nonlocal coro_first_step_ran + nonlocal coro_second_step_ran + self.assertEqual(cv.get(), 1) + cv.set(2) + self.assertEqual(cv.get(), 2) + coro_first_step_ran = True + await asyncio.sleep(0.1) + self.assertEqual(cv.get(), 2) + cv.set(3) + self.assertEqual(cv.get(), 3) + coro_second_step_ran = True + + async def run(): + cv.set(1) + t = self.loop.create_task(coro()) + self.assertTrue(coro_first_step_ran) + self.assertFalse(coro_second_step_ran) + self.assertEqual(cv.get(), 1) + await t + self.assertTrue(coro_second_step_ran) + self.assertEqual(cv.get(), 1) + + self.run_coro(run()) + + +class PyEagerTaskFactoryLoopTests(EagerTaskFactoryLoopTests, test_utils.TestCase): + Task = tasks._PyTask + + +@unittest.skipUnless(hasattr(tasks, '_CTask'), + 'requires the C _asyncio module') +class CEagerTaskFactoryLoopTests(EagerTaskFactoryLoopTests, test_utils.TestCase): + Task = getattr(tasks, '_CTask', None) + + +class AsyncTaskCounter: + def __init__(self, loop, *, task_class, eager): + self.suspense_count = 0 + self.task_count = 0 + + def CountingTask(*args, eager_start=False, **kwargs): + if not eager_start: + self.task_count += 1 + kwargs["eager_start"] = eager_start + return task_class(*args, **kwargs) + + if eager: + factory = asyncio.create_eager_task_factory(CountingTask) + else: + def factory(loop, coro, **kwargs): + return CountingTask(coro, loop=loop, **kwargs) + loop.set_task_factory(factory) + + def get(self): + return self.task_count + + +async def awaitable_chain(depth): + if depth == 0: + return 0 + return 1 + await awaitable_chain(depth - 1) + + +async def recursive_taskgroups(width, depth): + if depth == 0: + return + + async with asyncio.TaskGroup() as tg: + futures = [ + tg.create_task(recursive_taskgroups(width, depth - 1)) + for _ in range(width) + ] + + +async def recursive_gather(width, depth): + if depth == 0: + return + + await asyncio.gather( + *[recursive_gather(width, depth - 1) for _ in range(width)] + ) + + +class BaseTaskCountingTests: + + Task = None + eager = None + expected_task_count = None + + def setUp(self): + super().setUp() + self.loop = asyncio.new_event_loop() + self.counter = AsyncTaskCounter(self.loop, task_class=self.Task, eager=self.eager) + self.set_event_loop(self.loop) + + def test_awaitables_chain(self): + observed_depth = self.loop.run_until_complete(awaitable_chain(100)) + self.assertEqual(observed_depth, 100) + self.assertEqual(self.counter.get(), 0 if self.eager else 1) + + def test_recursive_taskgroups(self): + num_tasks = self.loop.run_until_complete(recursive_taskgroups(5, 4)) + self.assertEqual(self.counter.get(), self.expected_task_count) + + def test_recursive_gather(self): + self.loop.run_until_complete(recursive_gather(5, 4)) + self.assertEqual(self.counter.get(), self.expected_task_count) + + +class BaseNonEagerTaskFactoryTests(BaseTaskCountingTests): + eager = False + expected_task_count = 781 # 1 + 5 + 5^2 + 5^3 + 5^4 + + +class BaseEagerTaskFactoryTests(BaseTaskCountingTests): + eager = True + expected_task_count = 0 + + +class 
NonEagerTests(BaseNonEagerTaskFactoryTests, test_utils.TestCase): + Task = asyncio.Task + + +class EagerTests(BaseEagerTaskFactoryTests, test_utils.TestCase): + Task = asyncio.Task + + +class NonEagerPyTaskTests(BaseNonEagerTaskFactoryTests, test_utils.TestCase): + Task = tasks._PyTask + + +class EagerPyTaskTests(BaseEagerTaskFactoryTests, test_utils.TestCase): + Task = tasks._PyTask + + +@unittest.skipUnless(hasattr(tasks, '_CTask'), + 'requires the C _asyncio module') +class NonEagerCTaskTests(BaseNonEagerTaskFactoryTests, test_utils.TestCase): + Task = getattr(tasks, '_CTask', None) + + +@unittest.skipUnless(hasattr(tasks, '_CTask'), + 'requires the C _asyncio module') +class EagerCTaskTests(BaseEagerTaskFactoryTests, test_utils.TestCase): + Task = getattr(tasks, '_CTask', None) + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index 6cb7dc300c5331..c42856e578b8cc 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -447,6 +447,19 @@ def monkey(): self.assertFalse(tr.is_reading()) + def test_pause_reading_connection_made(self): + tr = self.socket_transport() + self.protocol.connection_made.side_effect = lambda _: tr.pause_reading() + test_utils.run_briefly(self.loop) + self.assertFalse(tr.is_reading()) + self.loop.assert_no_reader(7) + + tr.resume_reading() + self.assertTrue(tr.is_reading()) + + tr.close() + self.assertFalse(tr.is_reading()) + def pause_writing_transport(self, high): tr = self.socket_transport() diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index 921c98a2702d76..47693ea4d3ce2e 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -547,6 +547,22 @@ def test_pause_resume_reading(self): self.assertFalse(tr.is_reading()) self.loop.assert_no_reader(7) + def test_pause_reading_connection_made(self): + tr = self.socket_transport() + self.protocol.connection_made.side_effect = lambda _: tr.pause_reading() + test_utils.run_briefly(self.loop) + self.assertFalse(tr.is_reading()) + self.loop.assert_no_reader(7) + + tr.resume_reading() + self.assertTrue(tr.is_reading()) + self.loop.assert_reader(7, tr._read_ready) + + tr.close() + self.assertFalse(tr.is_reading()) + self.loop.assert_no_reader(7) + + def test_read_eof_received_error(self): transport = self.socket_transport() transport.close = mock.Mock() @@ -747,6 +763,48 @@ def test_write_sendmsg_no_data(self): self.assertFalse(self.sock.sendmsg.called) self.assertEqual(list_to_buffer([b'data']), transport._buffer) + @unittest.skipUnless(selector_events._HAS_SENDMSG, 'no sendmsg') + def test_writelines_sendmsg_full(self): + data = memoryview(b'data') + self.sock.sendmsg = mock.Mock() + self.sock.sendmsg.return_value = len(data) + + transport = self.socket_transport(sendmsg=True) + transport.writelines([data]) + self.assertTrue(self.sock.sendmsg.called) + self.assertFalse(self.loop.writers) + + @unittest.skipUnless(selector_events._HAS_SENDMSG, 'no sendmsg') + def test_writelines_sendmsg_partial(self): + data = memoryview(b'data') + self.sock.sendmsg = mock.Mock() + self.sock.sendmsg.return_value = 2 + + transport = self.socket_transport(sendmsg=True) + transport.writelines([data]) + self.assertTrue(self.sock.sendmsg.called) + self.assertTrue(self.loop.writers) + + def test_writelines_send_full(self): + data = memoryview(b'data') + 
self.sock.send.return_value = len(data) + self.sock.send.fileno.return_value = 7 + + transport = self.socket_transport() + transport.writelines([data]) + self.assertTrue(self.sock.send.called) + self.assertFalse(self.loop.writers) + + def test_writelines_send_partial(self): + data = memoryview(b'data') + self.sock.send.return_value = 2 + self.sock.send.fileno.return_value = 7 + + transport = self.socket_transport() + transport.writelines([data]) + self.assertTrue(self.sock.send.called) + self.assertTrue(self.loop.writers) + @unittest.skipUnless(selector_events._HAS_SENDMSG, 'no sendmsg') def test_write_sendmsg_full(self): data = memoryview(b'data') diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index eba6e2d1f28f3e..eeeca40c15cd28 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -151,6 +151,24 @@ async def run(data): self.assertEqual(exitcode, 0) self.assertEqual(stdout, b'some data') + def test_communicate_none_input(self): + args = PROGRAM_CAT + + async def run(): + proc = await asyncio.create_subprocess_exec( + *args, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + stdout, stderr = await proc.communicate() + return proc.returncode, stdout + + task = run() + task = asyncio.wait_for(task, support.LONG_TIMEOUT) + exitcode, stdout = self.loop.run_until_complete(task) + self.assertEqual(exitcode, 0) + self.assertEqual(stdout, b'') + def test_shell(self): proc = self.loop.run_until_complete( asyncio.create_subprocess_shell('exit 7') diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 731fa0c5a60b9b..6e8a51ce2555d5 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -399,6 +399,18 @@ async def notmuch(): self.loop.run_until_complete(t1) self.loop.run_until_complete(t2) + def test_task_set_name_pylong(self): + # test that setting the task name to a PyLong explicitly doesn't + # incorrectly trigger the deferred name formatting logic + async def notmuch(): + return 123 + + t = self.new_task(self.loop, notmuch(), name=987654321) + self.assertEqual(t.get_name(), '987654321') + t.set_name(123456789) + self.assertEqual(t.get_name(), '123456789') + self.loop.run_until_complete(t) + def test_task_repr_name_not_str(self): async def notmuch(): return 123 @@ -606,7 +618,7 @@ def on_timeout(): if ( timed_out and task.uncancel() == 0 - and sys.exc_info()[0] is asyncio.CancelledError + and type(sys.exception()) is asyncio.CancelledError ): # Note the five rules that are needed here to satisfy proper # uncancellation: diff --git a/Lib/test/test_asyncio/test_timeouts.py b/Lib/test/test_asyncio/test_timeouts.py index b9bac6f783776b..8b6b9a1fea0be8 100644 --- a/Lib/test/test_asyncio/test_timeouts.py +++ b/Lib/test/test_asyncio/test_timeouts.py @@ -247,6 +247,36 @@ async def test_nested_timeout_in_finally(self): async with asyncio.timeout(0.01): await asyncio.sleep(10) + async def test_timeout_after_cancellation(self): + try: + asyncio.current_task().cancel() + await asyncio.sleep(1) # work which will be cancelled + except asyncio.CancelledError: + pass + finally: + with self.assertRaises(TimeoutError): + async with asyncio.timeout(0.0): + await asyncio.sleep(1) # some cleanup + + async def test_cancel_in_timeout_after_cancellation(self): + try: + asyncio.current_task().cancel() + await asyncio.sleep(1) # work which will be cancelled + except asyncio.CancelledError: + pass + finally: + with 
self.assertRaises(asyncio.CancelledError): + async with asyncio.timeout(1.0): + asyncio.current_task().cancel() + await asyncio.sleep(2) # some cleanup + + async def test_timeout_exception_cause (self): + with self.assertRaises(asyncio.TimeoutError) as exc: + async with asyncio.timeout(0): + await asyncio.sleep(1) + cause = exc.exception.__cause__ + assert isinstance(cause, asyncio.CancelledError) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 33d0ea15c6de0e..cdf3eaac68af15 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -1712,11 +1712,11 @@ class PolicyTests(unittest.TestCase): def create_policy(self): return asyncio.DefaultEventLoopPolicy() - def test_get_default_child_watcher(self): + @mock.patch('asyncio.unix_events.can_use_pidfd') + def test_get_default_child_watcher(self, m_can_use_pidfd): + m_can_use_pidfd.return_value = False policy = self.create_policy() self.assertIsNone(policy._watcher) - unix_events.can_use_pidfd = mock.Mock() - unix_events.can_use_pidfd.return_value = False with self.assertWarns(DeprecationWarning): watcher = policy.get_child_watcher() self.assertIsInstance(watcher, asyncio.ThreadedChildWatcher) @@ -1725,10 +1725,9 @@ def test_get_default_child_watcher(self): with self.assertWarns(DeprecationWarning): self.assertIs(watcher, policy.get_child_watcher()) + m_can_use_pidfd.return_value = True policy = self.create_policy() self.assertIsNone(policy._watcher) - unix_events.can_use_pidfd = mock.Mock() - unix_events.can_use_pidfd.return_value = True with self.assertWarns(DeprecationWarning): watcher = policy.get_child_watcher() self.assertIsInstance(watcher, asyncio.PidfdChildWatcher) @@ -1889,8 +1888,8 @@ async def test_fork_not_share_event_loop(self): os.write(w, b'LOOP:' + str(id(loop)).encode()) except RuntimeError: os.write(w, b'NO LOOP') - except: - os.write(w, b'ERROR:' + ascii(sys.exc_info()).encode()) + except BaseException as e: + os.write(w, b'ERROR:' + ascii(e).encode()) finally: os._exit(0) else: diff --git a/Lib/test/test_asyncio/utils.py b/Lib/test/test_asyncio/utils.py index 5b9c86eb9859a0..6dee5bb33b2560 100644 --- a/Lib/test/test_asyncio/utils.py +++ b/Lib/test/test_asyncio/utils.py @@ -577,7 +577,7 @@ def tearDown(self): # Detect CPython bug #23353: ensure that yield/yield-from is not used # in an except block of a generator - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) self.doCleanups() threading_helper.threading_cleanup(*self._thread_cleanup) diff --git a/Lib/test/test_bdb.py b/Lib/test/test_bdb.py index 042c2daea7f797..568c88e326c087 100644 --- a/Lib/test/test_bdb.py +++ b/Lib/test/test_bdb.py @@ -433,8 +433,9 @@ def __exit__(self, type_=None, value=None, traceback=None): not_empty = '' if self.tracer.set_list: not_empty += 'All paired tuples have not been processed, ' - not_empty += ('the last one was number %d' % + not_empty += ('the last one was number %d\n' % self.tracer.expect_set_no) + not_empty += repr(self.tracer.set_list) # Make a BdbNotExpectedError a unittest failure. 
if type_ is not None and issubclass(BdbNotExpectedError, type_): @@ -1206,7 +1207,8 @@ def main(): class TestRegressions(unittest.TestCase): def test_format_stack_entry_no_lineno(self): # See gh-101517 - Bdb().format_stack_entry((sys._getframe(), None)) + self.assertIn('Warning: lineno is None', + Bdb().format_stack_entry((sys._getframe(), None))) if __name__ == "__main__": diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py index 8ac3b7e7eb29d1..098d2d999643cb 100644 --- a/Lib/test/test_buffer.py +++ b/Lib/test/test_buffer.py @@ -965,8 +965,10 @@ def check_memoryview(m, expected_readonly=readonly): self.assertEqual(m.strides, tuple(strides)) self.assertEqual(m.suboffsets, tuple(suboffsets)) - n = 1 if ndim == 0 else len(lst) - self.assertEqual(len(m), n) + if ndim == 0: + self.assertRaises(TypeError, len, m) + else: + self.assertEqual(len(m), len(lst)) rep = result.tolist() if fmt else result.tobytes() self.assertEqual(rep, lst) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index e7a79bc13b7f3d..04dd8ff3070c99 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -28,7 +28,7 @@ from types import AsyncGeneratorType, FunctionType, CellType from operator import neg from test import support -from test.support import (swap_attr, maybe_get_event_loop_policy) +from test.support import (cpython_only, swap_attr, maybe_get_event_loop_policy) from test.support.os_helper import (EnvironmentVarGuard, TESTFN, unlink) from test.support.script_helper import assert_python_ok from test.support.warnings_helper import check_warnings @@ -2370,6 +2370,28 @@ def __del__(self): self.assertEqual(["before", "after"], out.decode().splitlines()) +@cpython_only +class ImmortalTests(unittest.TestCase): + def test_immortal(self): + none_refcount = sys.getrefcount(None) + true_refcount = sys.getrefcount(True) + false_refcount = sys.getrefcount(False) + smallint_refcount = sys.getrefcount(100) + + # Assert that all of these immortal instances have large ref counts. + self.assertGreater(none_refcount, 2 ** 15) + self.assertGreater(true_refcount, 2 ** 15) + self.assertGreater(false_refcount, 2 ** 15) + self.assertGreater(smallint_refcount, 2 ** 15) + + # Confirm that the refcount doesn't change even with a new ref to them. 
+ l = [None, True, False, 100] + self.assertEqual(sys.getrefcount(None), none_refcount) + self.assertEqual(sys.getrefcount(True), true_refcount) + self.assertEqual(sys.getrefcount(False), false_refcount) + self.assertEqual(sys.getrefcount(100), smallint_refcount) + + class TestType(unittest.TestCase): def test_new_type(self): A = type('A', (), {}) diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py index ccfbeede0be949..24e472b5fee828 100644 --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -8,6 +8,7 @@ import sys import datetime import os +import warnings # From https://en.wikipedia.org/wiki/Leap_year_starting_on_Saturday result_0_02_text = """\ @@ -490,6 +491,14 @@ def test_format(self): self.assertEqual(out.getvalue().strip(), "1 2 3") class CalendarTestCase(unittest.TestCase): + + def test_deprecation_warning(self): + with self.assertWarnsRegex( + DeprecationWarning, + "The 'January' attribute is deprecated, use 'JANUARY' instead" + ): + calendar.January + def test_isleap(self): # Make sure that the return is right for a few years, and # ensure that the return values are 1 or 0, not just true or diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index c34ee578b5c83f..9470cf12a7d1c4 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -21,7 +21,7 @@ from test.support import import_helper from test.support import threading_helper from test.support import warnings_helper -from test.support.script_helper import assert_python_failure, assert_python_ok +from test.support.script_helper import assert_python_failure, assert_python_ok, run_python_until_end try: import _posixsubprocess except ImportError: @@ -69,21 +69,17 @@ def test_instancemethod(self): @support.requires_subprocess() def test_no_FatalError_infinite_loop(self): - with support.SuppressCrashReport(): - p = subprocess.Popen([sys.executable, "-c", - 'import _testcapi;' - '_testcapi.crash_no_current_thread()'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True) - (out, err) = p.communicate() - self.assertEqual(out, '') + run_result, _cmd_line = run_python_until_end( + '-c', 'import _testcapi; _testcapi.crash_no_current_thread()', + ) + _rc, out, err = run_result + self.assertEqual(out, b'') # This used to cause an infinite loop. 
msg = ("Fatal Python error: PyThreadState_Get: " "the function must be called with the GIL held, " "after Python initialization and before Python finalization, " "but the GIL is released " - "(the current Python thread state is NULL)") + "(the current Python thread state is NULL)").encode() self.assertTrue(err.rstrip().startswith(msg), err) @@ -1215,20 +1211,25 @@ def test_configured_settings(self): """ import json + OBMALLOC = 1<<5 EXTENSIONS = 1<<8 THREADS = 1<<10 DAEMON_THREADS = 1<<11 FORK = 1<<15 EXEC = 1<<16 - features = ['fork', 'exec', 'threads', 'daemon_threads', 'extensions'] + features = ['obmalloc', 'fork', 'exec', 'threads', 'daemon_threads', + 'extensions'] kwlist = [f'allow_{n}' for n in features] + kwlist[0] = 'use_main_obmalloc' kwlist[-1] = 'check_multi_interp_extensions' + + # expected to work for config, expected in { - (True, True, True, True, True): - FORK | EXEC | THREADS | DAEMON_THREADS | EXTENSIONS, - (False, False, False, False, False): 0, - (False, False, True, False, True): THREADS | EXTENSIONS, + (True, True, True, True, True, True): + OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS | EXTENSIONS, + (True, False, False, False, False, False): OBMALLOC, + (False, False, False, True, False, True): THREADS | EXTENSIONS, }.items(): kwargs = dict(zip(kwlist, config)) expected = { @@ -1250,6 +1251,20 @@ def test_configured_settings(self): self.assertEqual(settings, expected) + # expected to fail + for config in [ + (False, False, False, False, False, False), + ]: + kwargs = dict(zip(kwlist, config)) + with self.subTest(config): + script = textwrap.dedent(f''' + import _testinternalcapi + _testinternalcapi.get_interp_settings() + raise NotImplementedError('unreachable') + ''') + with self.assertRaises(RuntimeError): + support.run_in_subinterp_with_config(script, **kwargs) + @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module") @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") def test_overridden_setting_extensions_subinterp_check(self): @@ -1261,13 +1276,15 @@ def test_overridden_setting_extensions_subinterp_check(self): """ import json + OBMALLOC = 1<<5 EXTENSIONS = 1<<8 THREADS = 1<<10 DAEMON_THREADS = 1<<11 FORK = 1<<15 EXEC = 1<<16 - BASE_FLAGS = FORK | EXEC | THREADS | DAEMON_THREADS + BASE_FLAGS = OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS base_kwargs = { + 'use_main_obmalloc': True, 'allow_fork': True, 'allow_exec': True, 'allow_threads': True, @@ -1404,7 +1421,7 @@ def callback(): @threading_helper.requires_working_threading() def test_gilstate_ensure_no_deadlock(self): # See https://github.com/python/cpython/issues/96071 - code = textwrap.dedent(f""" + code = textwrap.dedent(""" import _testcapi def callback(): diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py index 61df81b169775e..d7a48e55b10180 100644 --- a/Lib/test/test_class.py +++ b/Lib/test/test_class.py @@ -457,7 +457,7 @@ def __init__(self): a = A() self.assertEqual(_testcapi.hasattr_string(a, "attr"), True) self.assertEqual(_testcapi.hasattr_string(a, "noattr"), False) - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def testDel(self): x = [] diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index f10d72ea5547ee..d98e23855e0c19 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -636,9 +636,9 @@ def test_syntaxerror_multi_line_fstring(self): self.assertEqual( stderr.splitlines()[-3:], [ - b' foo"""', - b' ^', - b'SyntaxError: 
f-string: empty expression not allowed',
+                b'    foo = f"""{}',
+                b'           ^',
+                b'SyntaxError: f-string: valid expression required before \'}\'',
             ],
         )
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index 0cd1fb3f9728e5..ca06a39f5df142 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -338,18 +338,25 @@ def func():
         new_code = code = func.__code__.replace(co_linetable=b'')
         self.assertEqual(list(new_code.co_lines()), [])

+    def test_co_lnotab_is_deprecated(self):  # TODO: remove in 3.14
+        def func():
+            pass
+
+        with self.assertWarns(DeprecationWarning):
+            func.__code__.co_lnotab
+
     def test_invalid_bytecode(self):
         def foo():
             pass

-        # assert that opcode 238 is invalid
-        self.assertEqual(opname[238], '<238>')
+        # assert that opcode 229 is invalid
+        self.assertEqual(opname[229], '<229>')

-        # change first opcode to 0xee (=238)
+        # change first opcode to 0xe5 (=229)
         foo.__code__ = foo.__code__.replace(
-            co_code=b'\xee' + foo.__code__.co_code[1:])
+            co_code=b'\xe5' + foo.__code__.co_code[1:])

-        msg = f"unknown opcode 238"
+        msg = "unknown opcode 229"
         with self.assertRaisesRegex(SystemError, msg):
             foo()
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index e3add0c1ee926c..376175f90f63eb 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -2819,24 +2819,19 @@ def test_binary_to_text_denylists_text_transforms(self):
         self.assertIsNone(failure.exception.__cause__)

     @unittest.skipUnless(zlib, "Requires zlib support")
-    def test_custom_zlib_error_is_wrapped(self):
+    def test_custom_zlib_error_is_noted(self):
         # Check zlib codec gives a good error for malformed input
-        msg = "^decoding with 'zlib_codec' codec failed"
-        with self.assertRaisesRegex(Exception, msg) as failure:
+        msg = "decoding with 'zlib_codec' codec failed"
+        with self.assertRaises(Exception) as failure:
             codecs.decode(b"hello", "zlib_codec")
-        self.assertIsInstance(failure.exception.__cause__,
-                              type(failure.exception))
+        self.assertEqual(msg, failure.exception.__notes__[0])

-    def test_custom_hex_error_is_wrapped(self):
+    def test_custom_hex_error_is_noted(self):
         # Check hex codec gives a good error for malformed input
-        msg = "^decoding with 'hex_codec' codec failed"
-        with self.assertRaisesRegex(Exception, msg) as failure:
+        msg = "decoding with 'hex_codec' codec failed"
+        with self.assertRaises(Exception) as failure:
             codecs.decode(b"hello", "hex_codec")
-        self.assertIsInstance(failure.exception.__cause__,
-                              type(failure.exception))
-
-    # Unfortunately, the bz2 module throws OSError, which the codec
-    # machinery currently can't wrap :(
+        self.assertEqual(msg, failure.exception.__notes__[0])

     # Ensure codec aliases from http://bugs.python.org/issue7475 work
     def test_aliases(self):
@@ -2860,11 +2855,8 @@ def test_uu_invalid(self):
         self.assertRaises(ValueError, codecs.decode, b"", "uu-codec")


-# The codec system tries to wrap exceptions in order to ensure the error
-# mentions the operation being performed and the codec involved. We
-# currently *only* want this to happen for relatively stateless
-# exceptions, where the only significant information they contain is their
-# type and a single str argument.
+# The codec system tries to add notes to exceptions in order to ensure
+# the error mentions the operation being performed and the codec involved.
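The renamed *_is_noted tests capture the new contract: the codec machinery re-raises the original exception and only attaches a note, instead of wrapping it in a new exception of the same type. A small sketch of what that looks like, assuming an interpreter with this change applied; only the note text is taken from the assertions above:

    import codecs

    try:
        codecs.decode(b"hello", "hex_codec")   # malformed hex input
    except Exception as exc:
        # The original exception propagates unchanged; the added note names
        # the failing operation and the codec.
        print(exc.__notes__[0])   # decoding with 'hex_codec' codec failed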
# Use a local codec registry to avoid appearing to leak objects when # registering multiple search functions @@ -2874,10 +2866,10 @@ def _get_test_codec(codec_name): return _TEST_CODECS.get(codec_name) -class ExceptionChainingTest(unittest.TestCase): +class ExceptionNotesTest(unittest.TestCase): def setUp(self): - self.codec_name = 'exception_chaining_test' + self.codec_name = 'exception_notes_test' codecs.register(_get_test_codec) self.addCleanup(codecs.unregister, _get_test_codec) @@ -2901,91 +2893,77 @@ def set_codec(self, encode, decode): _TEST_CODECS[self.codec_name] = codec_info @contextlib.contextmanager - def assertWrapped(self, operation, exc_type, msg): - full_msg = r"{} with {!r} codec failed \({}: {}\)".format( - operation, self.codec_name, exc_type.__name__, msg) - with self.assertRaisesRegex(exc_type, full_msg) as caught: + def assertNoted(self, operation, exc_type, msg): + full_msg = r"{} with {!r} codec failed".format( + operation, self.codec_name) + with self.assertRaises(exc_type) as caught: yield caught - self.assertIsInstance(caught.exception.__cause__, exc_type) - self.assertIsNotNone(caught.exception.__cause__.__traceback__) + self.assertIn(full_msg, caught.exception.__notes__[0]) + caught.exception.__notes__.clear() def raise_obj(self, *args, **kwds): # Helper to dynamically change the object raised by a test codec raise self.obj_to_raise - def check_wrapped(self, obj_to_raise, msg, exc_type=RuntimeError): + def check_note(self, obj_to_raise, msg, exc_type=RuntimeError): self.obj_to_raise = obj_to_raise self.set_codec(self.raise_obj, self.raise_obj) - with self.assertWrapped("encoding", exc_type, msg): + with self.assertNoted("encoding", exc_type, msg): "str_input".encode(self.codec_name) - with self.assertWrapped("encoding", exc_type, msg): + with self.assertNoted("encoding", exc_type, msg): codecs.encode("str_input", self.codec_name) - with self.assertWrapped("decoding", exc_type, msg): + with self.assertNoted("decoding", exc_type, msg): b"bytes input".decode(self.codec_name) - with self.assertWrapped("decoding", exc_type, msg): + with self.assertNoted("decoding", exc_type, msg): codecs.decode(b"bytes input", self.codec_name) def test_raise_by_type(self): - self.check_wrapped(RuntimeError, "") + self.check_note(RuntimeError, "") def test_raise_by_value(self): - msg = "This should be wrapped" - self.check_wrapped(RuntimeError(msg), msg) + msg = "This should be noted" + self.check_note(RuntimeError(msg), msg) def test_raise_grandchild_subclass_exact_size(self): - msg = "This should be wrapped" + msg = "This should be noted" class MyRuntimeError(RuntimeError): __slots__ = () - self.check_wrapped(MyRuntimeError(msg), msg, MyRuntimeError) + self.check_note(MyRuntimeError(msg), msg, MyRuntimeError) def test_raise_subclass_with_weakref_support(self): - msg = "This should be wrapped" + msg = "This should be noted" class MyRuntimeError(RuntimeError): pass - self.check_wrapped(MyRuntimeError(msg), msg, MyRuntimeError) - - def check_not_wrapped(self, obj_to_raise, msg): - def raise_obj(*args, **kwds): - raise obj_to_raise - self.set_codec(raise_obj, raise_obj) - with self.assertRaisesRegex(RuntimeError, msg): - "str input".encode(self.codec_name) - with self.assertRaisesRegex(RuntimeError, msg): - codecs.encode("str input", self.codec_name) - with self.assertRaisesRegex(RuntimeError, msg): - b"bytes input".decode(self.codec_name) - with self.assertRaisesRegex(RuntimeError, msg): - codecs.decode(b"bytes input", self.codec_name) + self.check_note(MyRuntimeError(msg), msg, 
MyRuntimeError) - def test_init_override_is_not_wrapped(self): + def test_init_override(self): class CustomInit(RuntimeError): def __init__(self): pass - self.check_not_wrapped(CustomInit, "") + self.check_note(CustomInit, "") - def test_new_override_is_not_wrapped(self): + def test_new_override(self): class CustomNew(RuntimeError): def __new__(cls): return super().__new__(cls) - self.check_not_wrapped(CustomNew, "") + self.check_note(CustomNew, "") - def test_instance_attribute_is_not_wrapped(self): - msg = "This should NOT be wrapped" + def test_instance_attribute(self): + msg = "This should be noted" exc = RuntimeError(msg) exc.attr = 1 - self.check_not_wrapped(exc, "^{}$".format(msg)) + self.check_note(exc, "^{}$".format(msg)) - def test_non_str_arg_is_not_wrapped(self): - self.check_not_wrapped(RuntimeError(1), "1") + def test_non_str_arg(self): + self.check_note(RuntimeError(1), "1") - def test_multiple_args_is_not_wrapped(self): + def test_multiple_args(self): msg_re = r"^\('a', 'b', 'c'\)$" - self.check_not_wrapped(RuntimeError('a', 'b', 'c'), msg_re) + self.check_note(RuntimeError('a', 'b', 'c'), msg_re) # http://bugs.python.org/issue19609 - def test_codec_lookup_failure_not_wrapped(self): + def test_codec_lookup_failure(self): msg = "^unknown encoding: {}$".format(self.codec_name) - # The initial codec lookup should not be wrapped with self.assertRaisesRegex(LookupError, msg): "str input".encode(self.codec_name) with self.assertRaisesRegex(LookupError, msg): diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 6966c2ffd811b8..e3c382266fa058 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -277,7 +277,7 @@ def test_filename(self): def test_warning(self): # Test that the warning is only returned once. with warnings_helper.check_warnings( - ('"is" with a literal', SyntaxWarning), + ('"is" with \'str\' literal', SyntaxWarning), ("invalid escape sequence", SyntaxWarning), ) as w: compile_command(r"'\e' is 0") diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index bfe18c7fc50330..fb568a48396498 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -1626,7 +1626,7 @@ def test_Set_from_iterable(self): class SetUsingInstanceFromIterable(MutableSet): def __init__(self, values, created_by): if not created_by: - raise ValueError(f'created_by must be specified') + raise ValueError('created_by must be specified') self.created_by = created_by self._values = set(values) diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index d1d791915bbbb4..c68b9ce388466e 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -1283,7 +1283,7 @@ def test_multiline_boolean_expression(self): self.assertOpcodeSourcePositionIs(compiled_code, 'POP_JUMP_IF_FALSE', line=2, end_line=2, column=15, end_column=16, occurrence=2) # compare d and 0 - self.assertOpcodeSourcePositionIs(compiled_code, 'COMPARE_AND_BRANCH', + self.assertOpcodeSourcePositionIs(compiled_code, 'COMPARE_OP', line=4, end_line=4, column=8, end_column=13, occurrence=1) # jump if comparison it True self.assertOpcodeSourcePositionIs(compiled_code, 'POP_JUMP_IF_TRUE', diff --git a/Lib/test/test_compiler_assemble.py b/Lib/test/test_compiler_assemble.py new file mode 100644 index 00000000000000..b01b0cc3052c0f --- /dev/null +++ b/Lib/test/test_compiler_assemble.py @@ -0,0 +1,74 @@ + +import ast +import types + +from test.support.bytecode_helper import AssemblerTestCase + + +# Tests for the code-object creation stage of the compiler. 
+ +class IsolatedAssembleTests(AssemblerTestCase): + + def complete_metadata(self, metadata, filename="myfile.py"): + if metadata is None: + metadata = {} + for key in ['name', 'qualname']: + metadata.setdefault(key, key) + for key in ['consts']: + metadata.setdefault(key, []) + for key in ['names', 'varnames', 'cellvars', 'freevars', 'fasthidden']: + metadata.setdefault(key, {}) + for key in ['argcount', 'posonlyargcount', 'kwonlyargcount']: + metadata.setdefault(key, 0) + metadata.setdefault('firstlineno', 1) + metadata.setdefault('filename', filename) + return metadata + + def assemble_test(self, insts, metadata, expected): + metadata = self.complete_metadata(metadata) + insts = self.complete_insts_info(insts) + + co = self.get_code_object(metadata['filename'], insts, metadata) + self.assertIsInstance(co, types.CodeType) + + expected_metadata = {} + for key, value in metadata.items(): + if key == "fasthidden": + # not exposed on code object + continue + if isinstance(value, list): + expected_metadata[key] = tuple(value) + elif isinstance(value, dict): + expected_metadata[key] = tuple(value.keys()) + else: + expected_metadata[key] = value + + for key, value in expected_metadata.items(): + self.assertEqual(getattr(co, "co_" + key), value) + + f = types.FunctionType(co, {}) + for args, res in expected.items(): + self.assertEqual(f(*args), res) + + def test_simple_expr(self): + metadata = { + 'filename' : 'avg.py', + 'name' : 'avg', + 'qualname' : 'stats.avg', + 'consts' : [2], + 'argcount' : 2, + 'varnames' : {'x' : 0, 'y' : 1}, + } + + # code for "return (x+y)/2" + insts = [ + ('RESUME', 0), + ('LOAD_FAST', 0, 1), # 'x' + ('LOAD_FAST', 1, 1), # 'y' + ('BINARY_OP', 0, 1), # '+' + ('LOAD_CONST', 0, 1), # 2 + ('BINARY_OP', 11, 1), # '/' + ('RETURN_VALUE', 1), + ] + expected = {(3, 4) : 3.5, (-100, 200) : 50, (10, 18) : 14} + self.assemble_test(insts, metadata, expected) diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py index ec06785b5667a6..0f8351ab8108a6 100644 --- a/Lib/test/test_contextlib.py +++ b/Lib/test/test_contextlib.py @@ -10,6 +10,7 @@ from contextlib import * # Tests __all__ from test import support from test.support import os_helper +from test.support.testcase import ExceptionIsLikeMixin import weakref @@ -1148,7 +1149,7 @@ class TestRedirectStderr(TestRedirectStream, unittest.TestCase): orig_stream = "stderr" -class TestSuppress(unittest.TestCase): +class TestSuppress(ExceptionIsLikeMixin, unittest.TestCase): @support.requires_docstrings def test_instance_docs(self): @@ -1202,6 +1203,30 @@ def test_cm_is_reentrant(self): 1/0 self.assertTrue(outer_continued) + def test_exception_groups(self): + eg_ve = lambda: ExceptionGroup( + "EG with ValueErrors only", + [ValueError("ve1"), ValueError("ve2"), ValueError("ve3")], + ) + eg_all = lambda: ExceptionGroup( + "EG with many types of exceptions", + [ValueError("ve1"), KeyError("ke1"), ValueError("ve2"), KeyError("ke2")], + ) + with suppress(ValueError): + raise eg_ve() + with suppress(ValueError, KeyError): + raise eg_all() + with self.assertRaises(ExceptionGroup) as eg1: + with suppress(ValueError): + raise eg_all() + self.assertExceptionIsLike( + eg1.exception, + ExceptionGroup( + "EG with many types of exceptions", + [KeyError("ke1"), KeyError("ke2")], + ), + ) + class TestChdir(unittest.TestCase): def make_relative_path(self, *parts): diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py index 6ab19efcc588b8..47145782c0f04f 100644 --- a/Lib/test/test_coroutines.py +++ 
b/Lib/test/test_coroutines.py @@ -2365,15 +2365,15 @@ def check(depth, msg): f"coroutine '{corofn.__qualname__}' was never awaited\n", "Coroutine created at (most recent call last)\n", f' File "{a1_filename}", line {a1_lineno}, in a1\n', - f' return corofn() # comment in a1', + " return corofn() # comment in a1", ])) check(2, "".join([ f"coroutine '{corofn.__qualname__}' was never awaited\n", "Coroutine created at (most recent call last)\n", f' File "{a2_filename}", line {a2_lineno}, in a2\n', - f' return a1() # comment in a2\n', + " return a1() # comment in a2\n", f' File "{a1_filename}", line {a1_lineno}, in a1\n', - f' return corofn() # comment in a1', + " return corofn() # comment in a1", ])) finally: diff --git a/Lib/test/test_cppext.py b/Lib/test/test_cppext.py index 465894d24e7dfc..4fb62d87e860fc 100644 --- a/Lib/test/test_cppext.py +++ b/Lib/test/test_cppext.py @@ -1,6 +1,7 @@ # gh-91321: Build a basic C++ test extension to check that the Python C API is # compatible with C++ and does not emit C++ compiler warnings. import os.path +import shutil import sys import unittest import subprocess @@ -39,6 +40,10 @@ def check_build(self, std_cpp03, extension_name): self._check_build(std_cpp03, extension_name) def _check_build(self, std_cpp03, extension_name): + pkg_dir = 'pkg' + os.mkdir(pkg_dir) + shutil.copy(SETUP_TESTCPPEXT, os.path.join(pkg_dir, "setup.py")) + venv_dir = 'env' verbose = support.verbose @@ -59,11 +64,15 @@ def _check_build(self, std_cpp03, extension_name): python = os.path.join(venv_dir, 'bin', python_exe) def run_cmd(operation, cmd): + env = os.environ.copy() + env['CPYTHON_TEST_CPP_STD'] = 'c++03' if std_cpp03 else 'c++11' + env['CPYTHON_TEST_EXT_NAME'] = extension_name if verbose: print('Run:', ' '.join(cmd)) - subprocess.run(cmd, check=True) + subprocess.run(cmd, check=True, env=env) else: proc = subprocess.run(cmd, + env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) @@ -72,16 +81,16 @@ def run_cmd(operation, cmd): self.fail( f"{operation} failed with exit code {proc.returncode}") - # Build the C++ extension cmd = [python, '-X', 'dev', - SETUP_TESTCPPEXT, 'build_ext', '--verbose'] - if std_cpp03: - cmd.append('-std=c++03') - run_cmd('Build', cmd) + '-m', 'pip', 'install', + support.findfile('setuptools-67.6.1-py3-none-any.whl'), + support.findfile('wheel-0.40.0-py3-none-any.whl')] + run_cmd('Install build dependencies', cmd) - # Install the C++ extension + # Build and install the C++ extension cmd = [python, '-X', 'dev', - SETUP_TESTCPPEXT, 'install'] + '-m', 'pip', 'install', '--no-build-isolation', + os.path.abspath(pkg_dir)] run_cmd('Install', cmd) # Do a reference run. 
Until we test that running python diff --git a/Lib/test/test_cprofile.py b/Lib/test/test_cprofile.py index 4ec769885292a8..98648528bc81f2 100644 --- a/Lib/test/test_cprofile.py +++ b/Lib/test/test_cprofile.py @@ -100,7 +100,7 @@ def main(): profilee.py:98(subhelper) <- 8 0.064 0.080 profilee.py:88(helper2) {built-in method builtins.hasattr} <- 4 0.000 0.004 profilee.py:73(helper1) 8 0.000 0.008 profilee.py:88(helper2) -{built-in method sys.exc_info} <- 4 0.000 0.000 profilee.py:73(helper1) +{built-in method sys.exception} <- 4 0.000 0.000 profilee.py:73(helper1) {method 'append' of 'list' objects} <- 4 0.000 0.000 profilee.py:73(helper1)""" _ProfileOutput['print_callees'] = """\ <string>:1(<module>) -> 1 0.270 1.000 profilee.py:25(testfunc) diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index 8289ddb1c3a54f..8fb97bc0c1a1a7 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -187,6 +187,10 @@ def test_write_quoting(self): quoting = csv.QUOTE_ALL) self._write_test(['a\nb',1], '"a\nb","1"', quoting = csv.QUOTE_ALL) + self._write_test(['a','',None,1], '"a","",,1', + quoting = csv.QUOTE_STRINGS) + self._write_test(['a','',None,1], '"a","",,"1"', + quoting = csv.QUOTE_NOTNULL) def test_write_escape(self): self._write_test(['a',1,'p,q'], 'a,1,"p,q"', diff --git a/Lib/test/test_ctypes/test_pep3118.py b/Lib/test/test_ctypes/test_pep3118.py index c8a70e3e335693..038161745df905 100644 --- a/Lib/test/test_ctypes/test_pep3118.py +++ b/Lib/test/test_ctypes/test_pep3118.py @@ -28,7 +28,7 @@ def test_native_types(self): if shape: self.assertEqual(len(v), shape[0]) else: - self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob)) + self.assertRaises(TypeError, len, v) self.assertEqual(v.itemsize, sizeof(itemtp)) self.assertEqual(v.shape, shape) # XXX Issue #12851: PyCData_NewGetBuffer() must provide strides @@ -39,11 +39,10 @@ def test_native_types(self): # they are always read/write self.assertFalse(v.readonly) - if v.shape: - n = 1 - for dim in v.shape: - n = n * dim - self.assertEqual(n * v.itemsize, len(v.tobytes())) + n = 1 + for dim in v.shape: + n = n * dim + self.assertEqual(n * v.itemsize, len(v.tobytes())) except: # so that we can see the failing type print(tp) @@ -58,7 +57,7 @@ def test_endian_types(self): if shape: self.assertEqual(len(v), shape[0]) else: - self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob)) + self.assertRaises(TypeError, len, v) self.assertEqual(v.itemsize, sizeof(itemtp)) self.assertEqual(v.shape, shape) # XXX Issue #12851 @@ -67,11 +66,10 @@ def test_endian_types(self): # they are always read/write self.assertFalse(v.readonly) - if v.shape: - n = 1 - for dim in v.shape: - n = n * dim - self.assertEqual(n, len(v)) + n = 1 + for dim in v.shape: + n = n * dim + self.assertEqual(n * v.itemsize, len(v.tobytes())) except: # so that we can see the failing type print(tp) @@ -243,7 +241,7 @@ class LEPoint(LittleEndianStructure): # endian_types = [ (BEPoint, "T{>l:x:>l:y:}".replace('l', s_long), (), BEPoint), - (LEPoint, "T{<l:x:<l:y:}".replace('l', s_long), (), LEPoint), + (LEPoint * 1, "T{<l:x:<l:y:}".replace('l', s_long), (1,), LEPoint), (POINTER(BEPoint), "&T{>l:x:>l:y:}".replace('l', s_long), (), POINTER(BEPoint)), (POINTER(LEPoint), "&T{<l:x:<l:y:}".replace('l', s_long), (), POINTER(LEPoint)), ] diff --git a/Lib/test/test_ctypes/test_python_api.py b/Lib/test/test_ctypes/test_python_api.py index 49571f97bbe152..de8989e2c3300f 100644 --- a/Lib/test/test_ctypes/test_python_api.py +++ b/Lib/test/test_ctypes/test_python_api.py @@ -46,7 +46,8 @@ def 
test_PyLong_Long(self): pythonapi.PyLong_AsLong.restype = c_long res = pythonapi.PyLong_AsLong(42) - self.assertEqual(grc(res), ref42 + 1) + # Small int refcnts don't change + self.assertEqual(grc(res), ref42) del res self.assertEqual(grc(42), ref42) diff --git a/Lib/test/test_curses.py b/Lib/test/test_curses.py index b550f4af555ce4..3ab837e4f95681 100644 --- a/Lib/test/test_curses.py +++ b/Lib/test/test_curses.py @@ -5,6 +5,7 @@ import sys import tempfile import unittest +from unittest.mock import MagicMock from test.support import (requires, verbose, SaveSignals, cpython_only, check_disallow_instantiation) @@ -1319,5 +1320,75 @@ def lorem_ipsum(win): for y, line in enumerate(text[:maxy]): win.addstr(y, 0, line[:maxx - (y == maxy - 1)]) + +class TextboxTest(unittest.TestCase): + def setUp(self): + self.mock_win = MagicMock(spec=curses.window) + self.mock_win.getyx.return_value = (1, 1) + self.mock_win.getmaxyx.return_value = (10, 20) + self.textbox = curses.textpad.Textbox(self.mock_win) + + def test_init(self): + """Test textbox initialization.""" + self.mock_win.reset_mock() + tb = curses.textpad.Textbox(self.mock_win) + self.mock_win.getmaxyx.assert_called_once_with() + self.mock_win.keypad.assert_called_once_with(1) + self.assertEqual(tb.insert_mode, False) + self.assertEqual(tb.stripspaces, 1) + self.assertIsNone(tb.lastcmd) + self.mock_win.reset_mock() + + def test_insert(self): + """Test inserting a printable character.""" + self.mock_win.reset_mock() + self.textbox.do_command(ord('a')) + self.mock_win.addch.assert_called_with(ord('a')) + self.textbox.do_command(ord('b')) + self.mock_win.addch.assert_called_with(ord('b')) + self.textbox.do_command(ord('c')) + self.mock_win.addch.assert_called_with(ord('c')) + self.mock_win.reset_mock() + + def test_delete(self): + """Test deleting a character.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.ascii.BS) + self.textbox.do_command(curses.KEY_BACKSPACE) + self.textbox.do_command(curses.ascii.DEL) + assert self.mock_win.delch.call_count == 3 + self.mock_win.reset_mock() + + def test_move_left(self): + """Test moving the cursor left.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_LEFT) + self.mock_win.move.assert_called_with(1, 0) + self.textbox.do_command(curses.KEY_RIGHT) + self.mock_win.move.assert_called_with(1, 2) + self.mock_win.reset_mock() + + def test_move_right(self): + """Test moving the cursor right.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_RIGHT) + self.mock_win.move.assert_called_with(1, 2) + self.mock_win.reset_mock() + + def test_move_up(self): + """Test moving the cursor up.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_UP) + self.mock_win.move.assert_called_with(0, 1) + self.mock_win.reset_mock() + + def test_move_down(self): + """Test moving the cursor down.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_DOWN) + self.mock_win.move.assert_called_with(2, 1) + self.mock_win.reset_mock() + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py index 46f33043c27071..6669f1c57e2e78 100644 --- a/Lib/test/test_dataclasses.py +++ b/Lib/test/test_dataclasses.py @@ -5,11 +5,13 @@ from dataclasses import * import abc +import io import pickle import inspect import builtins import types import weakref +import traceback import unittest from unittest.mock import Mock from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol,
DefaultDict @@ -283,6 +285,23 @@ class C: c = C(5) self.assertEqual(c.BUILTINS, 5) + def test_field_with_special_single_underscore_names(self): + # gh-98886 + + @dataclass + class X: + x: int = field(default_factory=lambda: 111) + _dflt_x: int = field(default_factory=lambda: 222) + + X() + + @dataclass + class Y: + y: int = field(default_factory=lambda: 111) + _HAS_DEFAULT_FACTORY: int = 222 + + assert Y(y=222).y == 222 + def test_field_named_like_builtin(self): # Attribute names can shadow built-in names # since code generation is used. @@ -738,8 +757,8 @@ class Point: class Subclass(typ): pass with self.assertRaisesRegex(ValueError, - f"mutable default .*Subclass'>" - ' for field z is not allowed' + "mutable default .*Subclass'>" + " for field z is not allowed" ): @dataclass class Point: @@ -1526,6 +1545,16 @@ class C: pass with self.assertRaisesRegex(TypeError, 'dataclass type or instance'): fields(C()) + def test_clean_traceback_from_fields_exception(self): + stdout = io.StringIO() + try: + fields(object) + except TypeError as exc: + traceback.print_exception(exc, file=stdout) + printed_traceback = stdout.getvalue() + self.assertNotIn("AttributeError", printed_traceback) + self.assertNotIn("__dataclass_fields__", printed_traceback) + def test_helper_asdict(self): # Basic tests for asdict(), it should return a new dictionary. @dataclass @@ -2268,6 +2297,19 @@ class C: self.assertDocStrEqual(C.__doc__, "C(x:collections.deque=<factory>)") + def test_docstring_with_no_signature(self): + # See https://github.com/python/cpython/issues/103449 + class Meta(type): + __call__ = dict + class Base(metaclass=Meta): + pass + + @dataclass + class C(Base): + pass + + self.assertDocStrEqual(C.__doc__, "C") + class TestInit(unittest.TestCase): def test_base_has_init(self): @@ -3142,6 +3184,74 @@ def test_frozen_pickle(self): self.assertIsNot(obj, p) self.assertEqual(obj, p) + @dataclass(frozen=True, slots=True) + class FrozenSlotsGetStateClass: + foo: str + bar: int + + getstate_called: bool = field(default=False, compare=False) + + def __getstate__(self): + object.__setattr__(self, 'getstate_called', True) + return [self.foo, self.bar] + + @dataclass(frozen=True, slots=True) + class FrozenSlotsSetStateClass: + foo: str + bar: int + + setstate_called: bool = field(default=False, compare=False) + + def __setstate__(self, state): + object.__setattr__(self, 'setstate_called', True) + object.__setattr__(self, 'foo', state[0]) + object.__setattr__(self, 'bar', state[1]) + + @dataclass(frozen=True, slots=True) + class FrozenSlotsAllStateClass: + foo: str + bar: int + + getstate_called: bool = field(default=False, compare=False) + setstate_called: bool = field(default=False, compare=False) + + def __getstate__(self): + object.__setattr__(self, 'getstate_called', True) + return [self.foo, self.bar] + + def __setstate__(self, state): + object.__setattr__(self, 'setstate_called', True) + object.__setattr__(self, 'foo', state[0]) + object.__setattr__(self, 'bar', state[1]) + + def test_frozen_slots_pickle_custom_state(self): + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + obj = self.FrozenSlotsGetStateClass('a', 1) + dumped = pickle.dumps(obj, protocol=proto) + + self.assertTrue(obj.getstate_called) + self.assertEqual(obj, pickle.loads(dumped)) + + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + obj = self.FrozenSlotsSetStateClass('a', 1) + obj2 = pickle.loads(pickle.dumps(obj, protocol=proto)) + + self.assertTrue(obj2.setstate_called) 
+ self.assertEqual(obj, obj2) + + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + obj = self.FrozenSlotsAllStateClass('a', 1) + dumped = pickle.dumps(obj, protocol=proto) + + self.assertTrue(obj.getstate_called) + + obj2 = pickle.loads(dumped) + self.assertTrue(obj2.setstate_called) + self.assertEqual(obj, obj2) + def test_slots_with_default_no_init(self): # Originally reported in bpo-44649. @dataclass(slots=True) @@ -3629,7 +3739,7 @@ def test_text_annotations(self): ByMakeDataClass = make_dataclass('ByMakeDataClass', [('x', int)]) ManualModuleMakeDataClass = make_dataclass('ManualModuleMakeDataClass', [('x', int)], - module='test.test_dataclasses') + module=__name__) WrongNameMakeDataclass = make_dataclass('Wrong', [('x', int)]) WrongModuleMakeDataclass = make_dataclass('WrongModuleMakeDataclass', [('x', int)], diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index cbc020d1d3904a..ad3eefba365856 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -5003,6 +5003,32 @@ class Child(Parent): gc.collect() self.assertEqual(Parent.__subclasses__(), []) + def test_attr_raise_through_property(self): + # test case for gh-103272 + class A: + def __getattr__(self, name): + raise ValueError("FOO") + + @property + def foo(self): + return self.__getattr__("asdf") + + with self.assertRaisesRegex(ValueError, "FOO"): + A().foo + + # test case for gh-103551 + class B: + @property + def __getattr__(self, name): + raise ValueError("FOO") + + @property + def foo(self): + raise NotImplementedError("BAR") + + with self.assertRaisesRegex(NotImplementedError, "BAR"): + B().foo + class DictProxyTests(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index 66701018a5fe8d..cfd6a2d9ef41a3 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -46,7 +46,7 @@ def cm(cls, x): %3d LOAD_FAST 1 (x) LOAD_CONST 1 (1) - COMPARE_OP 32 (==) + COMPARE_OP 40 (==) LOAD_FAST 0 (self) STORE_ATTR 0 (x) RETURN_CONST 0 (None) @@ -56,7 +56,7 @@ def cm(cls, x): RESUME 0 LOAD_FAST 1 LOAD_CONST 1 - COMPARE_OP 32 (==) + COMPARE_OP 40 (==) LOAD_FAST 0 STORE_ATTR 0 RETURN_CONST 0 @@ -67,7 +67,7 @@ def cm(cls, x): %3d LOAD_FAST 1 (x) LOAD_CONST 1 (1) - COMPARE_OP 32 (==) + COMPARE_OP 40 (==) LOAD_FAST 0 (cls) STORE_ATTR 0 (x) RETURN_CONST 0 (None) @@ -78,7 +78,7 @@ def cm(cls, x): %3d LOAD_FAST 0 (x) LOAD_CONST 1 (1) - COMPARE_OP 32 (==) + COMPARE_OP 40 (==) STORE_FAST 0 (x) RETURN_CONST 0 (None) """ % (_C.sm.__code__.co_firstlineno, _C.sm.__code__.co_firstlineno + 2,) @@ -138,10 +138,10 @@ def bug708901(): %3d CALL 2 GET_ITER - >> FOR_ITER 2 (to 36) + >> FOR_ITER 2 (to 34) STORE_FAST 0 (res) -%3d JUMP_BACKWARD 4 (to 28) +%3d JUMP_BACKWARD 4 (to 26) %3d >> END_FOR RETURN_CONST 0 (None) @@ -227,6 +227,26 @@ def bug42562(): JUMP_FORWARD -4 (to 0) """ +def func_w_kwargs(a, b, **c): + pass + +def wrap_func_w_kwargs(): + func_w_kwargs(1, 2, c=5) + +dis_kw_names = """\ +%3d RESUME 0 + +%3d LOAD_GLOBAL 1 (NULL + func_w_kwargs) + LOAD_CONST 1 (1) + LOAD_CONST 2 (2) + LOAD_CONST 3 (5) + KW_NAMES 4 (('c',)) + CALL 3 + POP_TOP + RETURN_CONST 0 (None) +""" % (wrap_func_w_kwargs.__code__.co_firstlineno, + wrap_func_w_kwargs.__code__.co_firstlineno + 1) + _BIG_LINENO_FORMAT = """\ 1 RESUME 0 @@ -437,7 +457,7 @@ def _with(c): %3d >> PUSH_EXC_INFO WITH_EXCEPT_START - POP_JUMP_IF_TRUE 1 (to 44) + POP_JUMP_IF_TRUE 1 (to 42) RERAISE 2 >> POP_TOP POP_EXCEPT @@ -479,8 +499,7 @@ async def _asyncwith(c): YIELD_VALUE 2 RESUME 3 
JUMP_BACKWARD_NO_INTERRUPT 5 (to 14) - >> SWAP 2 - POP_TOP + >> END_SEND POP_TOP %3d LOAD_CONST 1 (1) @@ -492,11 +511,11 @@ async def _asyncwith(c): CALL 2 GET_AWAITABLE 2 LOAD_CONST 0 (None) - >> SEND 3 (to 64) + >> SEND 3 (to 60) YIELD_VALUE 2 RESUME 3 - JUMP_BACKWARD_NO_INTERRUPT 5 (to 54) - >> POP_TOP + JUMP_BACKWARD_NO_INTERRUPT 5 (to 50) + >> END_SEND POP_TOP %3d LOAD_CONST 2 (2) @@ -504,21 +523,20 @@ async def _asyncwith(c): RETURN_CONST 0 (None) %3d >> CLEANUP_THROW - JUMP_BACKWARD 27 (to 24) + JUMP_BACKWARD 25 (to 24) >> CLEANUP_THROW - JUMP_BACKWARD 9 (to 64) + JUMP_BACKWARD 9 (to 60) >> PUSH_EXC_INFO WITH_EXCEPT_START GET_AWAITABLE 2 LOAD_CONST 0 (None) - >> SEND 4 (to 102) + >> SEND 4 (to 98) YIELD_VALUE 3 RESUME 3 - JUMP_BACKWARD_NO_INTERRUPT 5 (to 90) + JUMP_BACKWARD_NO_INTERRUPT 5 (to 86) >> CLEANUP_THROW - >> SWAP 2 - POP_TOP - POP_JUMP_IF_TRUE 1 (to 110) + >> END_SEND + POP_JUMP_IF_TRUE 1 (to 104) RERAISE 2 >> POP_TOP POP_EXCEPT @@ -739,14 +757,14 @@ def loop_test(): LOAD_CONST 2 (3) BINARY_OP 5 (*) GET_ITER - >> FOR_ITER_LIST 14 (to 48) + >> FOR_ITER_LIST 13 (to 46) STORE_FAST 0 (i) %3d LOAD_GLOBAL_MODULE 1 (NULL + load_test) LOAD_FAST 0 (i) CALL_PY_WITH_DEFAULTS 1 POP_TOP - JUMP_BACKWARD 16 (to 16) + JUMP_BACKWARD 15 (to 16) %3d >> END_FOR RETURN_CONST 0 (None) @@ -872,6 +890,13 @@ def do_disassembly_test(self, func, expected, with_offsets=False): self.maxDiff = None got = self.get_disassembly(func, depth=0) self.do_disassembly_compare(got, expected, with_offsets) + # Add checks for dis.disco + if hasattr(func, '__code__'): + got_disco = io.StringIO() + with contextlib.redirect_stdout(got_disco): + dis.disco(func.__code__) + self.do_disassembly_compare(got_disco.getvalue(), expected, + with_offsets) def test_opmap(self): self.assertEqual(dis.opmap["NOP"], 9) @@ -887,9 +912,9 @@ def test_boundaries(self): def test_widths(self): long_opcodes = set(['JUMP_BACKWARD_NO_INTERRUPT', - ]) + 'INSTRUMENTED_CALL_FUNCTION_EX']) for opcode, opname in enumerate(dis.opname): - if opname in long_opcodes: + if opname in long_opcodes or opname.startswith("INSTRUMENTED"): continue with self.subTest(opname=opname): width = dis._OPNAME_WIDTH @@ -922,6 +947,10 @@ def test_bug_46724(self): # Test that negative operargs are handled properly self.do_disassembly_test(bug46724, dis_bug46724) + def test_kw_names(self): + # Test that value is displayed for KW_NAMES + self.do_disassembly_test(wrap_func_w_kwargs, dis_kw_names) + def test_big_linenos(self): def func(count): namespace = {} @@ -1035,6 +1064,10 @@ def test_disassemble_try_finally(self): self.do_disassembly_test(_tryfinallyconst, dis_tryfinallyconst) def test_dis_none(self): + try: + del sys.last_exc + except AttributeError: + pass try: del sys.last_traceback except AttributeError: @@ -1052,7 +1085,7 @@ def test_dis_traceback(self): 1/0 except Exception as e: tb = e.__traceback__ - sys.last_traceback = tb + sys.last_exc = e tb_dis = self.get_disassemble_as_string(tb.tb_frame.f_code, tb.tb_lasti) self.do_disassembly_test(None, tb_dis, True) @@ -1074,6 +1107,13 @@ def check(expected, **kwargs): check(dis_nested_2, depth=None) check(dis_nested_2) + def test__try_compile_no_context_exc_on_error(self): + # see gh-102114 + try: + dis._try_compile(")", "") + except Exception as e: + self.assertIsNone(e.__context__) + @staticmethod def code_quicken(f, times=ADAPTIVE_WARMUP_DELAY): for _ in range(times): @@ -1113,7 +1153,7 @@ def test_binary_specialize(self): 1 2 LOAD_NAME 0 (a) 4 LOAD_CONST 0 (0) 6 %s - 16 RETURN_VALUE + 10 RETURN_VALUE """ 
co_list = compile('a[0]', "<list>", "eval") self.code_quicken(lambda: exec(co_list, {}, {'a': [0]})) @@ -1198,11 +1238,40 @@ def test_show_caches(self): caches = list(self.get_cached_values(quickened, adaptive)) for cache in caches: self.assertRegex(cache, pattern) - total_caches = 22 + total_caches = 20 empty_caches = 7 self.assertEqual(caches.count(""), empty_caches) self.assertEqual(len(caches), total_caches) + @cpython_only + def test_show_currinstr_with_cache(self): + """ + Make sure that with lasti pointing to CACHE, it still shows the current + line correctly + """ + def f(): + print(a) + # The code above should generate a LOAD_GLOBAL which has CACHE instr after + # However, this might change in the future. So we explicitly try to find + # a CACHE entry in the instructions. If we can't do that, fail the test + + for inst in dis.get_instructions(f, show_caches=True): + if inst.opname == "CACHE": + op_offset = inst.offset - 2 + cache_offset = inst.offset + break + else: + self.fail("Can't find a CACHE entry in the function provided to do the test") + + assem_op = self.get_disassembly(f.__code__, lasti=op_offset, wrapper=False) + assem_cache = self.get_disassembly(f.__code__, lasti=cache_offset, wrapper=False) + + # Make sure --> exists and points to the correct offset + self.assertRegex(assem_op, fr"-->\s+{op_offset}") + # Make sure when lasti points to cache, it shows the same disassembly + self.assertEqual(assem_op, assem_cache) + + class DisWithFileTests(DisTests): # Run the tests again, using the file arg instead of print @@ -1500,9 +1569,9 @@ def _prepare_test_cases(): Instruction(opname='BUILD_MAP', opcode=105, arg=0, argval=0, argrepr='', offset=40, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='Hello world!', argrepr="'Hello world!'", offset=42, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=7, argval=7, argrepr='', offset=44, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=54, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=56, starts_line=8, is_jump_target=False, positions=None), - Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=58, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=52, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=54, starts_line=8, is_jump_target=False, positions=None), + Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=56, starts_line=None, is_jump_target=False, positions=None), ] expected_opinfo_f = [ @@ -1525,9 +1594,9 @@ def _prepare_test_cases(): Instruction(opname='LOAD_DEREF', opcode=137, arg=0, argval='c', argrepr='c', offset=40, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=1, argval='d', argrepr='d', offset=42, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=4, argval=4, argrepr='', offset=44, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=54, starts_line=None, 
is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='inner', argrepr='inner', offset=56, starts_line=6, is_jump_target=False, positions=None), - Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=58, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=52, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='inner', argrepr='inner', offset=54, starts_line=6, is_jump_target=False, positions=None), + Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=56, starts_line=None, is_jump_target=False, positions=None), ] expected_opinfo_inner = [ @@ -1541,8 +1610,8 @@ def _prepare_test_cases(): Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='e', argrepr='e', offset=22, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_FAST', opcode=124, arg=1, argval='f', argrepr='f', offset=24, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=6, argval=6, argrepr='', offset=26, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=36, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RETURN_CONST', opcode=121, arg=0, argval=None, argrepr='None', offset=38, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=34, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RETURN_CONST', opcode=121, arg=0, argval=None, argrepr='None', offset=36, starts_line=None, is_jump_target=False, positions=None), ] expected_opinfo_jumpy = [ @@ -1550,115 +1619,115 @@ def _prepare_test_cases(): Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='range', argrepr='NULL + range', offset=2, starts_line=3, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=12, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=14, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=24, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='FOR_ITER', opcode=93, arg=27, argval=84, argrepr='to 84', offset=26, starts_line=None, is_jump_target=True, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=30, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=32, starts_line=4, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=42, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=44, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=54, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=56, starts_line=5, 
is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=58, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_AND_BRANCH', opcode=141, arg=13, argval='<', argrepr='<', offset=60, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=68, argrepr='to 68', offset=64, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=21, argval=26, argrepr='to 26', offset=66, starts_line=6, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=68, starts_line=7, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=70, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_AND_BRANCH', opcode=141, arg=68, argval='>', argrepr='>', offset=72, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=80, argrepr='to 80', offset=76, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=27, argval=26, argrepr='to 26', offset=78, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=80, starts_line=8, is_jump_target=True, positions=None), - Instruction(opname='JUMP_FORWARD', opcode=110, arg=13, argval=110, argrepr='to 110', offset=82, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='END_FOR', opcode=4, arg=None, argval=None, argrepr='', offset=84, starts_line=3, is_jump_target=True, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=86, starts_line=10, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=96, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=98, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=108, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST_CHECK', opcode=127, arg=0, argval='i', argrepr='i', offset=110, starts_line=11, is_jump_target=True, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=32, argval=178, argrepr='to 178', offset=112, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=114, starts_line=12, is_jump_target=True, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=124, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=126, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=136, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=138, starts_line=13, is_jump_target=False, positions=None), - 
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=140, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='BINARY_OP', opcode=122, arg=23, argval=23, argrepr='-=', offset=142, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=146, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=148, starts_line=14, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=150, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_AND_BRANCH', opcode=141, arg=75, argval='>', argrepr='>', offset=152, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=160, argrepr='to 160', offset=156, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=25, argval=110, argrepr='to 110', offset=158, starts_line=15, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=160, starts_line=16, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=162, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COMPARE_AND_BRANCH', opcode=141, arg=13, argval='<', argrepr='<', offset=164, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=172, argrepr='to 172', offset=168, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_FORWARD', opcode=110, arg=15, argval=202, argrepr='to 202', offset=170, starts_line=17, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=172, starts_line=11, is_jump_target=True, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=178, argrepr='to 178', offset=174, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=32, argval=114, argrepr='to 114', offset=176, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=178, starts_line=19, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=188, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=190, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=200, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='NOP', opcode=9, arg=None, argval=None, argrepr='', offset=202, starts_line=20, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=204, starts_line=21, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=206, starts_line=None, is_jump_target=False, positions=None), - 
Instruction(opname='BINARY_OP', opcode=122, arg=11, argval=11, argrepr='/', offset=208, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=212, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=214, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='BEFORE_WITH', opcode=53, arg=None, argval=None, argrepr='', offset=216, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=218, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=220, starts_line=26, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=230, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=232, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=242, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=244, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=246, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=248, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=2, argval=2, argrepr='', offset=250, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=260, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=262, starts_line=28, is_jump_target=True, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=272, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=274, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=22, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='FOR_ITER', opcode=93, arg=26, argval=80, argrepr='to 80', offset=24, starts_line=None, is_jump_target=True, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=28, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=30, starts_line=4, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=40, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=42, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, 
argrepr='', offset=50, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=52, starts_line=5, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=54, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=2, argval='<', argrepr='<', offset=56, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=64, argrepr='to 64', offset=60, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=20, argval=24, argrepr='to 24', offset=62, starts_line=6, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=64, starts_line=7, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=66, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=68, argval='>', argrepr='>', offset=68, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=76, argrepr='to 76', offset=72, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=26, argval=24, argrepr='to 24', offset=74, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=76, starts_line=8, is_jump_target=True, positions=None), + Instruction(opname='JUMP_FORWARD', opcode=110, arg=12, argval=104, argrepr='to 104', offset=78, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='END_FOR', opcode=4, arg=None, argval=None, argrepr='', offset=80, starts_line=3, is_jump_target=True, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=82, starts_line=10, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=92, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=94, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=102, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST_CHECK', opcode=127, arg=0, argval='i', argrepr='i', offset=104, starts_line=11, is_jump_target=True, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=31, argval=170, argrepr='to 170', offset=106, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=108, starts_line=12, is_jump_target=True, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=118, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=120, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=128, starts_line=None, is_jump_target=False, positions=None), + 
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=130, starts_line=13, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=132, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='BINARY_OP', opcode=122, arg=23, argval=23, argrepr='-=', offset=134, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=138, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=140, starts_line=14, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=142, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=68, argval='>', argrepr='>', offset=144, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=152, argrepr='to 152', offset=148, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=24, argval=104, argrepr='to 104', offset=150, starts_line=15, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=152, starts_line=16, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=154, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COMPARE_OP', opcode=107, arg=2, argval='<', argrepr='<', offset=156, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=164, argrepr='to 164', offset=160, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_FORWARD', opcode=110, arg=14, argval=192, argrepr='to 192', offset=162, starts_line=17, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=164, starts_line=11, is_jump_target=True, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=1, argval=170, argrepr='to 170', offset=166, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=31, argval=108, argrepr='to 108', offset=168, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=170, starts_line=19, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=180, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=182, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=190, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='NOP', opcode=9, arg=None, argval=None, argrepr='', offset=192, starts_line=20, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=194, starts_line=21, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, 
arg=7, argval=0, argrepr='0', offset=196, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='BINARY_OP', opcode=122, arg=11, argval=11, argrepr='/', offset=198, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=202, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=204, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='BEFORE_WITH', opcode=53, arg=None, argval=None, argrepr='', offset=206, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=208, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=210, starts_line=26, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=220, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=222, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=230, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=232, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=234, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=236, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=2, argval=2, argrepr='', offset=238, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=246, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=248, starts_line=28, is_jump_target=True, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=258, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=260, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=268, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RETURN_CONST', opcode=121, arg=0, argval=None, argrepr='None', offset=270, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=272, starts_line=25, is_jump_target=False, positions=None), + Instruction(opname='WITH_EXCEPT_START', opcode=49, arg=None, argval=None, argrepr='', offset=274, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=280, argrepr='to 280', offset=276, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=2, argval=2, argrepr='', offset=278, 
starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=280, starts_line=None, is_jump_target=True, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=282, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=284, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RETURN_CONST', opcode=121, arg=0, argval=None, argrepr='None', offset=286, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=288, starts_line=25, is_jump_target=False, positions=None), - Instruction(opname='WITH_EXCEPT_START', opcode=49, arg=None, argval=None, argrepr='', offset=290, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=115, arg=1, argval=296, argrepr='to 296', offset=292, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=2, argval=2, argrepr='', offset=294, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=296, starts_line=None, is_jump_target=True, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=298, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=300, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=302, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=22, argval=262, argrepr='to 262', offset=304, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=306, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=308, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=310, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=312, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=314, starts_line=22, is_jump_target=False, positions=None), - Instruction(opname='CHECK_EXC_MATCH', opcode=36, arg=None, argval=None, argrepr='', offset=324, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=15, argval=358, argrepr='to 358', offset=326, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=328, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=330, starts_line=23, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=340, 
starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=342, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=352, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=354, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=140, arg=48, argval=262, argrepr='to 262', offset=356, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=358, starts_line=22, is_jump_target=True, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=360, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=362, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=364, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=366, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=368, starts_line=28, is_jump_target=False, positions=None), - Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=378, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=380, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=390, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=392, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=394, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=396, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=398, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=286, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=21, argval=248, argrepr='to 248', offset=288, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=290, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=292, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=294, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=296, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=4, argval='ZeroDivisionError', 
argrepr='ZeroDivisionError', offset=298, starts_line=22, is_jump_target=False, positions=None), + Instruction(opname='CHECK_EXC_MATCH', opcode=36, arg=None, argval=None, argrepr='', offset=308, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=14, argval=340, argrepr='to 340', offset=310, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=312, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=314, starts_line=23, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=324, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=326, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=334, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=336, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='JUMP_BACKWARD', opcode=140, arg=46, argval=248, argrepr='to 248', offset=338, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=340, starts_line=22, is_jump_target=True, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=342, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=344, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=346, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=348, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=350, starts_line=28, is_jump_target=False, positions=None), + Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=360, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=362, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=370, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=0, argval=0, argrepr='', offset=372, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='COPY', opcode=120, arg=3, argval=3, argrepr='', offset=374, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=376, starts_line=None, is_jump_target=False, positions=None), + Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=378, starts_line=None, is_jump_target=False, positions=None), ] # One last piece of inspect fodder to check the default line number handling @@ -1906,9 
+1975,21 @@ def test_findlabels(self): self.assertEqual(sorted(labels), sorted(jumps)) + def test_findlinestarts(self): + def func(): + pass + + code = func.__code__ + offsets = [linestart[0] for linestart in dis.findlinestarts(code)] + self.assertEqual(offsets, [0, 2]) + class TestDisTraceback(DisTestBase): def setUp(self) -> None: + try: # We need to clean up existing tracebacks + del sys.last_exc + except AttributeError: + pass try: # We need to clean up existing tracebacks del sys.last_traceback except AttributeError: diff --git a/Lib/test/test_email/test_utils.py b/Lib/test/test_email/test_utils.py index 78afb358035e81..25fa48c5ee217b 100644 --- a/Lib/test/test_email/test_utils.py +++ b/Lib/test/test_email/test_utils.py @@ -83,14 +83,14 @@ def test_localtime_is_tz_aware_daylight_false(self): def test_localtime_daylight_true_dst_false(self): test.support.patch(self, time, 'daylight', True) t0 = datetime.datetime(2012, 3, 12, 1, 1) - t1 = utils.localtime(t0, isdst=-1) + t1 = utils.localtime(t0) t2 = utils.localtime(t1) self.assertEqual(t1, t2) def test_localtime_daylight_false_dst_false(self): test.support.patch(self, time, 'daylight', False) t0 = datetime.datetime(2012, 3, 12, 1, 1) - t1 = utils.localtime(t0, isdst=-1) + t1 = utils.localtime(t0) t2 = utils.localtime(t1) self.assertEqual(t1, t2) @@ -98,7 +98,7 @@ def test_localtime_daylight_false_dst_false(self): def test_localtime_daylight_true_dst_true(self): test.support.patch(self, time, 'daylight', True) t0 = datetime.datetime(2012, 3, 12, 1, 1) - t1 = utils.localtime(t0, isdst=1) + t1 = utils.localtime(t0) t2 = utils.localtime(t1) self.assertEqual(t1, t2) @@ -106,7 +106,7 @@ def test_localtime_daylight_true_dst_true(self): def test_localtime_daylight_false_dst_true(self): test.support.patch(self, time, 'daylight', False) t0 = datetime.datetime(2012, 3, 12, 1, 1) - t1 = utils.localtime(t0, isdst=1) + t1 = utils.localtime(t0) t2 = utils.localtime(t1) self.assertEqual(t1, t2) @@ -157,6 +157,11 @@ def test_variable_tzname(self): t1 = utils.localtime(t0) self.assertEqual(t1.tzname(), 'EET') + def test_isdst_deprecation(self): + with self.assertWarns(DeprecationWarning): + t0 = datetime.datetime(1990, 1, 1) + t1 = utils.localtime(t0, isdst=True) + # Issue #24836: The timezone files are out of date (pre 2011k) # on Mac OS X Snow Leopard. @test.support.requires_mac_ver(10, 7) diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index e56d0db8627e91..c9691bbf304915 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -110,7 +110,7 @@ def run_embedded_interpreter(self, *args, env=None, print(f"--- {cmd} failed ---") print(f"stdout:\n{out}") print(f"stderr:\n{err}") - print(f"------") + print("------") self.assertEqual(p.returncode, returncode, "bad returncode %d, stderr is %r" % @@ -1656,6 +1656,7 @@ def test_init_use_frozen_modules(self): api=API_PYTHON, env=env) def test_init_main_interpreter_settings(self): + OBMALLOC = 1<<5 EXTENSIONS = 1<<8 THREADS = 1<<10 DAEMON_THREADS = 1<<11 @@ -1664,7 +1665,7 @@ def test_init_main_interpreter_settings(self): expected = { # All optional features should be enabled. 
'feature_flags': - FORK | EXEC | THREADS | DAEMON_THREADS, + OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS, } out, err = self.run_embedded_interpreter( 'test_init_main_interpreter_settings', diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py index bfca0cd7fbe483..69ab2a4feaa938 100644 --- a/Lib/test/test_ensurepip.py +++ b/Lib/test/test_ensurepip.py @@ -20,7 +20,6 @@ def test_version(self): # Test version() with tempfile.TemporaryDirectory() as tmpdir: self.touch(tmpdir, "pip-1.2.3b1-py2.py3-none-any.whl") - self.touch(tmpdir, "setuptools-49.1.3-py3-none-any.whl") with (unittest.mock.patch.object(ensurepip, '_PACKAGES', None), unittest.mock.patch.object(ensurepip, '_WHEEL_PKG_DIR', tmpdir)): self.assertEqual(ensurepip.version(), '1.2.3b1') @@ -36,15 +35,12 @@ def test_get_packages_no_dir(self): # use bundled wheel packages self.assertIsNotNone(packages['pip'].wheel_name) - self.assertIsNotNone(packages['setuptools'].wheel_name) def test_get_packages_with_dir(self): # Test _get_packages() with a wheel package directory - setuptools_filename = "setuptools-49.1.3-py3-none-any.whl" pip_filename = "pip-20.2.2-py2.py3-none-any.whl" with tempfile.TemporaryDirectory() as tmpdir: - self.touch(tmpdir, setuptools_filename) self.touch(tmpdir, pip_filename) # not used, make sure that it's ignored self.touch(tmpdir, "wheel-0.34.2-py2.py3-none-any.whl") @@ -53,15 +49,12 @@ def test_get_packages_with_dir(self): unittest.mock.patch.object(ensurepip, '_WHEEL_PKG_DIR', tmpdir)): packages = ensurepip._get_packages() - self.assertEqual(packages['setuptools'].version, '49.1.3') - self.assertEqual(packages['setuptools'].wheel_path, - os.path.join(tmpdir, setuptools_filename)) self.assertEqual(packages['pip'].version, '20.2.2') self.assertEqual(packages['pip'].wheel_path, os.path.join(tmpdir, pip_filename)) # wheel package is ignored - self.assertEqual(sorted(packages), ['pip', 'setuptools']) + self.assertEqual(sorted(packages), ['pip']) class EnsurepipMixin: @@ -92,13 +85,13 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ "install", "--no-cache-dir", "--no-index", "--find-links", - unittest.mock.ANY, "setuptools", "pip", + unittest.mock.ANY, "pip", ], unittest.mock.ANY, ) additional_paths = self.run_pip.call_args[0][1] - self.assertEqual(len(additional_paths), 2) + self.assertEqual(len(additional_paths), 1) def test_bootstrapping_with_root(self): ensurepip.bootstrap(root="/foo/bar/") @@ -107,7 +100,7 @@ def test_bootstrapping_with_root(self): [ "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--root", "/foo/bar/", - "setuptools", "pip", + "pip", ], unittest.mock.ANY, ) @@ -118,7 +111,7 @@ def test_bootstrapping_with_user(self): self.run_pip.assert_called_once_with( [ "install", "--no-cache-dir", "--no-index", "--find-links", - unittest.mock.ANY, "--user", "setuptools", "pip", + unittest.mock.ANY, "--user", "pip", ], unittest.mock.ANY, ) @@ -129,7 +122,7 @@ def test_bootstrapping_with_upgrade(self): self.run_pip.assert_called_once_with( [ "install", "--no-cache-dir", "--no-index", "--find-links", - unittest.mock.ANY, "--upgrade", "setuptools", "pip", + unittest.mock.ANY, "--upgrade", "pip", ], unittest.mock.ANY, ) @@ -140,7 +133,7 @@ def test_bootstrapping_with_verbosity_1(self): self.run_pip.assert_called_once_with( [ "install", "--no-cache-dir", "--no-index", "--find-links", - unittest.mock.ANY, "-v", "setuptools", "pip", + unittest.mock.ANY, "-v", "pip", ], unittest.mock.ANY, ) @@ -151,7 +144,7 @@ def 
test_bootstrapping_with_verbosity_2(self): self.run_pip.assert_called_once_with( [ "install", "--no-cache-dir", "--no-index", "--find-links", - unittest.mock.ANY, "-vv", "setuptools", "pip", + unittest.mock.ANY, "-vv", "pip", ], unittest.mock.ANY, ) @@ -162,7 +155,7 @@ def test_bootstrapping_with_verbosity_3(self): self.run_pip.assert_called_once_with( [ "install", "--no-cache-dir", "--no-index", "--find-links", - unittest.mock.ANY, "-vvv", "setuptools", "pip", + unittest.mock.ANY, "-vvv", "pip", ], unittest.mock.ANY, ) @@ -239,7 +232,6 @@ def test_uninstall(self): self.run_pip.assert_called_once_with( [ "uninstall", "-y", "--disable-pip-version-check", "pip", - "setuptools", ] ) @@ -250,7 +242,6 @@ def test_uninstall_with_verbosity_1(self): self.run_pip.assert_called_once_with( [ "uninstall", "-y", "--disable-pip-version-check", "-v", "pip", - "setuptools", ] ) @@ -261,7 +252,6 @@ def test_uninstall_with_verbosity_2(self): self.run_pip.assert_called_once_with( [ "uninstall", "-y", "--disable-pip-version-check", "-vv", "pip", - "setuptools", ] ) @@ -272,7 +262,7 @@ def test_uninstall_with_verbosity_3(self): self.run_pip.assert_called_once_with( [ "uninstall", "-y", "--disable-pip-version-check", "-vvv", - "pip", "setuptools", + "pip" ] ) @@ -312,13 +302,13 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ "install", "--no-cache-dir", "--no-index", "--find-links", - unittest.mock.ANY, "setuptools", "pip", + unittest.mock.ANY, "pip", ], unittest.mock.ANY, ) additional_paths = self.run_pip.call_args[0][1] - self.assertEqual(len(additional_paths), 2) + self.assertEqual(len(additional_paths), 1) self.assertEqual(exit_code, 0) def test_bootstrapping_error_code(self): @@ -344,7 +334,6 @@ def test_basic_uninstall(self): self.run_pip.assert_called_once_with( [ "uninstall", "-y", "--disable-pip-version-check", "pip", - "setuptools", ] ) diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index a11bb441f06e8e..fb7a016c9007f8 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -11,7 +11,7 @@ import builtins as bltns from collections import OrderedDict from datetime import date -from enum import Enum, IntEnum, StrEnum, EnumType, Flag, IntFlag, unique, auto +from enum import Enum, EnumMeta, IntEnum, StrEnum, EnumType, Flag, IntFlag, unique, auto from enum import STRICT, CONFORM, EJECT, KEEP, _simple_enum, _test_simple_enum from enum import verify, UNIQUE, CONTINUOUS, NAMED_FLAGS, ReprEnum from enum import member, nonmember, _iter_bits_lsb @@ -270,6 +270,17 @@ class NewSubEnum(NewBaseEnum): first = auto() self.NewSubEnum = NewSubEnum # + class LazyGNV(self.enum_type): + def _generate_next_value_(name, start, last, values): + pass + self.LazyGNV = LazyGNV + # + class BusyGNV(self.enum_type): + @staticmethod + def _generate_next_value_(name, start, last, values): + pass + self.BusyGNV = BusyGNV + # self.is_flag = False self.names = ['first', 'second', 'third'] if issubclass(MainEnum, StrEnum): @@ -466,6 +477,12 @@ def test_enum_in_enum_out(self): Main = self.MainEnum self.assertIs(Main(Main.first), Main.first) + def test_gnv_is_static(self): + lazy = self.LazyGNV + busy = self.BusyGNV + self.assertTrue(type(lazy.__dict__['_generate_next_value_']) is staticmethod) + self.assertTrue(type(busy.__dict__['_generate_next_value_']) is staticmethod) + def test_hash(self): MainEnum = self.MainEnum mapping = {} @@ -644,6 +661,13 @@ class MySubEnum(MyEnum): theother = auto() self.assertEqual(repr(MySubEnum.that), "My name is that.") + def 
test_multiple_superclasses_repr(self): + class _EnumSuperClass(metaclass=EnumMeta): + pass + class E(_EnumSuperClass, Enum): + A = 1 + self.assertEqual(repr(E.A), "<E.A: 1>") + def test_reversed_iteration_order(self): self.assertEqual( list(reversed(self.MainEnum)), @@ -795,10 +819,27 @@ class TestPlainFlag(_EnumTests, _PlainOutputTests, _FlagTests, unittest.TestCase class TestIntEnum(_EnumTests, _MinimalOutputTests, unittest.TestCase): enum_type = IntEnum + # + def test_shadowed_attr(self): + class Number(IntEnum): + divisor = 1 + numerator = 2 + # + self.assertEqual(Number.divisor.numerator, 1) + self.assertIs(Number.numerator.divisor, Number.divisor) class TestStrEnum(_EnumTests, _MinimalOutputTests, unittest.TestCase): enum_type = StrEnum + # + def test_shadowed_attr(self): + class Book(StrEnum): + author = 'author' + title = 'title' + # + self.assertEqual(Book.author.title(), 'Author') + self.assertEqual(Book.title.title(), 'Title') + self.assertIs(Book.title.author, Book.author) class TestIntFlag(_EnumTests, _MinimalOutputTests, _FlagTests, unittest.TestCase): @@ -1362,7 +1403,6 @@ def repr(self): class Huh(MyStr, MyInt, Enum): One = 1 - def test_pickle_enum(self): if isinstance(Stooges, Exception): raise Stooges @@ -2663,28 +2703,15 @@ class Private(Enum): self.assertEqual(Private._Private__corporal, 'Radar') self.assertEqual(Private._Private__major_, 'Hoolihan') - @unittest.skipIf( - python_version <= (3, 13), - 'member.member access currently deprecated', - ) - def test_exception_for_member_from_member_access(self): - with self.assertRaisesRegex(AttributeError, "<enum .Di.> member has no attribute .NO."): - class Di(Enum): - YES = 1 - NO = 0 - nope = Di.YES.NO - - @unittest.skipIf( - python_version > (3, 13), - 'member.member access now raises', - ) - def test_warning_for_member_from_member_access(self): - with self.assertWarnsRegex(DeprecationWarning, '`member.member` access .* is deprecated and will be removed in 3.14'): - class Di(Enum): - YES = 1 - NO = 0 - warn = Di.YES.NO + def test_member_from_member_access(self): + class Di(Enum): + YES = 1 + NO = 0 + name = 3 + warn = Di.YES.NO self.assertIs(warn, Di.NO) + self.assertIs(Di.name, Di['name']) + self.assertEqual(Di.name.name, 'name') def test_dynamic_members_with_static_methods(self): # @@ -2727,10 +2754,10 @@ def __repr__(self): return 'ha hah!' 
class Entries(Foo, Enum): ENTRY1 = 1 + self.assertEqual(repr(Entries.ENTRY1), '<Entries.ENTRY1: ha hah!>') + self.assertTrue(Entries.ENTRY1.value == Foo(1), Entries.ENTRY1.value) self.assertTrue(isinstance(Entries.ENTRY1, Foo)) self.assertTrue(Entries._member_type_ is Foo, Entries._member_type_) - self.assertTrue(Entries.ENTRY1.value == Foo(1), Entries.ENTRY1.value) - self.assertEqual(repr(Entries.ENTRY1), '<Entries.ENTRY1: ha hah!>') # # check auto-generated dataclass __repr__ is not used # @@ -2777,8 +2804,7 @@ class Creature(CreatureDataMixin, Enum): DOG = ('medium', 4) self.assertRegex(repr(Creature.DOG), "<Creature.DOG: .*CreatureDataMixin object at .*>") - def test_repr_with_init_data_type_mixin(self): - # non-data_type is a mixin that doesn't define __new__ + def test_repr_with_init_mixin(self): class Foo: def __init__(self, a): self.a = a @@ -2787,9 +2813,9 @@ def __repr__(self): class Entries(Foo, Enum): ENTRY1 = 1 # - self.assertEqual(repr(Entries.ENTRY1), '<Entries.ENTRY1: Foo(a=1)>') + self.assertEqual(repr(Entries.ENTRY1), 'Foo(a=1)') - def test_repr_and_str_with_non_data_type_mixin(self): + def test_repr_and_str_with_no_init_mixin(self): # non-data_type is a mixin that doesn't define __new__ class Foo: def __repr__(self): @@ -2863,6 +2889,8 @@ def __new__(cls, c): # a = ord('a') # + self.assertEqual(FlagFromChar._all_bits_, 316912650057057350374175801343) + self.assertEqual(FlagFromChar._flag_mask_, 158456325028528675187087900672) self.assertEqual(FlagFromChar.a, 158456325028528675187087900672) self.assertEqual(FlagFromChar.a|1, 158456325028528675187087900673) # @@ -2877,6 +2905,8 @@ def __new__(cls, c): a = ord('a') z = 1 # + self.assertEqual(FlagFromChar._all_bits_, 316912650057057350374175801343) + self.assertEqual(FlagFromChar._flag_mask_, 158456325028528675187087900674) self.assertEqual(FlagFromChar.a.value, 158456325028528675187087900672) self.assertEqual((FlagFromChar.a|FlagFromChar.z).value, 158456325028528675187087900674) # @@ -2890,9 +2920,33 @@ def __new__(cls, c): # a = ord('a') # + self.assertEqual(FlagFromChar._all_bits_, 316912650057057350374175801343) + self.assertEqual(FlagFromChar._flag_mask_, 158456325028528675187087900672) self.assertEqual(FlagFromChar.a, 158456325028528675187087900672) self.assertEqual(FlagFromChar.a|1, 158456325028528675187087900673) + def test_init_exception(self): + class Base: + def __new__(cls, *args): + return object.__new__(cls) + def __init__(self, x): + raise ValueError("I don't like", x) + with self.assertRaises(TypeError): + class MyEnum(Base, enum.Enum): + A = 'a' + def __init__(self, y): + self.y = y + with self.assertRaises(ValueError): + class MyEnum(Base, enum.Enum): + A = 'a' + def __init__(self, y): + self.y = y + def __new__(cls, value): + member = Base.__new__(cls) + member._value_ = Base(value) + return member + + class TestOrder(unittest.TestCase): "test usage of the `_order_` attribute" @@ -3047,18 +3101,18 @@ def test_bool(self): self.assertEqual(bool(f.value), bool(f)) def test_boundary(self): - self.assertIs(enum.Flag._boundary_, CONFORM) - class Iron(Flag, boundary=STRICT): + self.assertIs(enum.Flag._boundary_, STRICT) + class Iron(Flag, boundary=CONFORM): ONE = 1 TWO = 2 EIGHT = 8 - self.assertIs(Iron._boundary_, STRICT) + self.assertIs(Iron._boundary_, CONFORM) # - class Water(Flag, boundary=CONFORM): + class Water(Flag, boundary=STRICT): ONE = 1 TWO = 2 EIGHT = 8 - self.assertIs(Water._boundary_, CONFORM) + self.assertIs(Water._boundary_, STRICT) # class Space(Flag, boundary=EJECT): ONE = 1 @@ -3071,10 
+3125,10 @@ class Bizarre(Flag, boundary=KEEP): c = 4 d = 6 # - self.assertRaisesRegex(ValueError, 'invalid value 7', Iron, 7) + self.assertRaisesRegex(ValueError, 'invalid value 7', Water, 7) # - self.assertIs(Water(7), Water.ONE|Water.TWO) - self.assertIs(Water(~9), Water.TWO) + self.assertIs(Iron(7), Iron.ONE|Iron.TWO) + self.assertIs(Iron(~9), Iron.TWO) # self.assertEqual(Space(7), 7) self.assertTrue(type(Space(7)) is int) @@ -3082,6 +3136,31 @@ class Bizarre(Flag, boundary=KEEP): self.assertEqual(list(Bizarre), [Bizarre.c]) self.assertIs(Bizarre(3), Bizarre.b) self.assertIs(Bizarre(6), Bizarre.d) + # + class SkipFlag(enum.Flag): + A = 1 + B = 2 + C = 4 | B + # + self.assertTrue(SkipFlag.C in (SkipFlag.A|SkipFlag.C)) + self.assertRaisesRegex(ValueError, 'SkipFlag.. invalid value 42', SkipFlag, 42) + # + class SkipIntFlag(enum.IntFlag): + A = 1 + B = 2 + C = 4 | B + # + self.assertTrue(SkipIntFlag.C in (SkipIntFlag.A|SkipIntFlag.C)) + self.assertEqual(SkipIntFlag(42).value, 42) + # + class MethodHint(Flag): + HiddenText = 0x10 + DigitsOnly = 0x01 + LettersOnly = 0x02 + OnlyMask = 0x0f + # + self.assertEqual(str(MethodHint.HiddenText|MethodHint.OnlyMask), 'MethodHint.HiddenText|DigitsOnly|LettersOnly|OnlyMask') + def test_iter(self): Color = self.Color diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py index abcbf046e2cc22..be4fd73bfdc36b 100644 --- a/Lib/test/test_eof.py +++ b/Lib/test/test_eof.py @@ -4,6 +4,7 @@ from test import support from test.support import os_helper from test.support import script_helper +from test.support import warnings_helper import unittest class EOFTestCase(unittest.TestCase): @@ -36,10 +37,11 @@ def test_EOFS_with_file(self): rc, out, err = script_helper.assert_python_failure(file_name) self.assertIn(b'unterminated triple-quoted string literal (detected at line 3)', err) + @warnings_helper.ignore_warnings(category=SyntaxWarning) def test_eof_with_line_continuation(self): expect = "unexpected EOF while parsing (<string>, line 1)" try: - compile('"\\xhh" \\', '<string>', 'exec', dont_inherit=True) + compile('"\\Xhh" \\', '<string>', 'exec') except SyntaxError as msg: self.assertEqual(str(msg), expect) else: diff --git a/Lib/test/test_except_star.py b/Lib/test/test_except_star.py index c5167c5bba38af..c49c6008e08e8c 100644 --- a/Lib/test/test_except_star.py +++ b/Lib/test/test_except_star.py @@ -1,6 +1,7 @@ import sys import unittest import textwrap +from test.support.testcase import ExceptionIsLikeMixin class TestInvalidExceptStar(unittest.TestCase): def test_mixed_except_and_except_star_is_syntax_error(self): @@ -169,26 +170,7 @@ def f(x): self.assertIsInstance(exc, ExceptionGroup) -class ExceptStarTest(unittest.TestCase): - def assertExceptionIsLike(self, exc, template): - if exc is None and template is None: - return - - if template is None: - self.fail(f"unexpected exception: {exc}") - - if exc is None: - self.fail(f"expected an exception like {template!r}, got None") - - if not isinstance(exc, ExceptionGroup): - self.assertEqual(exc.__class__, template.__class__) - self.assertEqual(exc.args[0], template.args[0]) - else: - self.assertEqual(exc.message, template.message) - self.assertEqual(len(exc.exceptions), len(template.exceptions)) - for e, t in zip(exc.exceptions, template.exceptions): - self.assertExceptionIsLike(e, t) - +class ExceptStarTest(ExceptionIsLikeMixin, unittest.TestCase): def assertMetadataEqual(self, e1, e2): if e1 is None or e2 is None: self.assertTrue(e1 is None and e2 is None) @@ -636,18 +618,17 @@ def 
test_raise_handle_all_raise_one_named(self): raise orig except* (TypeError, ValueError) as e: raise SyntaxError(3) - except BaseException as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) + self.assertMetadataEqual(orig, exc.__context__) def test_raise_handle_all_raise_one_unnamed(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) @@ -656,18 +637,17 @@ def test_raise_handle_all_raise_one_unnamed(self): raise orig except* (TypeError, ValueError) as e: raise SyntaxError(3) - except ExceptionGroup as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) + self.assertMetadataEqual(orig, exc.__context__) def test_raise_handle_all_raise_two_named(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) @@ -791,23 +771,22 @@ def test_raise_handle_all_raise_one_named(self): raise orig except* (TypeError, ValueError) as e: raise SyntaxError(3) from e - except BaseException as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertExceptionIsLike( - exc.exceptions[0].__cause__, + exc.__cause__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) - self.assertMetadataEqual(orig, exc.exceptions[0].__cause__) + self.assertMetadataEqual(orig, exc.__context__) + self.assertMetadataEqual(orig, exc.__cause__) def test_raise_handle_all_raise_one_unnamed(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) @@ -817,23 +796,22 @@ def test_raise_handle_all_raise_one_unnamed(self): except* (TypeError, ValueError) as e: e = sys.exception() raise SyntaxError(3) from e - except ExceptionGroup as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertExceptionIsLike( - exc.exceptions[0].__cause__, + exc.__cause__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) - self.assertMetadataEqual(orig, exc.exceptions[0].__cause__) + self.assertMetadataEqual(orig, exc.__context__) + self.assertMetadataEqual(orig, exc.__cause__) def test_raise_handle_all_raise_two_named(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) diff --git a/Lib/test/test_exception_group.py b/Lib/test/test_exception_group.py index b11524e778e665..fa159a76ec1aff 100644 --- a/Lib/test/test_exception_group.py +++ 
b/Lib/test/test_exception_group.py @@ -102,6 +102,20 @@ class MyEG(BaseExceptionGroup, ValueError): with self.assertRaisesRegex(TypeError, msg): MyEG("eg", [ValueError(12), KeyboardInterrupt(42)]) + def test_EG_and_specific_subclass_can_wrap_any_nonbase_exception(self): + class MyEG(ExceptionGroup, ValueError): + pass + + # The restriction is specific to Exception, not "the other base class" + MyEG("eg", [ValueError(12), Exception()]) + + def test_BEG_and_specific_subclass_can_wrap_any_nonbase_exception(self): + class MyEG(BaseExceptionGroup, ValueError): + pass + + # The restriction is specific to Exception, not "the other base class" + MyEG("eg", [ValueError(12), Exception()]) + def test_BEG_subclass_wraps_anything(self): class MyBEG(BaseExceptionGroup): diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 4ae71e431c56dc..4ef7decfbc263e 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -155,6 +155,7 @@ def ckmsg(src, msg): ckmsg(s, "'continue' not properly in loop") ckmsg("continue\n", "'continue' not properly in loop") + ckmsg("f'{6 0}'", "invalid syntax. Perhaps you forgot a comma?") def testSyntaxErrorMissingParens(self): def ckmsg(src, msg, exception=SyntaxError): @@ -227,7 +228,7 @@ def testSyntaxErrorOffset(self): check('Python = "\u1e54\xfd\u0163\u0125\xf2\xf1" +', 1, 20) check(b'# -*- coding: cp1251 -*-\nPython = "\xcf\xb3\xf2\xee\xed" +', 2, 19, encoding='cp1251') - check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 18) + check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 10) check('x = "a', 1, 5) check('lambda x: x = 2', 1, 1) check('f{a + b + c}', 1, 2) @@ -334,8 +335,7 @@ def test_capi1(): try: _testcapi.raise_exception(BadException, 1) except TypeError as err: - exc, err, tb = sys.exc_info() - co = tb.tb_frame.f_code + co = err.__traceback__.tb_frame.f_code self.assertEqual(co.co_name, "test_capi1") self.assertTrue(co.co_filename.endswith('test_exceptions.py')) else: @@ -346,8 +346,7 @@ def test_capi2(): try: _testcapi.raise_exception(BadException, 0) except RuntimeError as err: - exc, err, tb = sys.exc_info() - tb = tb.tb_next + tb = err.__traceback__.tb_next co = tb.tb_frame.f_code self.assertEqual(co.co_name, "__init__") self.assertTrue(co.co_filename.endswith('test_exceptions.py')) @@ -599,8 +598,8 @@ def test_notes(self): def testWithTraceback(self): try: raise IndexError(4) - except: - tb = sys.exc_info()[2] + except Exception as e: + tb = e.__traceback__ e = BaseException().with_traceback(tb) self.assertIsInstance(e, BaseException) @@ -653,8 +652,8 @@ def test_invalid_delattr(self): def testNoneClearsTracebackAttr(self): try: raise IndexError(4) - except: - tb = sys.exc_info()[2] + except Exception as e: + tb = e.__traceback__ e = Exception() e.__traceback__ = tb @@ -888,28 +887,28 @@ def yield_raise(): try: raise KeyError("caught") except KeyError: - yield sys.exc_info()[0] - yield sys.exc_info()[0] - yield sys.exc_info()[0] + yield sys.exception() + yield sys.exception() + yield sys.exception() g = yield_raise() - self.assertEqual(next(g), KeyError) - self.assertEqual(sys.exc_info()[0], None) - self.assertEqual(next(g), KeyError) - self.assertEqual(sys.exc_info()[0], None) - self.assertEqual(next(g), None) + self.assertIsInstance(next(g), KeyError) + self.assertIsNone(sys.exception()) + self.assertIsInstance(next(g), KeyError) + self.assertIsNone(sys.exception()) + self.assertIsNone(next(g)) # Same test, but inside an exception handler try: raise TypeError("foo") except TypeError: g = yield_raise() - 
self.assertEqual(next(g), KeyError) - self.assertEqual(sys.exc_info()[0], TypeError) - self.assertEqual(next(g), KeyError) - self.assertEqual(sys.exc_info()[0], TypeError) - self.assertEqual(next(g), TypeError) + self.assertIsInstance(next(g), KeyError) + self.assertIsInstance(sys.exception(), TypeError) + self.assertIsInstance(next(g), KeyError) + self.assertIsInstance(sys.exception(), TypeError) + self.assertIsInstance(next(g), TypeError) del g - self.assertEqual(sys.exc_info()[0], TypeError) + self.assertIsInstance(sys.exception(), TypeError) def test_generator_leaking2(self): # See issue 12475. @@ -924,7 +923,7 @@ def g(): next(it) except StopIteration: pass - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def test_generator_leaking3(self): # See issue #23353. When gen.throw() is called, the caller's @@ -933,17 +932,17 @@ def g(): try: yield except ZeroDivisionError: - yield sys.exc_info()[1] + yield sys.exception() it = g() next(it) try: 1/0 except ZeroDivisionError as e: - self.assertIs(sys.exc_info()[1], e) + self.assertIs(sys.exception(), e) gen_exc = it.throw(e) - self.assertIs(sys.exc_info()[1], e) + self.assertIs(sys.exception(), e) self.assertIs(gen_exc, e) - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def test_generator_leaking4(self): # See issue #23353. When an exception is raised by a generator, @@ -952,7 +951,7 @@ def g(): try: 1/0 except ZeroDivisionError: - yield sys.exc_info()[0] + yield sys.exception() raise it = g() try: @@ -960,7 +959,7 @@ def g(): except TypeError: # The caller's exception state (TypeError) is temporarily # saved in the generator. - tp = next(it) + tp = type(next(it)) self.assertIs(tp, ZeroDivisionError) try: next(it) @@ -968,15 +967,15 @@ def g(): # with an exception, it shouldn't have restored the old # exception state (TypeError). except ZeroDivisionError as e: - self.assertIs(sys.exc_info()[1], e) + self.assertIs(sys.exception(), e) # We used to find TypeError here. - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def test_generator_doesnt_retain_old_exc(self): def g(): - self.assertIsInstance(sys.exc_info()[1], RuntimeError) + self.assertIsInstance(sys.exception(), RuntimeError) yield - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) it = g() try: raise RuntimeError @@ -984,7 +983,7 @@ def g(): next(it) self.assertRaises(StopIteration, next, it) - def test_generator_finalizing_and_exc_info(self): + def test_generator_finalizing_and_sys_exception(self): # See #7173 def simple_gen(): yield 1 @@ -996,7 +995,7 @@ def run_gen(): return next(gen) run_gen() gc_collect() - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def _check_generator_cleanup_exc_state(self, testfunc): # Issue #12791: exception state is cleaned up as soon as a generator @@ -1067,14 +1066,14 @@ def test_3114(self): class MyObject: def __del__(self): nonlocal e - e = sys.exc_info() + e = sys.exception() e = () try: raise Exception(MyObject()) except: pass gc_collect() # For PyPy or other GCs. 
- self.assertEqual(e, (None, None, None)) + self.assertIsNone(e) def test_raise_does_not_create_context_chain_cycle(self): class A(Exception): @@ -1135,7 +1134,6 @@ def cycle(): self.assertIsInstance(exc.__context__, ValueError) self.assertIs(exc.__context__.__context__, exc.__context__) - @unittest.skip("See issue 44895") def test_no_hang_on_context_chain_cycle2(self): # See issue 25782. Cycle at head of context chain. @@ -1337,11 +1335,11 @@ class MyException(Exception, metaclass=Meta): def g(): try: return g() - except RecursionError: - return sys.exc_info() - e, v, tb = g() - self.assertIsInstance(v, RecursionError, type(v)) - self.assertIn("maximum recursion depth exceeded", str(v)) + except RecursionError as e: + return e + exc = g() + self.assertIsInstance(exc, RecursionError, type(exc)) + self.assertIn("maximum recursion depth exceeded", str(exc)) @cpython_only @@ -1693,7 +1691,7 @@ def g(): raise ValueError except ValueError: yield 1 - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) yield 2 gen = g() diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py index 69b0d5f1c5a515..6fa49dbc0b730c 100644 --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -619,6 +619,8 @@ def test_specifier_z_error(self): error_msg = re.escape("unsupported format character 'z'") with self.assertRaisesRegex(ValueError, error_msg): "%z.1f" % 0 # not allowed in old style string interpolation + with self.assertRaisesRegex(ValueError, error_msg): + b"%z.1f" % 0 if __name__ == "__main__": diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index b3f6ef41d77b8f..5c5176dc54a6d9 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -13,6 +13,7 @@ import types import decimal import unittest +from test import support from test.support.os_helper import temp_cwd from test.support.script_helper import assert_python_failure @@ -329,13 +330,13 @@ def test_ast_line_numbers_multiline_fstring(self): self.assertEqual(t.body[1].lineno, 3) self.assertEqual(t.body[1].value.lineno, 3) self.assertEqual(t.body[1].value.values[0].lineno, 3) - self.assertEqual(t.body[1].value.values[1].lineno, 3) - self.assertEqual(t.body[1].value.values[2].lineno, 3) + self.assertEqual(t.body[1].value.values[1].lineno, 4) + self.assertEqual(t.body[1].value.values[2].lineno, 6) self.assertEqual(t.body[1].col_offset, 0) self.assertEqual(t.body[1].value.col_offset, 0) - self.assertEqual(t.body[1].value.values[0].col_offset, 0) - self.assertEqual(t.body[1].value.values[1].col_offset, 0) - self.assertEqual(t.body[1].value.values[2].col_offset, 0) + self.assertEqual(t.body[1].value.values[0].col_offset, 4) + self.assertEqual(t.body[1].value.values[1].col_offset, 2) + self.assertEqual(t.body[1].value.values[2].col_offset, 11) # NOTE: the following lineno information and col_offset is correct for # expressions within FormattedValues. 
binop = t.body[1].value.values[1].value @@ -366,13 +367,13 @@ def test_ast_line_numbers_multiline_fstring(self): self.assertEqual(t.body[0].lineno, 2) self.assertEqual(t.body[0].value.lineno, 2) self.assertEqual(t.body[0].value.values[0].lineno, 2) - self.assertEqual(t.body[0].value.values[1].lineno, 2) - self.assertEqual(t.body[0].value.values[2].lineno, 2) + self.assertEqual(t.body[0].value.values[1].lineno, 3) + self.assertEqual(t.body[0].value.values[2].lineno, 3) self.assertEqual(t.body[0].col_offset, 0) self.assertEqual(t.body[0].value.col_offset, 4) - self.assertEqual(t.body[0].value.values[0].col_offset, 4) - self.assertEqual(t.body[0].value.values[1].col_offset, 4) - self.assertEqual(t.body[0].value.values[2].col_offset, 4) + self.assertEqual(t.body[0].value.values[0].col_offset, 8) + self.assertEqual(t.body[0].value.values[1].col_offset, 10) + self.assertEqual(t.body[0].value.values[2].col_offset, 17) # Check {blech} self.assertEqual(t.body[0].value.values[1].value.lineno, 3) self.assertEqual(t.body[0].value.values[1].value.end_lineno, 3) @@ -387,6 +388,20 @@ def test_ast_line_numbers_with_parentheses(self): t = ast.parse(expr) self.assertEqual(type(t), ast.Module) self.assertEqual(len(t.body), 1) + # check the joinedstr location + joinedstr = t.body[0].value + self.assertEqual(type(joinedstr), ast.JoinedStr) + self.assertEqual(joinedstr.lineno, 3) + self.assertEqual(joinedstr.end_lineno, 3) + self.assertEqual(joinedstr.col_offset, 4) + self.assertEqual(joinedstr.end_col_offset, 17) + # check the formatted value location + fv = t.body[0].value.values[1] + self.assertEqual(type(fv), ast.FormattedValue) + self.assertEqual(fv.lineno, 3) + self.assertEqual(fv.end_lineno, 3) + self.assertEqual(fv.col_offset, 7) + self.assertEqual(fv.end_col_offset, 16) # check the test(t) location call = t.body[0].value.values[1].value self.assertEqual(type(call), ast.Call) @@ -397,6 +412,50 @@ def test_ast_line_numbers_with_parentheses(self): expr = """ x = ( + u'wat', + u"wat", + b'wat', + b"wat", + f'wat', + f"wat", +) + +y = ( + u'''wat''', + u\"\"\"wat\"\"\", + b'''wat''', + b\"\"\"wat\"\"\", + f'''wat''', + f\"\"\"wat\"\"\", +) + """ + t = ast.parse(expr) + self.assertEqual(type(t), ast.Module) + self.assertEqual(len(t.body), 2) + x, y = t.body + + # Check the single quoted string offsets first. + offsets = [ + (elt.col_offset, elt.end_col_offset) + for elt in x.value.elts + ] + self.assertTrue(all( + offset == (4, 10) + for offset in offsets + )) + + # Check the triple quoted string offsets. 
+ offsets = [ + (elt.col_offset, elt.end_col_offset) + for elt in y.value.elts + ] + self.assertTrue(all( + offset == (4, 14) + for offset in offsets + )) + + expr = """ +x = ( 'PERL_MM_OPT', ( f'wat' f'some_string={f(x)} ' @@ -415,9 +474,9 @@ def test_ast_line_numbers_with_parentheses(self): # check the first wat self.assertEqual(type(wat1), ast.Constant) self.assertEqual(wat1.lineno, 4) - self.assertEqual(wat1.end_lineno, 6) - self.assertEqual(wat1.col_offset, 12) - self.assertEqual(wat1.end_col_offset, 18) + self.assertEqual(wat1.end_lineno, 5) + self.assertEqual(wat1.col_offset, 14) + self.assertEqual(wat1.end_col_offset, 26) # check the call call = middle.value self.assertEqual(type(call), ast.Call) @@ -427,10 +486,14 @@ def test_ast_line_numbers_with_parentheses(self): self.assertEqual(call.end_col_offset, 31) # check the second wat self.assertEqual(type(wat2), ast.Constant) - self.assertEqual(wat2.lineno, 4) + self.assertEqual(wat2.lineno, 5) self.assertEqual(wat2.end_lineno, 6) - self.assertEqual(wat2.col_offset, 12) - self.assertEqual(wat2.end_col_offset, 18) + self.assertEqual(wat2.col_offset, 32) + # wat ends at the offset 17, but the whole f-string + # ends at the offset 18 (since the quote is part of the + # f-string but not the wat string) + self.assertEqual(wat2.end_col_offset, 17) + self.assertEqual(fstring.end_col_offset, 18) def test_docstring(self): def f(): @@ -467,36 +530,58 @@ def test_literal(self): self.assertEqual(f' ', ' ') def test_unterminated_string(self): - self.assertAllRaise(SyntaxError, 'f-string: unterminated string', + self.assertAllRaise(SyntaxError, 'unterminated string', [r"""f'{"x'""", r"""f'{"x}'""", r"""f'{("x'""", r"""f'{("x}'""", ]) + @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") def test_mismatched_parens(self): - self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' " + self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' " r"does not match opening parenthesis '\('", ["f'{((}'", ]) - self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\)' " + self.assertAllRaise(SyntaxError, r"closing parenthesis '\)' " r"does not match opening parenthesis '\['", ["f'{a[4)}'", ]) - self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\]' " + self.assertAllRaise(SyntaxError, r"closing parenthesis '\]' " r"does not match opening parenthesis '\('", ["f'{a(4]}'", ]) - self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' " + self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' " r"does not match opening parenthesis '\['", ["f'{a[4}'", ]) - self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' " + self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' " r"does not match opening parenthesis '\('", ["f'{a(4}'", ]) self.assertRaises(SyntaxError, eval, "f'{" + "("*500 + "}'") + def test_fstring_nested_too_deeply(self): + self.assertAllRaise(SyntaxError, + "f-string: expressions nested too deeply", + ['f"{1+2:{1+2:{1+1:{1}}}}"']) + + def create_nested_fstring(n): + if n == 0: + return "1+1" + prev = create_nested_fstring(n-1) + return f'f"{{{prev}}}"' + + self.assertAllRaise(SyntaxError, + "too many nested f-strings", + [create_nested_fstring(160)]) + + def test_syntax_error_in_nested_fstring(self): + # See gh-104016 for more information on this crash + self.assertAllRaise(SyntaxError, + "invalid syntax", + ['f"{1 1:' + ('{f"1:' * 199)]) + def test_double_braces(self): self.assertEqual(f'{{', '{') self.assertEqual(f'a{{', 'a{') @@ -559,8 +644,14 @@ def 
test_compile_time_concat(self): self.assertEqual(f'' '' f'', '') self.assertEqual(f'' '' f'' '', '') - self.assertAllRaise(SyntaxError, "f-string: expecting '}'", - ["f'{3' f'}'", # can't concat to get a valid f-string + # This is not really [f'{'] + [f'}'] since we treat the inside + # of braces as a purely new context, so it is actually f'{ and + # then eval(' f') (a valid expression) and then }' which would + # constitute a valid f-string. + self.assertEqual(f'{' f'}', ' f') + + self.assertAllRaise(SyntaxError, "expecting '}'", + ['''f'{3' f"}"''', # can't concat to get a valid f-string ]) def test_comments(self): @@ -618,25 +709,19 @@ def test_format_specifier_expressions(self): self.assertEqual(f'{-10:-{"#"}1{0}x}', ' -0xa') self.assertEqual(f'{-10:{"-"}#{1}0{"x"}}', ' -0xa') self.assertEqual(f'{10:#{3 != {4:5} and width}x}', ' 0xa') + self.assertEqual(f'result: {value:{width:{0}}.{precision:1}}', 'result: 12.35') - self.assertAllRaise(SyntaxError, - """f-string: invalid conversion character 'r{"': """ - """expected 's', 'r', or 'a'""", + self.assertAllRaise(SyntaxError, "f-string: expecting ':' or '}'", ["""f'{"s"!r{":10"}}'""", - # This looks like a nested format spec. ]) - self.assertAllRaise(SyntaxError, "f-string: invalid syntax", + self.assertAllRaise(SyntaxError, + "f-string: expecting a valid expression after '{'", [# Invalid syntax inside a nested spec. "f'{4:{/5}}'", ]) - self.assertAllRaise(SyntaxError, "f-string: expressions nested too deeply", - [# Can't nest format specifiers. - "f'result: {value:{width:{0}}.{precision:1}}'", - ]) - self.assertAllRaise(SyntaxError, 'f-string: invalid conversion character', [# No expansion inside conversion or for # the : or ! itself. @@ -655,7 +740,8 @@ def __format__(self, spec): self.assertEqual(f'{x} {x}', '1 2') def test_missing_expression(self): - self.assertAllRaise(SyntaxError, 'f-string: empty expression not allowed', + self.assertAllRaise(SyntaxError, + "f-string: valid expression required before '}'", ["f'{}'", "f'{ }'" "f' {} '", @@ -667,8 +753,8 @@ def test_missing_expression(self): "f'''{\t\f\r\n}'''", ]) - # Different error messages are raised when a specifier ('!', ':' or '=') is used after an empty expression - self.assertAllRaise(SyntaxError, "f-string: expression required before '!'", + self.assertAllRaise(SyntaxError, + "f-string: valid expression required before '!'", ["f'{!r}'", "f'{ !r}'", "f'{!}'", @@ -689,7 +775,8 @@ def test_missing_expression(self): "f'{ !xr:a}'", ]) - self.assertAllRaise(SyntaxError, "f-string: expression required before ':'", + self.assertAllRaise(SyntaxError, + "f-string: valid expression required before ':'", ["f'{:}'", "f'{ :!}'", "f'{:2}'", @@ -697,7 +784,8 @@ def test_missing_expression(self): "f'{:'", ]) - self.assertAllRaise(SyntaxError, "f-string: expression required before '='", + self.assertAllRaise(SyntaxError, + "f-string: valid expression required before '='", ["f'{=}'", "f'{ =}'", "f'{ =:}'", @@ -715,24 +803,18 @@ def test_missing_expression(self): def test_parens_in_expressions(self): self.assertEqual(f'{3,}', '(3,)') - # Add these because when an expression is evaluated, parens - # are added around it. But we shouldn't go from an invalid - # expression to a valid one. The added parens are just - # supposed to allow whitespace (including newlines). 
- self.assertAllRaise(SyntaxError, 'f-string: invalid syntax', + self.assertAllRaise(SyntaxError, + "f-string: expecting a valid expression after '{'", ["f'{,}'", - "f'{,}'", # this is (,), which is an error ]) self.assertAllRaise(SyntaxError, r"f-string: unmatched '\)'", ["f'{3)+(4}'", ]) - self.assertAllRaise(SyntaxError, 'unterminated string literal', - ["f'{\n}'", - ]) def test_newlines_before_syntax_error(self): - self.assertAllRaise(SyntaxError, "invalid syntax", + self.assertAllRaise(SyntaxError, + "f-string: expecting a valid expression after '{'", ["f'{.}'", "\nf'{.}'", "\n\nf'{.}'"]) def test_backslashes_in_string_part(self): @@ -776,7 +858,7 @@ def test_backslashes_in_string_part(self): self.assertEqual(f'2\x203', '2 3') self.assertEqual(f'\x203', ' 3') - with self.assertWarns(SyntaxWarning): # invalid escape sequence + with self.assertWarns(DeprecationWarning): # invalid escape sequence value = eval(r"f'\{6*7}'") self.assertEqual(value, '\\42') self.assertEqual(f'\\{6*7}', '\\42') @@ -809,18 +891,40 @@ def test_misformed_unicode_character_name(self): r"'\N{GREEK CAPITAL LETTER DELTA'", ]) - def test_no_backslashes_in_expression_part(self): - self.assertAllRaise(SyntaxError, 'f-string expression part cannot include a backslash', - [r"f'{\'a\'}'", - r"f'{\t3}'", - r"f'{\}'", - r"rf'{\'a\'}'", - r"rf'{\t3}'", - r"rf'{\}'", - r"""rf'{"\N{LEFT CURLY BRACKET}"}'""", - r"f'{\n}'", + def test_backslashes_in_expression_part(self): + self.assertEqual(f"{( + 1 + + 2 + )}", "3") + + self.assertEqual("\N{LEFT CURLY BRACKET}", '{') + self.assertEqual(f'{"\N{LEFT CURLY BRACKET}"}', '{') + self.assertEqual(rf'{"\N{LEFT CURLY BRACKET}"}', '{') + + self.assertAllRaise(SyntaxError, + "f-string: valid expression required before '}'", + ["f'{\n}'", ]) + def test_invalid_backslashes_inside_fstring_context(self): + # All of these variations are invalid python syntax, + # so they are also invalid in f-strings as well. + cases = [ + formatting.format(expr=expr) + for formatting in [ + "{expr}", + "f'{{{expr}}}'", + "rf'{{{expr}}}'", + ] + for expr in [ + r"\'a\'", + r"\t3", + r"\\"[0], + ] + ] + self.assertAllRaise(SyntaxError, 'unexpected character after line continuation', + cases) + def test_no_escapes_for_braces(self): """ Only literal curly braces begin an expression. 
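# Illustrative sketch (assumes a 3.12+ interpreter with the new PEP 701 f-string
# parser, which is the behaviour the test_fstring.py hunks above exercise):
# backslashes, reused quotes and newlines are now valid inside replacement fields.
assert f'{"\N{LEFT CURLY BRACKET}"}' == '{'  # escape sequence inside the expression part
assert f"{(
    1 + 2
)}" == "3"  # the expression may span several lines inside the braces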
@@ -843,11 +947,67 @@ def test_lambda(self): self.assertEqual(f'{(lambda y:x*y)("8"):10}', "88888 ") # lambda doesn't work without parens, because the colon - # makes the parser think it's a format_spec - self.assertAllRaise(SyntaxError, 'f-string: invalid syntax', + # makes the parser think it's a format_spec + # emit warning if we can match a format_spec + self.assertAllRaise(SyntaxError, + "f-string: lambda expressions are not allowed " + "without parentheses", ["f'{lambda x:x}'", + "f'{lambda :x}'", + "f'{lambda *arg, :x}'", + "f'{1, lambda:x}'", + "f'{lambda x:}'", + "f'{lambda :}'", ]) + # but don't emit the paren warning in general cases + with self.assertRaisesRegex(SyntaxError, "f-string: expecting a valid expression after '{'"): + eval("f'{+ lambda:None}'") + + def test_valid_prefixes(self): + self.assertEqual(F'{1}', "1") + self.assertEqual(FR'{2}', "2") + self.assertEqual(fR'{3}', "3") + + def test_roundtrip_raw_quotes(self): + self.assertEqual(fr"\'", "\\'") + self.assertEqual(fr'\"', '\\"') + self.assertEqual(fr'\"\'', '\\"\\\'') + self.assertEqual(fr'\'\"', '\\\'\\"') + self.assertEqual(fr'\"\'\"', '\\"\\\'\\"') + self.assertEqual(fr'\'\"\'', '\\\'\\"\\\'') + self.assertEqual(fr'\"\'\"\'', '\\"\\\'\\"\\\'') + + def test_fstring_backslash_before_double_bracket(self): + self.assertEqual(f'\{{\}}', '\\{\\}') + self.assertEqual(f'\{{', '\\{') + self.assertEqual(f'\{{{1+1}', '\\{2') + self.assertEqual(f'\}}{1+1}', '\\}2') + self.assertEqual(f'{1+1}\}}', '2\\}') + self.assertEqual(fr'\{{\}}', '\\{\\}') + self.assertEqual(fr'\{{', '\\{') + self.assertEqual(fr'\{{{1+1}', '\\{2') + self.assertEqual(fr'\}}{1+1}', '\\}2') + self.assertEqual(fr'{1+1}\}}', '2\\}') + + def test_fstring_backslash_prefix_raw(self): + self.assertEqual(f'\\', '\\') + self.assertEqual(f'\\\\', '\\\\') + self.assertEqual(fr'\\', r'\\') + self.assertEqual(fr'\\\\', r'\\\\') + self.assertEqual(rf'\\', r'\\') + self.assertEqual(rf'\\\\', r'\\\\') + self.assertEqual(Rf'\\', R'\\') + self.assertEqual(Rf'\\\\', R'\\\\') + self.assertEqual(fR'\\', R'\\') + self.assertEqual(fR'\\\\', R'\\\\') + self.assertEqual(FR'\\', R'\\') + self.assertEqual(FR'\\\\', R'\\\\') + + def test_fstring_format_spec_greedy_matching(self): + self.assertEqual(f"{1:}}}", "1}") + self.assertEqual(f"{1:>3{5}}}}", " 1}") + def test_yield(self): # Not terribly useful, but make sure the yield turns # a function into a generator @@ -1037,6 +1197,11 @@ def test_conversions(self): self.assertEqual(f'{"a"!r}', "'a'") self.assertEqual(f'{"a"!a}', "'a'") + # Conversions can have trailing whitespace after them since it + # does not provide any significance + self.assertEqual(f"{3!s }", "3") + self.assertEqual(f'{3.14!s :10.10}', '3.14 ') + # Not a conversion. self.assertEqual(f'{"a!r"}', "a!r") @@ -1049,16 +1214,27 @@ def test_conversions(self): "f'{3!g'", ]) - self.assertAllRaise(SyntaxError, 'f-string: missed conversion character', + self.assertAllRaise(SyntaxError, 'f-string: missing conversion character', ["f'{3!}'", "f'{3!:'", "f'{3!:}'", ]) - for conv in 'g', 'A', '3', 'G', '!', ' s', 's ', ' s ', 'ä', 'ɐ', 'ª': + for conv_identifier in 'g', 'A', 'G', 'ä', 'ɐ': self.assertAllRaise(SyntaxError, "f-string: invalid conversion character %r: " - "expected 's', 'r', or 'a'" % conv, + "expected 's', 'r', or 'a'" % conv_identifier, + ["f'{3!" + conv_identifier + "}'"]) + + for conv_non_identifier in '3', '!': + self.assertAllRaise(SyntaxError, + "f-string: invalid conversion character", + ["f'{3!" 
+ conv_non_identifier + "}'"]) + + for conv in ' s', ' s ': + self.assertAllRaise(SyntaxError, + "f-string: conversion type must come right after the" + " exclamanation mark", ["f'{3!" + conv + "}'"]) self.assertAllRaise(SyntaxError, @@ -1097,8 +1273,7 @@ def test_mismatched_braces(self): ]) self.assertAllRaise(SyntaxError, "f-string: expecting '}'", - ["f'{3:{{>10}'", - "f'{3'", + ["f'{3'", "f'{3!'", "f'{3:'", "f'{3!s'", @@ -1111,11 +1286,14 @@ def test_mismatched_braces(self): "f'{{{'", "f'{{}}{'", "f'{'", - "f'x{<'", # See bpo-46762. - "f'x{>'", "f'{i='", # See gh-93418. ]) + self.assertAllRaise(SyntaxError, + "f-string: expecting a valid expression after '{'", + ["f'{3:{{>10}'", + ]) + # But these are just normal strings. self.assertEqual(f'{"{"}', '{') self.assertEqual(f'{"}"}', '}') @@ -1314,6 +1492,7 @@ def __repr__(self): self.assertEqual(f'X{x =}Y', 'Xx ='+repr(x)+'Y') self.assertEqual(f'X{x= }Y', 'Xx= '+repr(x)+'Y') self.assertEqual(f'X{x = }Y', 'Xx = '+repr(x)+'Y') + self.assertEqual(f"sadsd {1 + 1 = :{1 + 1:1d}f}", "sadsd 1 + 1 = 2.000000") # These next lines contains tabs. Backslash escapes don't # work in f-strings. @@ -1335,7 +1514,8 @@ def test_walrus(self): self.assertEqual(x, 10) def test_invalid_syntax_error_message(self): - with self.assertRaisesRegex(SyntaxError, "f-string: invalid syntax"): + with self.assertRaisesRegex(SyntaxError, + "f-string: expecting '=', or '!', or ':', or '}'"): compile("f'{a $ b}'", "?", "exec") def test_with_two_commas_in_format_specifier(self): @@ -1359,13 +1539,31 @@ def test_with_an_underscore_and_a_comma_in_format_specifier(self): f'{1:_,}' def test_syntax_error_for_starred_expressions(self): - error_msg = re.escape("cannot use starred expression here") - with self.assertRaisesRegex(SyntaxError, error_msg): + with self.assertRaisesRegex(SyntaxError, "can't use starred expression here"): compile("f'{*a}'", "?", "exec") - error_msg = re.escape("cannot use double starred expression here") - with self.assertRaisesRegex(SyntaxError, error_msg): + with self.assertRaisesRegex(SyntaxError, + "f-string: expecting a valid expression after '{'"): compile("f'{**a}'", "?", "exec") + def test_not_closing_quotes(self): + self.assertAllRaise(SyntaxError, "unterminated f-string literal", ['f"', "f'"]) + self.assertAllRaise(SyntaxError, "unterminated triple-quoted f-string literal", + ['f"""', "f'''"]) + + def test_syntax_error_after_debug(self): + self.assertAllRaise(SyntaxError, "f-string: expecting a valid expression after '{'", + [ + "f'{1=}{;'", + "f'{1=}{+;'", + "f'{1=}{2}{;'", + "f'{1=}{3}{;'", + ]) + self.assertAllRaise(SyntaxError, "f-string: expecting '=', or '!', or ':', or '}'", + [ + "f'{1=}{1;'", + "f'{1=}{1;}'", + ]) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 57db96d37ee369..af286052a7d560 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -2980,7 +2980,7 @@ class MyClass(metaclass=MyMeta): def test_reuse_different_names(self): """Disallow this case because decorated function a would not be cached.""" - with self.assertRaises(RuntimeError) as ctx: + with self.assertRaises(TypeError) as ctx: class ReusedCachedProperty: @py_functools.cached_property def a(self): @@ -2989,7 +2989,7 @@ def a(self): b = a self.assertEqual( - str(ctx.exception.__context__), + str(ctx.exception), str(TypeError("Cannot assign the same cached_property to two different names ('a' and 'b').")) ) diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py index 
0f39b8f45714ad..311a864a52387d 100644 --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -962,7 +962,7 @@ def test_wrapper_call(self): cmd = textwrap.dedent(''' class MyList(list): def __init__(self): - super().__init__() # wrapper_call() + super(*[]).__init__() # wrapper_call() id("first break point") l = MyList() diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 492b77a954d865..cc782ea1ee5dff 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -225,7 +225,22 @@ def f(): gi = f() self.assertIsNone(gi.gi_frame.f_back) + def test_issue103488(self): + def gen_raises(): + yield + raise ValueError() + + def loop(): + try: + for _ in gen_raises(): + if True is False: + return + except ValueError: + pass + + #This should not raise + loop() class ExceptionTest(unittest.TestCase): # Tests for the issue #23353: check that the currently handled exception @@ -234,16 +249,16 @@ class ExceptionTest(unittest.TestCase): def test_except_throw(self): def store_raise_exc_generator(): try: - self.assertEqual(sys.exc_info()[0], None) + self.assertIsNone(sys.exception()) yield except Exception as exc: # exception raised by gen.throw(exc) - self.assertEqual(sys.exc_info()[0], ValueError) + self.assertIsInstance(sys.exception(), ValueError) self.assertIsNone(exc.__context__) yield # ensure that the exception is not lost - self.assertEqual(sys.exc_info()[0], ValueError) + self.assertIsInstance(sys.exception(), ValueError) yield # we should be able to raise back the ValueError @@ -265,11 +280,11 @@ def store_raise_exc_generator(): next(make) self.assertIsNone(cm.exception.__context__) - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def test_except_next(self): def gen(): - self.assertEqual(sys.exc_info()[0], ValueError) + self.assertIsInstance(sys.exception(), ValueError) yield "done" g = gen() @@ -277,23 +292,23 @@ def gen(): raise ValueError except Exception: self.assertEqual(next(g), "done") - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def test_except_gen_except(self): def gen(): try: - self.assertEqual(sys.exc_info()[0], None) + self.assertIsNone(sys.exception()) yield # we are called from "except ValueError:", TypeError must # inherit ValueError in its context raise TypeError() except TypeError as exc: - self.assertEqual(sys.exc_info()[0], TypeError) + self.assertIsInstance(sys.exception(), TypeError) self.assertEqual(type(exc.__context__), ValueError) # here we are still called from the "except ValueError:" - self.assertEqual(sys.exc_info()[0], ValueError) + self.assertIsInstance(sys.exception(), ValueError) yield - self.assertIsNone(sys.exc_info()[0]) + self.assertIsNone(sys.exception()) yield "done" g = gen() @@ -304,7 +319,7 @@ def gen(): next(g) self.assertEqual(next(g), "done") - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def test_nested_gen_except_loop(self): def gen(): @@ -330,19 +345,19 @@ def test_except_throw_exception_context(self): def gen(): try: try: - self.assertEqual(sys.exc_info()[0], None) + self.assertIsNone(sys.exception()) yield except ValueError: # we are called from "except ValueError:" - self.assertEqual(sys.exc_info()[0], ValueError) + self.assertIsInstance(sys.exception(), ValueError) raise TypeError() except Exception as exc: - self.assertEqual(sys.exc_info()[0], TypeError) + self.assertIsInstance(sys.exception(), TypeError) self.assertEqual(type(exc.__context__), ValueError) # we are 
still called from "except ValueError:" - self.assertEqual(sys.exc_info()[0], ValueError) + self.assertIsInstance(sys.exception(), ValueError) yield - self.assertIsNone(sys.exc_info()[0]) + self.assertIsNone(sys.exception()) yield "done" g = gen() @@ -353,7 +368,7 @@ def gen(): g.throw(exc) self.assertEqual(next(g), "done") - self.assertEqual(sys.exc_info(), (None, None, None)) + self.assertIsNone(sys.exception()) def test_except_throw_bad_exception(self): class E(Exception): diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 9b59d1e3e0aad2..24d4216417521c 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -314,8 +314,11 @@ def test_parameter_chaining(self): with self.assertRaises(TypeError): list[int][int] + with self.assertRaises(TypeError): dict[T, int][str, int] + with self.assertRaises(TypeError): dict[str, T][str, int] + with self.assertRaises(TypeError): dict[T, T][str, int] def test_equality(self): diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index ced9000f75f2e5..ee105a3de17f8a 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -236,12 +236,9 @@ def check(test, error=False): check(f"[{num}for x in ()]") check(f"{num}spam", error=True) + with self.assertWarnsRegex(SyntaxWarning, r'invalid \w+ literal'): + compile(f"{num}is x", "<testcase>", "eval") with warnings.catch_warnings(): - warnings.filterwarnings('ignore', '"is" with a literal', - SyntaxWarning) - with self.assertWarnsRegex(SyntaxWarning, - r'invalid \w+ literal'): - compile(f"{num}is x", "<testcase>", "eval") warnings.simplefilter('error', SyntaxWarning) with self.assertRaisesRegex(SyntaxError, r'invalid \w+ literal'): @@ -1467,14 +1464,22 @@ def test_comparison(self): if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in x is x is not x: pass def test_comparison_is_literal(self): - def check(test, msg='"is" with a literal'): + def check(test, msg): self.check_syntax_warning(test, msg) - check('x is 1') - check('x is "thing"') - check('1 is x') - check('x is y is 1') - check('x is not 1', '"is not" with a literal') + check('x is 1', '"is" with \'int\' literal') + check('x is "thing"', '"is" with \'str\' literal') + check('1 is x', '"is" with \'int\' literal') + check('x is y is 1', '"is" with \'int\' literal') + check('x is not 1', '"is not" with \'int\' literal') + check('x is not (1, 2)', '"is not" with \'tuple\' literal') + check('(1, 2) is not x', '"is not" with \'tuple\' literal') + + check('None is 1', '"is" with \'int\' literal') + check('1 is None', '"is" with \'int\' literal') + + check('x == 3 is y', '"is" with \'int\' literal') + check('x == "thing" is y', '"is" with \'str\' literal') with warnings.catch_warnings(): warnings.simplefilter('error', SyntaxWarning) @@ -1482,6 +1487,10 @@ def check(test, msg='"is" with a literal'): compile('x is False', '<testcase>', 'exec') compile('x is True', '<testcase>', 'exec') compile('x is ...', '<testcase>', 'exec') + compile('None is x', '<testcase>', 'exec') + compile('False is x', '<testcase>', 'exec') + compile('True is x', '<testcase>', 'exec') + compile('... 
is x', '<testcase>', 'exec') def test_warn_missed_comma(self): def check(test): diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py index 7cf99735ca39f0..a39a2c45ebc2e2 100644 --- a/Lib/test/test_hmac.py +++ b/Lib/test/test_hmac.py @@ -373,6 +373,16 @@ def test_with_digestmod_no_default(self): with self.assertRaisesRegex(TypeError, r'required.*digestmod'): hmac.HMAC(key, msg=data, digestmod='') + def test_with_fallback(self): + cache = getattr(hashlib, '__builtin_constructor_cache') + try: + cache['foo'] = hashlib.sha256 + hexdigest = hmac.digest(b'key', b'message', 'foo').hex() + expected = '6e9ef29b75fffc5b7abae527d58fdadb2fe42e7219011976917343065f58ed4a' + self.assertEqual(hexdigest, expected) + finally: + cache.pop('foo') + class ConstructorTestCase(unittest.TestCase): diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index 9ff6afcbadec54..b4f4e2b14351a6 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -2187,11 +2187,12 @@ def test_getting_header_defaultint(self): class TunnelTests(TestCase): def setUp(self): response_text = ( - 'HTTP/1.0 200 OK\r\n\r\n' # Reply to CONNECT + 'HTTP/1.1 200 OK\r\n\r\n' # Reply to CONNECT 'HTTP/1.1 200 OK\r\n' # Reply to HEAD 'Content-Length: 42\r\n\r\n' ) self.host = 'proxy.com' + self.port = client.HTTP_PORT self.conn = client.HTTPConnection(self.host) self.conn._create_connection = self._create_connection(response_text) @@ -2203,15 +2204,45 @@ def create_connection(address, timeout=None, source_address=None): return FakeSocket(response_text, host=address[0], port=address[1]) return create_connection - def test_set_tunnel_host_port_headers(self): + def test_set_tunnel_host_port_headers_add_host_missing(self): tunnel_host = 'destination.com' tunnel_port = 8888 tunnel_headers = {'User-Agent': 'Mozilla/5.0 (compatible, MSIE 11)'} + tunnel_headers_after = tunnel_headers.copy() + tunnel_headers_after['Host'] = '%s:%d' % (tunnel_host, tunnel_port) self.conn.set_tunnel(tunnel_host, port=tunnel_port, headers=tunnel_headers) self.conn.request('HEAD', '/', '') self.assertEqual(self.conn.sock.host, self.host) - self.assertEqual(self.conn.sock.port, client.HTTP_PORT) + self.assertEqual(self.conn.sock.port, self.port) + self.assertEqual(self.conn._tunnel_host, tunnel_host) + self.assertEqual(self.conn._tunnel_port, tunnel_port) + self.assertEqual(self.conn._tunnel_headers, tunnel_headers_after) + + def test_set_tunnel_host_port_headers_set_host_identical(self): + tunnel_host = 'destination.com' + tunnel_port = 8888 + tunnel_headers = {'User-Agent': 'Mozilla/5.0 (compatible, MSIE 11)', + 'Host': '%s:%d' % (tunnel_host, tunnel_port)} + self.conn.set_tunnel(tunnel_host, port=tunnel_port, + headers=tunnel_headers) + self.conn.request('HEAD', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, self.port) + self.assertEqual(self.conn._tunnel_host, tunnel_host) + self.assertEqual(self.conn._tunnel_port, tunnel_port) + self.assertEqual(self.conn._tunnel_headers, tunnel_headers) + + def test_set_tunnel_host_port_headers_set_host_different(self): + tunnel_host = 'destination.com' + tunnel_port = 8888 + tunnel_headers = {'User-Agent': 'Mozilla/5.0 (compatible, MSIE 11)', + 'Host': '%s:%d' % ('example.com', 4200)} + self.conn.set_tunnel(tunnel_host, port=tunnel_port, + headers=tunnel_headers) + self.conn.request('HEAD', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, self.port) self.assertEqual(self.conn._tunnel_host, tunnel_host) 
self.assertEqual(self.conn._tunnel_port, tunnel_port) self.assertEqual(self.conn._tunnel_headers, tunnel_headers) @@ -2223,17 +2254,96 @@ def test_disallow_set_tunnel_after_connect(self): 'destination.com') def test_connect_with_tunnel(self): - self.conn.set_tunnel('destination.com') + d = { + b'host': b'destination.com', + b'port': client.HTTP_PORT, + } + self.conn.set_tunnel(d[b'host'].decode('ascii')) + self.conn.request('HEAD', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, self.port) + self.assertIn(b'CONNECT %(host)s:%(port)d HTTP/1.1\r\n' + b'Host: %(host)s:%(port)d\r\n\r\n' % d, + self.conn.sock.data) + self.assertIn(b'HEAD / HTTP/1.1\r\nHost: %(host)s\r\n' % d, + self.conn.sock.data) + + def test_connect_with_tunnel_with_default_port(self): + d = { + b'host': b'destination.com', + b'port': client.HTTP_PORT, + } + self.conn.set_tunnel(d[b'host'].decode('ascii'), port=d[b'port']) + self.conn.request('HEAD', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, self.port) + self.assertIn(b'CONNECT %(host)s:%(port)d HTTP/1.1\r\n' + b'Host: %(host)s:%(port)d\r\n\r\n' % d, + self.conn.sock.data) + self.assertIn(b'HEAD / HTTP/1.1\r\nHost: %(host)s\r\n' % d, + self.conn.sock.data) + + def test_connect_with_tunnel_with_nonstandard_port(self): + d = { + b'host': b'destination.com', + b'port': 8888, + } + self.conn.set_tunnel(d[b'host'].decode('ascii'), port=d[b'port']) + self.conn.request('HEAD', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, self.port) + self.assertIn(b'CONNECT %(host)s:%(port)d HTTP/1.1\r\n' + b'Host: %(host)s:%(port)d\r\n\r\n' % d, + self.conn.sock.data) + self.assertIn(b'HEAD / HTTP/1.1\r\nHost: %(host)s:%(port)d\r\n' % d, + self.conn.sock.data) + + # This request is not RFC-valid, but it's been possible with the library + # for years, so don't break it unexpectedly... This also tests + # case-insensitivity when injecting Host: headers if they're missing. 
+ def test_connect_with_tunnel_with_different_host_header(self): + d = { + b'host': b'destination.com', + b'tunnel_host_header': b'example.com:9876', + b'port': client.HTTP_PORT, + } + self.conn.set_tunnel( + d[b'host'].decode('ascii'), + headers={'HOST': d[b'tunnel_host_header'].decode('ascii')}) + self.conn.request('HEAD', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, self.port) + self.assertIn(b'CONNECT %(host)s:%(port)d HTTP/1.1\r\n' + b'HOST: %(tunnel_host_header)s\r\n\r\n' % d, + self.conn.sock.data) + self.assertIn(b'HEAD / HTTP/1.1\r\nHost: %(host)s\r\n' % d, + self.conn.sock.data) + + def test_connect_with_tunnel_different_host(self): + d = { + b'host': b'destination.com', + b'port': client.HTTP_PORT, + } + self.conn.set_tunnel(d[b'host'].decode('ascii')) + self.conn.request('HEAD', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, self.port) + self.assertIn(b'CONNECT %(host)s:%(port)d HTTP/1.1\r\n' + b'Host: %(host)s:%(port)d\r\n\r\n' % d, + self.conn.sock.data) + self.assertIn(b'HEAD / HTTP/1.1\r\nHost: %(host)s\r\n' % d, + self.conn.sock.data) + + def test_connect_with_tunnel_idna(self): + dest = '\u03b4\u03c0\u03b8.gr' + dest_port = b'%s:%d' % (dest.encode('idna'), client.HTTP_PORT) + expected = b'CONNECT %s HTTP/1.1\r\nHost: %s\r\n\r\n' % ( + dest_port, dest_port) + self.conn.set_tunnel(dest) self.conn.request('HEAD', '/', '') self.assertEqual(self.conn.sock.host, self.host) self.assertEqual(self.conn.sock.port, client.HTTP_PORT) - self.assertIn(b'CONNECT destination.com', self.conn.sock.data) - # issue22095 - self.assertNotIn(b'Host: destination.com:None', self.conn.sock.data) - self.assertIn(b'Host: destination.com', self.conn.sock.data) - - # This test should be removed when CONNECT gets the HTTP/1.1 blessing - self.assertNotIn(b'Host: proxy.com', self.conn.sock.data) + self.assertIn(expected, self.conn.sock.data) def test_tunnel_connect_single_send_connection_setup(self): """Regresstion test for https://bugs.python.org/issue43332.""" @@ -2253,12 +2363,19 @@ def test_tunnel_connect_single_send_connection_setup(self): msg=f'unexpected proxy data sent {proxy_setup_data_sent!r}') def test_connect_put_request(self): - self.conn.set_tunnel('destination.com') + d = { + b'host': b'destination.com', + b'port': client.HTTP_PORT, + } + self.conn.set_tunnel(d[b'host'].decode('ascii')) self.conn.request('PUT', '/', '') self.assertEqual(self.conn.sock.host, self.host) - self.assertEqual(self.conn.sock.port, client.HTTP_PORT) - self.assertIn(b'CONNECT destination.com', self.conn.sock.data) - self.assertIn(b'Host: destination.com', self.conn.sock.data) + self.assertEqual(self.conn.sock.port, self.port) + self.assertIn(b'CONNECT %(host)s:%(port)d HTTP/1.1\r\n' + b'Host: %(host)s:%(port)d\r\n\r\n' % d, + self.conn.sock.data) + self.assertIn(b'PUT / HTTP/1.1\r\nHost: %(host)s\r\n' % d, + self.conn.sock.data) def test_tunnel_debuglog(self): expected_header = 'X-Dummy: 1' diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py deleted file mode 100644 index 03e3adba221e57..00000000000000 --- a/Lib/test/test_imp.py +++ /dev/null @@ -1,1364 +0,0 @@ -import gc -import json -import importlib -import importlib.util -import os -import os.path -import py_compile -import sys -from test import support -from test.support import import_helper -from test.support import os_helper -from test.support import script_helper -from test.support import warnings_helper -import textwrap -import types -import 
unittest -import warnings -imp = warnings_helper.import_deprecated('imp') -import _imp -import _testinternalcapi -try: - import _xxsubinterpreters as _interpreters -except ModuleNotFoundError: - _interpreters = None - - -OS_PATH_NAME = os.path.__name__ - - -def requires_subinterpreters(meth): - """Decorator to skip a test if subinterpreters are not supported.""" - return unittest.skipIf(_interpreters is None, - 'subinterpreters required')(meth) - - -def requires_load_dynamic(meth): - """Decorator to skip a test if not running under CPython or lacking - imp.load_dynamic().""" - meth = support.cpython_only(meth) - return unittest.skipIf(getattr(imp, 'load_dynamic', None) is None, - 'imp.load_dynamic() required')(meth) - - -class ModuleSnapshot(types.SimpleNamespace): - """A representation of a module for testing. - - Fields: - - * id - the module's object ID - * module - the actual module or an adequate substitute - * __file__ - * __spec__ - * name - * origin - * ns - a copy (dict) of the module's __dict__ (or None) - * ns_id - the object ID of the module's __dict__ - * cached - the sys.modules[mod.__spec__.name] entry (or None) - * cached_id - the object ID of the sys.modules entry (or None) - - In cases where the value is not available (e.g. due to serialization), - the value will be None. - """ - _fields = tuple('id module ns ns_id cached cached_id'.split()) - - @classmethod - def from_module(cls, mod): - name = mod.__spec__.name - cached = sys.modules.get(name) - return cls( - id=id(mod), - module=mod, - ns=types.SimpleNamespace(**mod.__dict__), - ns_id=id(mod.__dict__), - cached=cached, - cached_id=id(cached), - ) - - SCRIPT = textwrap.dedent(''' - {imports} - - name = {name!r} - - {prescript} - - mod = {name} - - {body} - - {postscript} - ''') - IMPORTS = textwrap.dedent(''' - import sys - ''').strip() - SCRIPT_BODY = textwrap.dedent(''' - # Capture the snapshot data. - cached = sys.modules.get(name) - snapshot = dict( - id=id(mod), - module=dict( - __file__=mod.__file__, - __spec__=dict( - name=mod.__spec__.name, - origin=mod.__spec__.origin, - ), - ), - ns=None, - ns_id=id(mod.__dict__), - cached=None, - cached_id=id(cached) if cached else None, - ) - ''').strip() - CLEANUP_SCRIPT = textwrap.dedent(''' - # Clean up the module. - sys.modules.pop(name, None) - ''').strip() - - @classmethod - def build_script(cls, name, *, - prescript=None, - import_first=False, - postscript=None, - postcleanup=False, - ): - if postcleanup is True: - postcleanup = cls.CLEANUP_SCRIPT - elif isinstance(postcleanup, str): - postcleanup = textwrap.dedent(postcleanup).strip() - postcleanup = cls.CLEANUP_SCRIPT + os.linesep + postcleanup - else: - postcleanup = '' - prescript = textwrap.dedent(prescript).strip() if prescript else '' - postscript = textwrap.dedent(postscript).strip() if postscript else '' - - if postcleanup: - if postscript: - postscript = postscript + os.linesep * 2 + postcleanup - else: - postscript = postcleanup - - if import_first: - prescript += textwrap.dedent(f''' - - # Now import the module. 
- assert name not in sys.modules - import {name}''') - - return cls.SCRIPT.format( - imports=cls.IMPORTS.strip(), - name=name, - prescript=prescript.strip(), - body=cls.SCRIPT_BODY.strip(), - postscript=postscript, - ) - - @classmethod - def parse(cls, text): - raw = json.loads(text) - mod = raw['module'] - mod['__spec__'] = types.SimpleNamespace(**mod['__spec__']) - raw['module'] = types.SimpleNamespace(**mod) - return cls(**raw) - - @classmethod - def from_subinterp(cls, name, interpid=None, *, pipe=None, **script_kwds): - if pipe is not None: - return cls._from_subinterp(name, interpid, pipe, script_kwds) - pipe = os.pipe() - try: - return cls._from_subinterp(name, interpid, pipe, script_kwds) - finally: - r, w = pipe - os.close(r) - os.close(w) - - @classmethod - def _from_subinterp(cls, name, interpid, pipe, script_kwargs): - r, w = pipe - - # Build the script. - postscript = textwrap.dedent(f''' - # Send the result over the pipe. - import json - import os - os.write({w}, json.dumps(snapshot).encode()) - - ''') - _postscript = script_kwargs.get('postscript') - if _postscript: - _postscript = textwrap.dedent(_postscript).lstrip() - postscript += _postscript - script_kwargs['postscript'] = postscript.strip() - script = cls.build_script(name, **script_kwargs) - - # Run the script. - if interpid is None: - ret = support.run_in_subinterp(script) - if ret != 0: - raise AssertionError(f'{ret} != 0') - else: - _interpreters.run_string(interpid, script) - - # Parse the results. - text = os.read(r, 1000) - return cls.parse(text.decode()) - - -class LockTests(unittest.TestCase): - - """Very basic test of import lock functions.""" - - def verify_lock_state(self, expected): - self.assertEqual(imp.lock_held(), expected, - "expected imp.lock_held() to be %r" % expected) - def testLock(self): - LOOPS = 50 - - # The import lock may already be held, e.g. if the test suite is run - # via "import test.autotest". - lock_held_at_start = imp.lock_held() - self.verify_lock_state(lock_held_at_start) - - for i in range(LOOPS): - imp.acquire_lock() - self.verify_lock_state(True) - - for i in range(LOOPS): - imp.release_lock() - - # The original state should be restored now. 
- self.verify_lock_state(lock_held_at_start) - - if not lock_held_at_start: - try: - imp.release_lock() - except RuntimeError: - pass - else: - self.fail("release_lock() without lock should raise " - "RuntimeError") - -class ImportTests(unittest.TestCase): - def setUp(self): - mod = importlib.import_module('test.encoded_modules') - self.test_strings = mod.test_strings - self.test_path = mod.__path__ - - # test_import_encoded_module moved to test_source_encoding.py - - def test_find_module_encoding(self): - for mod, encoding, _ in self.test_strings: - with imp.find_module('module_' + mod, self.test_path)[0] as fd: - self.assertEqual(fd.encoding, encoding) - - path = [os.path.dirname(__file__)] - with self.assertRaises(SyntaxError): - imp.find_module('badsyntax_pep3120', path) - - def test_issue1267(self): - for mod, encoding, _ in self.test_strings: - fp, filename, info = imp.find_module('module_' + mod, - self.test_path) - with fp: - self.assertNotEqual(fp, None) - self.assertEqual(fp.encoding, encoding) - self.assertEqual(fp.tell(), 0) - self.assertEqual(fp.readline(), '# test %s encoding\n' - % encoding) - - fp, filename, info = imp.find_module("tokenize") - with fp: - self.assertNotEqual(fp, None) - self.assertEqual(fp.encoding, "utf-8") - self.assertEqual(fp.tell(), 0) - self.assertEqual(fp.readline(), - '"""Tokenization help for Python programs.\n') - - def test_issue3594(self): - temp_mod_name = 'test_imp_helper' - sys.path.insert(0, '.') - try: - with open(temp_mod_name + '.py', 'w', encoding="latin-1") as file: - file.write("# coding: cp1252\nu = 'test.test_imp'\n") - file, filename, info = imp.find_module(temp_mod_name) - file.close() - self.assertEqual(file.encoding, 'cp1252') - finally: - del sys.path[0] - os_helper.unlink(temp_mod_name + '.py') - os_helper.unlink(temp_mod_name + '.pyc') - - def test_issue5604(self): - # Test cannot cover imp.load_compiled function. - # Martin von Loewis note what shared library cannot have non-ascii - # character because init_xxx function cannot be compiled - # and issue never happens for dynamic modules. - # But sources modified to follow generic way for processing paths. 
- - # the return encoding could be uppercase or None - fs_encoding = sys.getfilesystemencoding() - - # covers utf-8 and Windows ANSI code pages - # one non-space symbol from every page - # (http://en.wikipedia.org/wiki/Code_page) - known_locales = { - 'utf-8' : b'\xc3\xa4', - 'cp1250' : b'\x8C', - 'cp1251' : b'\xc0', - 'cp1252' : b'\xc0', - 'cp1253' : b'\xc1', - 'cp1254' : b'\xc0', - 'cp1255' : b'\xe0', - 'cp1256' : b'\xe0', - 'cp1257' : b'\xc0', - 'cp1258' : b'\xc0', - } - - if sys.platform == 'darwin': - self.assertEqual(fs_encoding, 'utf-8') - # Mac OS X uses the Normal Form D decomposition - # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html - special_char = b'a\xcc\x88' - else: - special_char = known_locales.get(fs_encoding) - - if not special_char: - self.skipTest("can't run this test with %s as filesystem encoding" - % fs_encoding) - decoded_char = special_char.decode(fs_encoding) - temp_mod_name = 'test_imp_helper_' + decoded_char - test_package_name = 'test_imp_helper_package_' + decoded_char - init_file_name = os.path.join(test_package_name, '__init__.py') - try: - # if the curdir is not in sys.path the test fails when run with - # ./python ./Lib/test/regrtest.py test_imp - sys.path.insert(0, os.curdir) - with open(temp_mod_name + '.py', 'w', encoding="utf-8") as file: - file.write('a = 1\n') - file, filename, info = imp.find_module(temp_mod_name) - with file: - self.assertIsNotNone(file) - self.assertTrue(filename[:-3].endswith(temp_mod_name)) - self.assertEqual(info[0], '.py') - self.assertEqual(info[1], 'r') - self.assertEqual(info[2], imp.PY_SOURCE) - - mod = imp.load_module(temp_mod_name, file, filename, info) - self.assertEqual(mod.a, 1) - - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - mod = imp.load_source(temp_mod_name, temp_mod_name + '.py') - self.assertEqual(mod.a, 1) - - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - if not sys.dont_write_bytecode: - mod = imp.load_compiled( - temp_mod_name, - imp.cache_from_source(temp_mod_name + '.py')) - self.assertEqual(mod.a, 1) - - if not os.path.exists(test_package_name): - os.mkdir(test_package_name) - with open(init_file_name, 'w', encoding="utf-8") as file: - file.write('b = 2\n') - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - package = imp.load_package(test_package_name, test_package_name) - self.assertEqual(package.b, 2) - finally: - del sys.path[0] - for ext in ('.py', '.pyc'): - os_helper.unlink(temp_mod_name + ext) - os_helper.unlink(init_file_name + ext) - os_helper.rmtree(test_package_name) - os_helper.rmtree('__pycache__') - - def test_issue9319(self): - path = os.path.dirname(__file__) - self.assertRaises(SyntaxError, - imp.find_module, "badsyntax_pep3120", [path]) - - def test_load_from_source(self): - # Verify that the imp module can correctly load and find .py files - # XXX (ncoghlan): It would be nice to use import_helper.CleanImport - # here, but that breaks because the os module registers some - # handlers in copy_reg on import. Since CleanImport doesn't - # revert that registration, the module is left in a broken - # state after reversion. 
Reinitialising the module contents - # and just reverting os.environ to its previous state is an OK - # workaround - with import_helper.CleanImport('os', 'os.path', OS_PATH_NAME): - import os - orig_path = os.path - orig_getenv = os.getenv - with os_helper.EnvironmentVarGuard(): - x = imp.find_module("os") - self.addCleanup(x[0].close) - new_os = imp.load_module("os", *x) - self.assertIs(os, new_os) - self.assertIs(orig_path, new_os.path) - self.assertIsNot(orig_getenv, new_os.getenv) - - @requires_load_dynamic - def test_issue15828_load_extensions(self): - # Issue 15828 picked up that the adapter between the old imp API - # and importlib couldn't handle C extensions - example = "_heapq" - x = imp.find_module(example) - file_ = x[0] - if file_ is not None: - self.addCleanup(file_.close) - mod = imp.load_module(example, *x) - self.assertEqual(mod.__name__, example) - - @requires_load_dynamic - def test_issue16421_multiple_modules_in_one_dll(self): - # Issue 16421: loading several modules from the same compiled file fails - m = '_testimportmultiple' - fileobj, pathname, description = imp.find_module(m) - fileobj.close() - mod0 = imp.load_dynamic(m, pathname) - mod1 = imp.load_dynamic('_testimportmultiple_foo', pathname) - mod2 = imp.load_dynamic('_testimportmultiple_bar', pathname) - self.assertEqual(mod0.__name__, m) - self.assertEqual(mod1.__name__, '_testimportmultiple_foo') - self.assertEqual(mod2.__name__, '_testimportmultiple_bar') - with self.assertRaises(ImportError): - imp.load_dynamic('nonexistent', pathname) - - @requires_load_dynamic - def test_load_dynamic_ImportError_path(self): - # Issue #1559549 added `name` and `path` attributes to ImportError - # in order to provide better detail. Issue #10854 implemented those - # attributes on import failures of extensions on Windows. - path = 'bogus file path' - name = 'extension' - with self.assertRaises(ImportError) as err: - imp.load_dynamic(name, path) - self.assertIn(path, err.exception.path) - self.assertEqual(name, err.exception.name) - - @requires_load_dynamic - def test_load_module_extension_file_is_None(self): - # When loading an extension module and the file is None, open one - # on the behalf of imp.load_dynamic(). 
- # Issue #15902 - name = '_testimportmultiple' - found = imp.find_module(name) - if found[0] is not None: - found[0].close() - if found[2][2] != imp.C_EXTENSION: - self.skipTest("found module doesn't appear to be a C extension") - imp.load_module(name, None, *found[1:]) - - @requires_load_dynamic - def test_issue24748_load_module_skips_sys_modules_check(self): - name = 'test.imp_dummy' - try: - del sys.modules[name] - except KeyError: - pass - try: - module = importlib.import_module(name) - spec = importlib.util.find_spec('_testmultiphase') - module = imp.load_dynamic(name, spec.origin) - self.assertEqual(module.__name__, name) - self.assertEqual(module.__spec__.name, name) - self.assertEqual(module.__spec__.origin, spec.origin) - self.assertRaises(AttributeError, getattr, module, 'dummy_name') - self.assertEqual(module.int_const, 1969) - self.assertIs(sys.modules[name], module) - finally: - try: - del sys.modules[name] - except KeyError: - pass - - @unittest.skipIf(sys.dont_write_bytecode, - "test meaningful only when writing bytecode") - def test_bug7732(self): - with os_helper.temp_cwd(): - source = os_helper.TESTFN + '.py' - os.mkdir(source) - self.assertRaisesRegex(ImportError, '^No module', - imp.find_module, os_helper.TESTFN, ["."]) - - def test_multiple_calls_to_get_data(self): - # Issue #18755: make sure multiple calls to get_data() can succeed. - loader = imp._LoadSourceCompatibility('imp', imp.__file__, - open(imp.__file__, encoding="utf-8")) - loader.get_data(imp.__file__) # File should be closed - loader.get_data(imp.__file__) # Will need to create a newly opened file - - def test_load_source(self): - # Create a temporary module since load_source(name) modifies - # sys.modules[name] attributes like __loader___ - modname = f"tmp{__name__}" - mod = type(sys.modules[__name__])(modname) - with support.swap_item(sys.modules, modname, mod): - with self.assertRaisesRegex(ValueError, 'embedded null'): - imp.load_source(modname, __file__ + "\0") - - @support.cpython_only - def test_issue31315(self): - # There shouldn't be an assertion failure in imp.create_dynamic(), - # when spec.name is not a string. - create_dynamic = support.get_attribute(imp, 'create_dynamic') - class BadSpec: - name = None - origin = 'foo' - with self.assertRaises(TypeError): - create_dynamic(BadSpec()) - - def test_issue_35321(self): - # Both _frozen_importlib and _frozen_importlib_external - # should have a spec origin of "frozen" and - # no need to clean up imports in this case. 
- - import _frozen_importlib_external - self.assertEqual(_frozen_importlib_external.__spec__.origin, "frozen") - - import _frozen_importlib - self.assertEqual(_frozen_importlib.__spec__.origin, "frozen") - - def test_source_hash(self): - self.assertEqual(_imp.source_hash(42, b'hi'), b'\xfb\xd9G\x05\xaf$\x9b~') - self.assertEqual(_imp.source_hash(43, b'hi'), b'\xd0/\x87C\xccC\xff\xe2') - - def test_pyc_invalidation_mode_from_cmdline(self): - cases = [ - ([], "default"), - (["--check-hash-based-pycs", "default"], "default"), - (["--check-hash-based-pycs", "always"], "always"), - (["--check-hash-based-pycs", "never"], "never"), - ] - for interp_args, expected in cases: - args = interp_args + [ - "-c", - "import _imp; print(_imp.check_hash_based_pycs)", - ] - res = script_helper.assert_python_ok(*args) - self.assertEqual(res.out.strip().decode('utf-8'), expected) - - def test_find_and_load_checked_pyc(self): - # issue 34056 - with os_helper.temp_cwd(): - with open('mymod.py', 'wb') as fp: - fp.write(b'x = 42\n') - py_compile.compile( - 'mymod.py', - doraise=True, - invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH, - ) - file, path, description = imp.find_module('mymod', path=['.']) - mod = imp.load_module('mymod', file, path, description) - self.assertEqual(mod.x, 42) - - def test_issue98354(self): - # _imp.create_builtin should raise TypeError - # if 'name' attribute of 'spec' argument is not a 'str' instance - - create_builtin = support.get_attribute(_imp, "create_builtin") - - class FakeSpec: - def __init__(self, name): - self.name = self - spec = FakeSpec("time") - with self.assertRaises(TypeError): - create_builtin(spec) - - class FakeSpec2: - name = [1, 2, 3, 4] - spec = FakeSpec2() - with self.assertRaises(TypeError): - create_builtin(spec) - - import builtins - class UnicodeSubclass(str): - pass - class GoodSpec: - name = UnicodeSubclass("builtins") - spec = GoodSpec() - bltin = create_builtin(spec) - self.assertEqual(bltin, builtins) - - class UnicodeSubclassFakeSpec(str): - def __init__(self, name): - self.name = self - spec = UnicodeSubclassFakeSpec("builtins") - bltin = create_builtin(spec) - self.assertEqual(bltin, builtins) - - @support.cpython_only - def test_create_builtin_subinterp(self): - # gh-99578: create_builtin() behavior changes after the creation of the - # first sub-interpreter. Test both code paths, before and after the - # creation of a sub-interpreter. Previously, create_builtin() had - # a reference leak after the creation of the first sub-interpreter. 
- - import builtins - create_builtin = support.get_attribute(_imp, "create_builtin") - class Spec: - name = "builtins" - spec = Spec() - - def check_get_builtins(): - refcnt = sys.getrefcount(builtins) - mod = _imp.create_builtin(spec) - self.assertIs(mod, builtins) - self.assertEqual(sys.getrefcount(builtins), refcnt + 1) - # Check that a GC collection doesn't crash - gc.collect() - - check_get_builtins() - - ret = support.run_in_subinterp("import builtins") - self.assertEqual(ret, 0) - - check_get_builtins() - - -class TestSinglePhaseSnapshot(ModuleSnapshot): - - @classmethod - def from_module(cls, mod): - self = super().from_module(mod) - self.summed = mod.sum(1, 2) - self.lookedup = mod.look_up_self() - self.lookedup_id = id(self.lookedup) - self.state_initialized = mod.state_initialized() - if hasattr(mod, 'initialized_count'): - self.init_count = mod.initialized_count() - return self - - SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent(f''' - snapshot['module'].update(dict( - int_const=mod.int_const, - str_const=mod.str_const, - _module_initialized=mod._module_initialized, - )) - snapshot.update(dict( - summed=mod.sum(1, 2), - lookedup_id=id(mod.look_up_self()), - state_initialized=mod.state_initialized(), - init_count=mod.initialized_count(), - has_spam=hasattr(mod, 'spam'), - spam=getattr(mod, 'spam', None), - )) - ''').rstrip() - - @classmethod - def parse(cls, text): - self = super().parse(text) - if not self.has_spam: - del self.spam - del self.has_spam - return self - - -@requires_load_dynamic -class SinglephaseInitTests(unittest.TestCase): - - NAME = '_testsinglephase' - - @classmethod - def setUpClass(cls): - if '-R' in sys.argv or '--huntrleaks' in sys.argv: - # https://github.com/python/cpython/issues/102251 - raise unittest.SkipTest('unresolved refleaks (see gh-102251)') - fileobj, filename, _ = imp.find_module(cls.NAME) - fileobj.close() - cls.FILE = filename - - # Start fresh. - cls.clean_up() - - def tearDown(self): - # Clean up the module. - self.clean_up() - - @classmethod - def clean_up(cls): - name = cls.NAME - filename = cls.FILE - if name in sys.modules: - if hasattr(sys.modules[name], '_clear_globals'): - assert sys.modules[name].__file__ == filename - sys.modules[name]._clear_globals() - del sys.modules[name] - # Clear all internally cached data for the extension. - _testinternalcapi.clear_extension(name, filename) - - ######################### - # helpers - - def add_module_cleanup(self, name): - def clean_up(): - # Clear all internally cached data for the extension. 
- _testinternalcapi.clear_extension(name, self.FILE) - self.addCleanup(clean_up) - - def load(self, name): - try: - already_loaded = self.already_loaded - except AttributeError: - already_loaded = self.already_loaded = {} - assert name not in already_loaded - mod = imp.load_dynamic(name, self.FILE) - self.assertNotIn(mod, already_loaded.values()) - already_loaded[name] = mod - return types.SimpleNamespace( - name=name, - module=mod, - snapshot=TestSinglePhaseSnapshot.from_module(mod), - ) - - def re_load(self, name, mod): - assert sys.modules[name] is mod - assert mod.__dict__ == mod.__dict__ - reloaded = imp.load_dynamic(name, self.FILE) - return types.SimpleNamespace( - name=name, - module=reloaded, - snapshot=TestSinglePhaseSnapshot.from_module(reloaded), - ) - - # subinterpreters - - def add_subinterpreter(self): - interpid = _interpreters.create(isolated=False) - _interpreters.run_string(interpid, textwrap.dedent(''' - import sys - import _testinternalcapi - ''')) - def clean_up(): - _interpreters.run_string(interpid, textwrap.dedent(f''' - name = {self.NAME!r} - if name in sys.modules: - sys.modules[name]._clear_globals() - _testinternalcapi.clear_extension(name, {self.FILE!r}) - ''')) - _interpreters.destroy(interpid) - self.addCleanup(clean_up) - return interpid - - def import_in_subinterp(self, interpid=None, *, - postscript=None, - postcleanup=False, - ): - name = self.NAME - - if postcleanup: - import_ = 'import _testinternalcapi' if interpid is None else '' - postcleanup = f''' - {import_} - mod._clear_globals() - _testinternalcapi.clear_extension(name, {self.FILE!r}) - ''' - - try: - pipe = self._pipe - except AttributeError: - r, w = pipe = self._pipe = os.pipe() - self.addCleanup(os.close, r) - self.addCleanup(os.close, w) - - snapshot = TestSinglePhaseSnapshot.from_subinterp( - name, - interpid, - pipe=pipe, - import_first=True, - postscript=postscript, - postcleanup=postcleanup, - ) - - return types.SimpleNamespace( - name=name, - module=None, - snapshot=snapshot, - ) - - # checks - - def check_common(self, loaded): - isolated = False - - mod = loaded.module - if not mod: - # It came from a subinterpreter. - isolated = True - mod = loaded.snapshot.module - # mod.__name__ might not match, but the spec will. - self.assertEqual(mod.__spec__.name, loaded.name) - self.assertEqual(mod.__file__, self.FILE) - self.assertEqual(mod.__spec__.origin, self.FILE) - if not isolated: - self.assertTrue(issubclass(mod.error, Exception)) - self.assertEqual(mod.int_const, 1969) - self.assertEqual(mod.str_const, 'something different') - self.assertIsInstance(mod._module_initialized, float) - self.assertGreater(mod._module_initialized, 0) - - snap = loaded.snapshot - self.assertEqual(snap.summed, 3) - if snap.state_initialized is not None: - self.assertIsInstance(snap.state_initialized, float) - self.assertGreater(snap.state_initialized, 0) - if isolated: - # The "looked up" module is interpreter-specific - # (interp->imports.modules_by_index was set for the module). - self.assertEqual(snap.lookedup_id, snap.id) - self.assertEqual(snap.cached_id, snap.id) - with self.assertRaises(AttributeError): - snap.spam - else: - self.assertIs(snap.lookedup, mod) - self.assertIs(snap.cached, mod) - - def check_direct(self, loaded): - # The module has its own PyModuleDef, with a matching name. 
- self.assertEqual(loaded.module.__name__, loaded.name) - self.assertIs(loaded.snapshot.lookedup, loaded.module) - - def check_indirect(self, loaded, orig): - # The module re-uses another's PyModuleDef, with a different name. - assert orig is not loaded.module - assert orig.__name__ != loaded.name - self.assertNotEqual(loaded.module.__name__, loaded.name) - self.assertIs(loaded.snapshot.lookedup, loaded.module) - - def check_basic(self, loaded, expected_init_count): - # m_size == -1 - # The module loads fresh the first time and copies m_copy after. - snap = loaded.snapshot - self.assertIsNot(snap.state_initialized, None) - self.assertIsInstance(snap.init_count, int) - self.assertGreater(snap.init_count, 0) - self.assertEqual(snap.init_count, expected_init_count) - - def check_with_reinit(self, loaded): - # m_size >= 0 - # The module loads fresh every time. - pass - - def check_fresh(self, loaded): - """ - The module had not been loaded before (at least since fully reset). - """ - snap = loaded.snapshot - # The module's init func was run. - # A copy of the module's __dict__ was stored in def->m_base.m_copy. - # The previous m_copy was deleted first. - # _PyRuntime.imports.extensions was set. - self.assertEqual(snap.init_count, 1) - # The global state was initialized. - # The module attrs were initialized from that state. - self.assertEqual(snap.module._module_initialized, - snap.state_initialized) - - def check_semi_fresh(self, loaded, base, prev): - """ - The module had been loaded before and then reset - (but the module global state wasn't). - """ - snap = loaded.snapshot - # The module's init func was run again. - # A copy of the module's __dict__ was stored in def->m_base.m_copy. - # The previous m_copy was deleted first. - # The module globals did not get reset. - self.assertNotEqual(snap.id, base.snapshot.id) - self.assertNotEqual(snap.id, prev.snapshot.id) - self.assertEqual(snap.init_count, prev.snapshot.init_count + 1) - # The global state was updated. - # The module attrs were initialized from that state. - self.assertEqual(snap.module._module_initialized, - snap.state_initialized) - self.assertNotEqual(snap.state_initialized, - base.snapshot.state_initialized) - self.assertNotEqual(snap.state_initialized, - prev.snapshot.state_initialized) - - def check_copied(self, loaded, base): - """ - The module had been loaded before and never reset. - """ - snap = loaded.snapshot - # The module's init func was not run again. - # The interpreter copied m_copy, as set by the other interpreter, - # with objects owned by the other interpreter. - # The module globals did not get reset. - self.assertNotEqual(snap.id, base.snapshot.id) - self.assertEqual(snap.init_count, base.snapshot.init_count) - # The global state was not updated since the init func did not run. - # The module attrs were not directly initialized from that state. - # The state and module attrs still match the previous loading. 
- self.assertEqual(snap.module._module_initialized, - snap.state_initialized) - self.assertEqual(snap.state_initialized, - base.snapshot.state_initialized) - - ######################### - # the tests - - def test_cleared_globals(self): - loaded = self.load(self.NAME) - _testsinglephase = loaded.module - init_before = _testsinglephase.state_initialized() - - _testsinglephase._clear_globals() - init_after = _testsinglephase.state_initialized() - init_count = _testsinglephase.initialized_count() - - self.assertGreater(init_before, 0) - self.assertEqual(init_after, 0) - self.assertEqual(init_count, -1) - - def test_variants(self): - # Exercise the most meaningful variants described in Python/import.c. - self.maxDiff = None - - # Check the "basic" module. - - name = self.NAME - expected_init_count = 1 - with self.subTest(name): - loaded = self.load(name) - - self.check_common(loaded) - self.check_direct(loaded) - self.check_basic(loaded, expected_init_count) - basic = loaded.module - - # Check its indirect variants. - - name = f'{self.NAME}_basic_wrapper' - self.add_module_cleanup(name) - expected_init_count += 1 - with self.subTest(name): - loaded = self.load(name) - - self.check_common(loaded) - self.check_indirect(loaded, basic) - self.check_basic(loaded, expected_init_count) - - # Currently PyState_AddModule() always replaces the cached module. - self.assertIs(basic.look_up_self(), loaded.module) - self.assertEqual(basic.initialized_count(), expected_init_count) - - # The cached module shouldn't change after this point. - basic_lookedup = loaded.module - - # Check its direct variant. - - name = f'{self.NAME}_basic_copy' - self.add_module_cleanup(name) - expected_init_count += 1 - with self.subTest(name): - loaded = self.load(name) - - self.check_common(loaded) - self.check_direct(loaded) - self.check_basic(loaded, expected_init_count) - - # This should change the cached module for _testsinglephase. - self.assertIs(basic.look_up_self(), basic_lookedup) - self.assertEqual(basic.initialized_count(), expected_init_count) - - # Check the non-basic variant that has no state. - - name = f'{self.NAME}_with_reinit' - self.add_module_cleanup(name) - with self.subTest(name): - loaded = self.load(name) - - self.check_common(loaded) - self.assertIs(loaded.snapshot.state_initialized, None) - self.check_direct(loaded) - self.check_with_reinit(loaded) - - # This should change the cached module for _testsinglephase. - self.assertIs(basic.look_up_self(), basic_lookedup) - self.assertEqual(basic.initialized_count(), expected_init_count) - - # Check the basic variant that has state. - - name = f'{self.NAME}_with_state' - self.add_module_cleanup(name) - with self.subTest(name): - loaded = self.load(name) - - self.check_common(loaded) - self.assertIsNot(loaded.snapshot.state_initialized, None) - self.check_direct(loaded) - self.check_with_reinit(loaded) - - # This should change the cached module for _testsinglephase. - self.assertIs(basic.look_up_self(), basic_lookedup) - self.assertEqual(basic.initialized_count(), expected_init_count) - - def test_basic_reloaded(self): - # m_copy is copied into the existing module object. - # Global state is not changed. 
- self.maxDiff = None - - for name in [ - self.NAME, # the "basic" module - f'{self.NAME}_basic_wrapper', # the indirect variant - f'{self.NAME}_basic_copy', # the direct variant - ]: - self.add_module_cleanup(name) - with self.subTest(name): - loaded = self.load(name) - reloaded = self.re_load(name, loaded.module) - - self.check_common(loaded) - self.check_common(reloaded) - - # Make sure the original __dict__ did not get replaced. - self.assertEqual(id(loaded.module.__dict__), - loaded.snapshot.ns_id) - self.assertEqual(loaded.snapshot.ns.__dict__, - loaded.module.__dict__) - - self.assertEqual(reloaded.module.__spec__.name, reloaded.name) - self.assertEqual(reloaded.module.__name__, - reloaded.snapshot.ns.__name__) - - self.assertIs(reloaded.module, loaded.module) - self.assertIs(reloaded.module.__dict__, loaded.module.__dict__) - # It only happens to be the same but that's good enough here. - # We really just want to verify that the re-loaded attrs - # didn't change. - self.assertIs(reloaded.snapshot.lookedup, - loaded.snapshot.lookedup) - self.assertEqual(reloaded.snapshot.state_initialized, - loaded.snapshot.state_initialized) - self.assertEqual(reloaded.snapshot.init_count, - loaded.snapshot.init_count) - - self.assertIs(reloaded.snapshot.cached, reloaded.module) - - def test_with_reinit_reloaded(self): - # The module's m_init func is run again. - self.maxDiff = None - - # Keep a reference around. - basic = self.load(self.NAME) - - for name in [ - f'{self.NAME}_with_reinit', # m_size == 0 - f'{self.NAME}_with_state', # m_size > 0 - ]: - self.add_module_cleanup(name) - with self.subTest(name): - loaded = self.load(name) - reloaded = self.re_load(name, loaded.module) - - self.check_common(loaded) - self.check_common(reloaded) - - # Make sure the original __dict__ did not get replaced. - self.assertEqual(id(loaded.module.__dict__), - loaded.snapshot.ns_id) - self.assertEqual(loaded.snapshot.ns.__dict__, - loaded.module.__dict__) - - self.assertEqual(reloaded.module.__spec__.name, reloaded.name) - self.assertEqual(reloaded.module.__name__, - reloaded.snapshot.ns.__name__) - - self.assertIsNot(reloaded.module, loaded.module) - self.assertNotEqual(reloaded.module.__dict__, - loaded.module.__dict__) - self.assertIs(reloaded.snapshot.lookedup, reloaded.module) - if loaded.snapshot.state_initialized is None: - self.assertIs(reloaded.snapshot.state_initialized, None) - else: - self.assertGreater(reloaded.snapshot.state_initialized, - loaded.snapshot.state_initialized) - - self.assertIs(reloaded.snapshot.cached, reloaded.module) - - # Currently, for every single-phrase init module loaded - # in multiple interpreters, those interpreters share a - # PyModuleDef for that object, which can be a problem. - # Also, we test with a single-phase module that has global state, - # which is shared by all interpreters. - - @requires_subinterpreters - def test_basic_multiple_interpreters_main_no_reset(self): - # without resetting; already loaded in main interpreter - - # At this point: - # * alive in 0 interpreters - # * module def may or may not be loaded already - # * module def not in _PyRuntime.imports.extensions - # * mod init func has not run yet (since reset, at least) - # * m_copy not set (hasn't been loaded yet or already cleared) - # * module's global state has not been initialized yet - # (or already cleared) - - main_loaded = self.load(self.NAME) - _testsinglephase = main_loaded.module - # Attrs set after loading are not in m_copy. 
- _testsinglephase.spam = 'spam, spam, spam, spam, eggs, and spam' - - self.check_common(main_loaded) - self.check_fresh(main_loaded) - - interpid1 = self.add_subinterpreter() - interpid2 = self.add_subinterpreter() - - # At this point: - # * alive in 1 interpreter (main) - # * module def in _PyRuntime.imports.extensions - # * mod init func ran for the first time (since reset, at least) - # * m_copy was copied from the main interpreter (was NULL) - # * module's global state was initialized - - # Use an interpreter that gets destroyed right away. - loaded = self.import_in_subinterp() - self.check_common(loaded) - self.check_copied(loaded, main_loaded) - - # At this point: - # * alive in 1 interpreter (main) - # * module def still in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy is NULL (claered when the interpreter was destroyed) - # (was from main interpreter) - # * module's global state was updated, not reset - - # Use a subinterpreter that sticks around. - loaded = self.import_in_subinterp(interpid1) - self.check_common(loaded) - self.check_copied(loaded, main_loaded) - - # At this point: - # * alive in 2 interpreters (main, interp1) - # * module def still in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy was copied from interp1 - # * module's global state was updated, not reset - - # Use a subinterpreter while the previous one is still alive. - loaded = self.import_in_subinterp(interpid2) - self.check_common(loaded) - self.check_copied(loaded, main_loaded) - - # At this point: - # * alive in 3 interpreters (main, interp1, interp2) - # * module def still in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy was copied from interp2 (was from interp1) - # * module's global state was updated, not reset - - @requires_subinterpreters - def test_basic_multiple_interpreters_deleted_no_reset(self): - # without resetting; already loaded in a deleted interpreter - - # At this point: - # * alive in 0 interpreters - # * module def may or may not be loaded already - # * module def not in _PyRuntime.imports.extensions - # * mod init func has not run yet (since reset, at least) - # * m_copy not set (hasn't been loaded yet or already cleared) - # * module's global state has not been initialized yet - # (or already cleared) - - interpid1 = self.add_subinterpreter() - interpid2 = self.add_subinterpreter() - - # First, load in the main interpreter but then completely clear it. - loaded_main = self.load(self.NAME) - loaded_main.module._clear_globals() - _testinternalcapi.clear_extension(self.NAME, self.FILE) - - # At this point: - # * alive in 0 interpreters - # * module def loaded already - # * module def was in _PyRuntime.imports.extensions, but cleared - # * mod init func ran for the first time (since reset, at least) - # * m_copy was set, but cleared (was NULL) - # * module's global state was initialized but cleared - - # Start with an interpreter that gets destroyed right away. - base = self.import_in_subinterp(postscript=''' - # Attrs set after loading are not in m_copy. - mod.spam = 'spam, spam, mash, spam, eggs, and spam' - ''') - self.check_common(base) - self.check_fresh(base) - - # At this point: - # * alive in 0 interpreters - # * module def in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy is NULL (claered when the interpreter was destroyed) - # * module's global state was initialized, not reset - - # Use a subinterpreter that sticks around. 
- loaded_interp1 = self.import_in_subinterp(interpid1) - self.check_common(loaded_interp1) - self.check_semi_fresh(loaded_interp1, loaded_main, base) - - # At this point: - # * alive in 1 interpreter (interp1) - # * module def still in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy was copied from interp1 (was NULL) - # * module's global state was updated, not reset - - # Use a subinterpreter while the previous one is still alive. - loaded_interp2 = self.import_in_subinterp(interpid2) - self.check_common(loaded_interp2) - self.check_copied(loaded_interp2, loaded_interp1) - - # At this point: - # * alive in 2 interpreters (interp1, interp2) - # * module def still in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy was copied from interp2 (was from interp1) - # * module's global state was updated, not reset - - @requires_subinterpreters - @requires_load_dynamic - def test_basic_multiple_interpreters_reset_each(self): - # resetting between each interpreter - - # At this point: - # * alive in 0 interpreters - # * module def may or may not be loaded already - # * module def not in _PyRuntime.imports.extensions - # * mod init func has not run yet (since reset, at least) - # * m_copy not set (hasn't been loaded yet or already cleared) - # * module's global state has not been initialized yet - # (or already cleared) - - interpid1 = self.add_subinterpreter() - interpid2 = self.add_subinterpreter() - - # Use an interpreter that gets destroyed right away. - loaded = self.import_in_subinterp( - postscript=''' - # Attrs set after loading are not in m_copy. - mod.spam = 'spam, spam, mash, spam, eggs, and spam' - ''', - postcleanup=True, - ) - self.check_common(loaded) - self.check_fresh(loaded) - - # At this point: - # * alive in 0 interpreters - # * module def in _PyRuntime.imports.extensions - # * mod init func ran for the first time (since reset, at least) - # * m_copy is NULL (claered when the interpreter was destroyed) - # * module's global state was initialized, not reset - - # Use a subinterpreter that sticks around. - loaded = self.import_in_subinterp(interpid1, postcleanup=True) - self.check_common(loaded) - self.check_fresh(loaded) - - # At this point: - # * alive in 1 interpreter (interp1) - # * module def still in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy was copied from interp1 (was NULL) - # * module's global state was initialized, not reset - - # Use a subinterpreter while the previous one is still alive. - loaded = self.import_in_subinterp(interpid2, postcleanup=True) - self.check_common(loaded) - self.check_fresh(loaded) - - # At this point: - # * alive in 2 interpreters (interp2, interp2) - # * module def still in _PyRuntime.imports.extensions - # * mod init func ran again - # * m_copy was copied from interp2 (was from interp1) - # * module's global state was initialized, not reset - - -class ReloadTests(unittest.TestCase): - - """Very basic tests to make sure that imp.reload() operates just like - reload().""" - - def test_source(self): - # XXX (ncoghlan): It would be nice to use test.import_helper.CleanImport - # here, but that breaks because the os module registers some - # handlers in copy_reg on import. Since CleanImport doesn't - # revert that registration, the module is left in a broken - # state after reversion. 
Reinitialising the module contents - # and just reverting os.environ to its previous state is an OK - # workaround - with os_helper.EnvironmentVarGuard(): - import os - imp.reload(os) - - def test_extension(self): - with import_helper.CleanImport('time'): - import time - imp.reload(time) - - def test_builtin(self): - with import_helper.CleanImport('marshal'): - import marshal - imp.reload(marshal) - - def test_with_deleted_parent(self): - # see #18681 - from html import parser - html = sys.modules.pop('html') - def cleanup(): - sys.modules['html'] = html - self.addCleanup(cleanup) - with self.assertRaisesRegex(ImportError, 'html'): - imp.reload(parser) - - -class PEP3147Tests(unittest.TestCase): - """Tests of PEP 3147.""" - - tag = imp.get_tag() - - @unittest.skipUnless(sys.implementation.cache_tag is not None, - 'requires sys.implementation.cache_tag not be None') - def test_cache_from_source(self): - # Given the path to a .py file, return the path to its PEP 3147 - # defined .pyc file (i.e. under __pycache__). - path = os.path.join('foo', 'bar', 'baz', 'qux.py') - expect = os.path.join('foo', 'bar', 'baz', '__pycache__', - 'qux.{}.pyc'.format(self.tag)) - self.assertEqual(imp.cache_from_source(path, True), expect) - - @unittest.skipUnless(sys.implementation.cache_tag is not None, - 'requires sys.implementation.cache_tag to not be ' - 'None') - def test_source_from_cache(self): - # Given the path to a PEP 3147 defined .pyc file, return the path to - # its source. This tests the good path. - path = os.path.join('foo', 'bar', 'baz', '__pycache__', - 'qux.{}.pyc'.format(self.tag)) - expect = os.path.join('foo', 'bar', 'baz', 'qux.py') - self.assertEqual(imp.source_from_cache(path), expect) - - -class NullImporterTests(unittest.TestCase): - @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None, - "Need an undecodeable filename") - def test_unencodeable(self): - name = os_helper.TESTFN_UNENCODABLE - os.mkdir(name) - try: - self.assertRaises(ImportError, imp.NullImporter, name) - finally: - os.rmdir(name) - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index 96815b3f758a5b..41dfdaabe24664 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -2,8 +2,12 @@ import contextlib import errno import glob +import json import importlib.util from importlib._bootstrap_external import _get_sourcefile +from importlib.machinery import ( + BuiltinImporter, ExtensionFileLoader, FrozenImporter, SourceFileLoader, +) import marshal import os import py_compile @@ -15,13 +19,15 @@ import textwrap import threading import time +import types import unittest from unittest import mock +import _testinternalcapi from test.support import os_helper from test.support import ( STDLIB_DIR, swap_attr, swap_item, cpython_only, is_emscripten, - is_wasi, run_in_subinterp_with_config) + is_wasi, run_in_subinterp, run_in_subinterp_with_config) from test.support.import_helper import ( forget, make_legacy_pyc, unlink, unload, DirsOnSysPath, CleanImport) from test.support.os_helper import ( @@ -38,12 +44,59 @@ import _testmultiphase except ImportError: _testmultiphase = None +try: + import _xxsubinterpreters as _interpreters +except ModuleNotFoundError: + _interpreters = None skip_if_dont_write_bytecode = unittest.skipIf( sys.dont_write_bytecode, "test meaningful only when writing bytecode") + +def _require_loader(module, loader, skip): + if isinstance(module, str): + module = __import__(module) + + MODULE_KINDS = { + 
BuiltinImporter: 'built-in', + ExtensionFileLoader: 'extension', + FrozenImporter: 'frozen', + SourceFileLoader: 'pure Python', + } + + expected = loader + assert isinstance(expected, type), expected + expected = MODULE_KINDS[expected] + + actual = module.__spec__.loader + if not isinstance(actual, type): + actual = type(actual) + actual = MODULE_KINDS[actual] + + if actual != expected: + err = f'expected module to be {expected}, got {module.__spec__}' + if skip: + raise unittest.SkipTest(err) + raise Exception(err) + return module + +def require_builtin(module, *, skip=False): + module = _require_loader(module, BuiltinImporter, skip) + assert module.__spec__.origin == 'built-in', module.__spec__ + +def require_extension(module, *, skip=False): + _require_loader(module, ExtensionFileLoader, skip) + +def require_frozen(module, *, skip=True): + module = _require_loader(module, FrozenImporter, skip) + assert module.__spec__.origin == 'frozen', module.__spec__ + +def require_pure_python(module, *, skip=False): + _require_loader(module, SourceFileLoader, skip) + + def remove_files(name): for f in (name + ".py", name + ".pyc", @@ -74,6 +127,182 @@ def _ready_to_import(name=None, source=""): del sys.modules[name] +def requires_subinterpreters(meth): + """Decorator to skip a test if subinterpreters are not supported.""" + return unittest.skipIf(_interpreters is None, + 'subinterpreters required')(meth) + + +def requires_singlephase_init(meth): + """Decorator to skip if single-phase init modules are not supported.""" + meth = cpython_only(meth) + return unittest.skipIf(_testsinglephase is None, + 'test requires _testsinglephase module')(meth) + + +class ModuleSnapshot(types.SimpleNamespace): + """A representation of a module for testing. + + Fields: + + * id - the module's object ID + * module - the actual module or an adequate substitute + * __file__ + * __spec__ + * name + * origin + * ns - a copy (dict) of the module's __dict__ (or None) + * ns_id - the object ID of the module's __dict__ + * cached - the sys.modules[mod.__spec__.name] entry (or None) + * cached_id - the object ID of the sys.modules entry (or None) + + In cases where the value is not available (e.g. due to serialization), + the value will be None. + """ + _fields = tuple('id module ns ns_id cached cached_id'.split()) + + @classmethod + def from_module(cls, mod): + name = mod.__spec__.name + cached = sys.modules.get(name) + return cls( + id=id(mod), + module=mod, + ns=types.SimpleNamespace(**mod.__dict__), + ns_id=id(mod.__dict__), + cached=cached, + cached_id=id(cached), + ) + + SCRIPT = textwrap.dedent(''' + {imports} + + name = {name!r} + + {prescript} + + mod = {name} + + {body} + + {postscript} + ''') + IMPORTS = textwrap.dedent(''' + import sys + ''').strip() + SCRIPT_BODY = textwrap.dedent(''' + # Capture the snapshot data. + cached = sys.modules.get(name) + snapshot = dict( + id=id(mod), + module=dict( + __file__=mod.__file__, + __spec__=dict( + name=mod.__spec__.name, + origin=mod.__spec__.origin, + ), + ), + ns=None, + ns_id=id(mod.__dict__), + cached=None, + cached_id=id(cached) if cached else None, + ) + ''').strip() + CLEANUP_SCRIPT = textwrap.dedent(''' + # Clean up the module. 
+ sys.modules.pop(name, None) + ''').strip() + + @classmethod + def build_script(cls, name, *, + prescript=None, + import_first=False, + postscript=None, + postcleanup=False, + ): + if postcleanup is True: + postcleanup = cls.CLEANUP_SCRIPT + elif isinstance(postcleanup, str): + postcleanup = textwrap.dedent(postcleanup).strip() + postcleanup = cls.CLEANUP_SCRIPT + os.linesep + postcleanup + else: + postcleanup = '' + prescript = textwrap.dedent(prescript).strip() if prescript else '' + postscript = textwrap.dedent(postscript).strip() if postscript else '' + + if postcleanup: + if postscript: + postscript = postscript + os.linesep * 2 + postcleanup + else: + postscript = postcleanup + + if import_first: + prescript += textwrap.dedent(f''' + + # Now import the module. + assert name not in sys.modules + import {name}''') + + return cls.SCRIPT.format( + imports=cls.IMPORTS.strip(), + name=name, + prescript=prescript.strip(), + body=cls.SCRIPT_BODY.strip(), + postscript=postscript, + ) + + @classmethod + def parse(cls, text): + raw = json.loads(text) + mod = raw['module'] + mod['__spec__'] = types.SimpleNamespace(**mod['__spec__']) + raw['module'] = types.SimpleNamespace(**mod) + return cls(**raw) + + @classmethod + def from_subinterp(cls, name, interpid=None, *, pipe=None, **script_kwds): + if pipe is not None: + return cls._from_subinterp(name, interpid, pipe, script_kwds) + pipe = os.pipe() + try: + return cls._from_subinterp(name, interpid, pipe, script_kwds) + finally: + r, w = pipe + os.close(r) + os.close(w) + + @classmethod + def _from_subinterp(cls, name, interpid, pipe, script_kwargs): + r, w = pipe + + # Build the script. + postscript = textwrap.dedent(f''' + # Send the result over the pipe. + import json + import os + os.write({w}, json.dumps(snapshot).encode()) + + ''') + _postscript = script_kwargs.get('postscript') + if _postscript: + _postscript = textwrap.dedent(_postscript).lstrip() + postscript += _postscript + script_kwargs['postscript'] = postscript.strip() + script = cls.build_script(name, **script_kwargs) + + # Run the script. + if interpid is None: + ret = run_in_subinterp(script) + if ret != 0: + raise AssertionError(f'{ret} != 0') + else: + _interpreters.run_string(interpid, script) + + # Parse the results. + text = os.read(r, 1000) + return cls.parse(text.decode()) + + class ImportTests(unittest.TestCase): def setUp(self): @@ -1407,7 +1636,12 @@ class SubinterpImportTests(unittest.TestCase): allow_exec=False, allow_threads=True, allow_daemon_threads=False, + # Isolation-related config values aren't included here. + ) + ISOLATED = dict( + use_main_obmalloc=False, ) + NOT_ISOLATED = {k: not v for k, v in ISOLATED.items()} @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") def pipe(self): @@ -1437,10 +1671,11 @@ def import_script(self, name, fd, check_override=None): os.write({fd}, text.encode('utf-8')) ''') - def run_shared(self, name, *, - check_singlephase_setting=False, - check_singlephase_override=None, - ): + def run_here(self, name, *, + check_singlephase_setting=False, + check_singlephase_override=None, + isolated=False, + ): """ Try importing the named module in a subinterpreter. 
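The ModuleSnapshot machinery added above runs a generated script in a subinterpreter, serialises a snapshot of the imported module as JSON, and ships it back to the parent over an os.pipe(). Stripped of the snapshot details, the pattern is roughly the following; the helper name and the fields captured here are illustrative only, and run_in_subinterp is an internal test helper that requires _testcapi:

    import json
    import os
    import textwrap
    from test.support import run_in_subinterp  # internal helper; needs _testcapi

    def snapshot_in_subinterp(module_name):
        # Run an import in a fresh subinterpreter and read a small JSON
        # snapshot of the resulting module back over a pipe.
        r, w = os.pipe()
        try:
            script = textwrap.dedent(f'''
                import json, os, sys
                import {module_name}
                mod = sys.modules[{module_name!r}]
                snapshot = {{'name': mod.__spec__.name, 'origin': mod.__spec__.origin}}
                os.write({w}, json.dumps(snapshot).encode())
                ''')
            ret = run_in_subinterp(script)
            assert ret == 0, ret
            return json.loads(os.read(r, 1000).decode())
        finally:
            os.close(r)
            os.close(w)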
@@ -1460,6 +1695,7 @@ def run_shared(self, name, *, kwargs = dict( **self.RUN_KWARGS, + **(self.ISOLATED if isolated else self.NOT_ISOLATED), check_multi_interp_extensions=check_singlephase_setting, ) @@ -1470,27 +1706,38 @@ def run_shared(self, name, *, self.assertEqual(ret, 0) return os.read(r, 100) - def check_compatible_shared(self, name, *, strict=False): + def check_compatible_here(self, name, *, strict=False, isolated=False): # Verify that the named module may be imported in a subinterpreter. - # (See run_shared() for more info.) - out = self.run_shared(name, check_singlephase_setting=strict) + # (See run_here() for more info.) + out = self.run_here(name, + check_singlephase_setting=strict, + isolated=isolated, + ) self.assertEqual(out, b'okay') - def check_incompatible_shared(self, name): - # Differences from check_compatible_shared(): + def check_incompatible_here(self, name, *, isolated=False): + # Differences from check_compatible_here(): # * verify that import fails # * "strict" is always True - out = self.run_shared(name, check_singlephase_setting=True) + out = self.run_here(name, + check_singlephase_setting=True, + isolated=isolated, + ) self.assertEqual( out.decode('utf-8'), f'ImportError: module {name} does not support loading in subinterpreters', ) - def check_compatible_isolated(self, name, *, strict=False): - # Differences from check_compatible_shared(): + def check_compatible_fresh(self, name, *, strict=False, isolated=False): + # Differences from check_compatible_here(): # * subinterpreter in a new process # * module has never been imported before in that process # * this tests importing the module for the first time + kwargs = dict( + **self.RUN_KWARGS, + **(self.ISOLATED if isolated else self.NOT_ISOLATED), + check_multi_interp_extensions=strict, + ) _, out, err = script_helper.assert_python_ok('-c', textwrap.dedent(f''' import _testcapi, sys assert ( @@ -1499,25 +1746,28 @@ def check_compatible_isolated(self, name, *, strict=False): ), repr({name!r}) ret = _testcapi.run_in_subinterp_with_config( {self.import_script(name, "sys.stdout.fileno()")!r}, - **{self.RUN_KWARGS}, - check_multi_interp_extensions={strict}, + **{kwargs}, ) assert ret == 0, ret ''')) self.assertEqual(err, b'') self.assertEqual(out, b'okay') - def check_incompatible_isolated(self, name): - # Differences from check_compatible_isolated(): + def check_incompatible_fresh(self, name, *, isolated=False): + # Differences from check_compatible_fresh(): # * verify that import fails # * "strict" is always True + kwargs = dict( + **self.RUN_KWARGS, + **(self.ISOLATED if isolated else self.NOT_ISOLATED), + check_multi_interp_extensions=True, + ) _, out, err = script_helper.assert_python_ok('-c', textwrap.dedent(f''' import _testcapi, sys assert {name!r} not in sys.modules, {name!r} ret = _testcapi.run_in_subinterp_with_config( {self.import_script(name, "sys.stdout.fileno()")!r}, - **{self.RUN_KWARGS}, - check_multi_interp_extensions=True, + **{kwargs}, ) assert ret == 0, ret ''')) @@ -1528,59 +1778,65 @@ def check_incompatible_isolated(self, name): ) def test_builtin_compat(self): - module = 'sys' + # For now we avoid using sys or builtins + # since they still don't implement multi-phase init. 
+ module = '_imp' + require_builtin(module) with self.subTest(f'{module}: not strict'): - self.check_compatible_shared(module, strict=False) - with self.subTest(f'{module}: strict, shared'): - self.check_compatible_shared(module, strict=True) + self.check_compatible_here(module, strict=False) + with self.subTest(f'{module}: strict, not fresh'): + self.check_compatible_here(module, strict=True) @cpython_only def test_frozen_compat(self): module = '_frozen_importlib' + require_frozen(module, skip=True) if __import__(module).__spec__.origin != 'frozen': raise unittest.SkipTest(f'{module} is unexpectedly not frozen') with self.subTest(f'{module}: not strict'): - self.check_compatible_shared(module, strict=False) - with self.subTest(f'{module}: strict, shared'): - self.check_compatible_shared(module, strict=True) + self.check_compatible_here(module, strict=False) + with self.subTest(f'{module}: strict, not fresh'): + self.check_compatible_here(module, strict=True) - @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module") + @requires_singlephase_init def test_single_init_extension_compat(self): module = '_testsinglephase' + require_extension(module) with self.subTest(f'{module}: not strict'): - self.check_compatible_shared(module, strict=False) - with self.subTest(f'{module}: strict, shared'): - self.check_incompatible_shared(module) - with self.subTest(f'{module}: strict, isolated'): - self.check_incompatible_isolated(module) + self.check_compatible_here(module, strict=False) + with self.subTest(f'{module}: strict, not fresh'): + self.check_incompatible_here(module) + with self.subTest(f'{module}: strict, fresh'): + self.check_incompatible_fresh(module) @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") def test_multi_init_extension_compat(self): module = '_testmultiphase' + require_extension(module) with self.subTest(f'{module}: not strict'): - self.check_compatible_shared(module, strict=False) - with self.subTest(f'{module}: strict, shared'): - self.check_compatible_shared(module, strict=True) - with self.subTest(f'{module}: strict, isolated'): - self.check_compatible_isolated(module, strict=True) + self.check_compatible_here(module, strict=False) + with self.subTest(f'{module}: strict, not fresh'): + self.check_compatible_here(module, strict=True) + with self.subTest(f'{module}: strict, fresh'): + self.check_compatible_fresh(module, strict=True) def test_python_compat(self): module = 'threading' - if __import__(module).__spec__.origin == 'frozen': - raise unittest.SkipTest(f'{module} is unexpectedly frozen') + require_pure_python(module) with self.subTest(f'{module}: not strict'): - self.check_compatible_shared(module, strict=False) - with self.subTest(f'{module}: strict, shared'): - self.check_compatible_shared(module, strict=True) - with self.subTest(f'{module}: strict, isolated'): - self.check_compatible_isolated(module, strict=True) + self.check_compatible_here(module, strict=False) + with self.subTest(f'{module}: strict, not fresh'): + self.check_compatible_here(module, strict=True) + with self.subTest(f'{module}: strict, fresh'): + self.check_compatible_fresh(module, strict=True) - @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module") + @requires_singlephase_init def test_singlephase_check_with_setting_and_override(self): module = '_testsinglephase' + require_extension(module) def check_compatible(setting, override): - out = self.run_shared( + out = self.run_here( module, 
check_singlephase_setting=setting, check_singlephase_override=override, @@ -1588,7 +1844,7 @@ def check_compatible(setting, override): self.assertEqual(out, b'okay') def check_incompatible(setting, override): - out = self.run_shared( + out = self.run_here( module, check_singlephase_setting=setting, check_singlephase_override=override, @@ -1609,6 +1865,693 @@ def check_incompatible(setting, override): with self.subTest('config: check disabled; override: disabled'): check_compatible(False, -1) + def test_isolated_config(self): + module = 'threading' + require_pure_python(module) + with self.subTest(f'{module}: strict, not fresh'): + self.check_compatible_here(module, strict=True, isolated=True) + with self.subTest(f'{module}: strict, fresh'): + self.check_compatible_fresh(module, strict=True, isolated=True) + + +class TestSinglePhaseSnapshot(ModuleSnapshot): + + @classmethod + def from_module(cls, mod): + self = super().from_module(mod) + self.summed = mod.sum(1, 2) + self.lookedup = mod.look_up_self() + self.lookedup_id = id(self.lookedup) + self.state_initialized = mod.state_initialized() + if hasattr(mod, 'initialized_count'): + self.init_count = mod.initialized_count() + return self + + SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent(''' + snapshot['module'].update(dict( + int_const=mod.int_const, + str_const=mod.str_const, + _module_initialized=mod._module_initialized, + )) + snapshot.update(dict( + summed=mod.sum(1, 2), + lookedup_id=id(mod.look_up_self()), + state_initialized=mod.state_initialized(), + init_count=mod.initialized_count(), + has_spam=hasattr(mod, 'spam'), + spam=getattr(mod, 'spam', None), + )) + ''').rstrip() + + @classmethod + def parse(cls, text): + self = super().parse(text) + if not self.has_spam: + del self.spam + del self.has_spam + return self + + +@requires_singlephase_init +class SinglephaseInitTests(unittest.TestCase): + + NAME = '_testsinglephase' + + @classmethod + def setUpClass(cls): + if '-R' in sys.argv or '--huntrleaks' in sys.argv: + # https://github.com/python/cpython/issues/102251 + raise unittest.SkipTest('unresolved refleaks (see gh-102251)') + + spec = importlib.util.find_spec(cls.NAME) + from importlib.machinery import ExtensionFileLoader + cls.FILE = spec.origin + cls.LOADER = type(spec.loader) + assert cls.LOADER is ExtensionFileLoader + + # Start fresh. + cls.clean_up() + + def tearDown(self): + # Clean up the module. + self.clean_up() + + @classmethod + def clean_up(cls): + name = cls.NAME + filename = cls.FILE + if name in sys.modules: + if hasattr(sys.modules[name], '_clear_globals'): + assert sys.modules[name].__file__ == filename + sys.modules[name]._clear_globals() + del sys.modules[name] + # Clear all internally cached data for the extension. + _testinternalcapi.clear_extension(name, filename) + + ######################### + # helpers + + def add_module_cleanup(self, name): + def clean_up(): + # Clear all internally cached data for the extension. + _testinternalcapi.clear_extension(name, self.FILE) + self.addCleanup(clean_up) + + def _load_dynamic(self, name, path): + """ + Load an extension module. + """ + # This is essentially copied from the old imp module. + from importlib._bootstrap import _load + loader = self.LOADER(name, path) + + # Issue bpo-24748: Skip the sys.modules check in _load_module_shim; + # always load new extension. 
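+ # Building a fresh spec and calling _load() below re-imports the
+ # extension even if it is already present in sys.modules.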
+ spec = importlib.util.spec_from_file_location(name, path, + loader=loader) + return _load(spec) + + def load(self, name): + try: + already_loaded = self.already_loaded + except AttributeError: + already_loaded = self.already_loaded = {} + assert name not in already_loaded + mod = self._load_dynamic(name, self.FILE) + self.assertNotIn(mod, already_loaded.values()) + already_loaded[name] = mod + return types.SimpleNamespace( + name=name, + module=mod, + snapshot=TestSinglePhaseSnapshot.from_module(mod), + ) + + def re_load(self, name, mod): + assert sys.modules[name] is mod + assert mod.__dict__ == mod.__dict__ + reloaded = self._load_dynamic(name, self.FILE) + return types.SimpleNamespace( + name=name, + module=reloaded, + snapshot=TestSinglePhaseSnapshot.from_module(reloaded), + ) + + # subinterpreters + + def add_subinterpreter(self): + interpid = _interpreters.create(isolated=False) + _interpreters.run_string(interpid, textwrap.dedent(''' + import sys + import _testinternalcapi + ''')) + def clean_up(): + _interpreters.run_string(interpid, textwrap.dedent(f''' + name = {self.NAME!r} + if name in sys.modules: + sys.modules[name]._clear_globals() + _testinternalcapi.clear_extension(name, {self.FILE!r}) + ''')) + _interpreters.destroy(interpid) + self.addCleanup(clean_up) + return interpid + + def import_in_subinterp(self, interpid=None, *, + postscript=None, + postcleanup=False, + ): + name = self.NAME + + if postcleanup: + import_ = 'import _testinternalcapi' if interpid is None else '' + postcleanup = f''' + {import_} + mod._clear_globals() + _testinternalcapi.clear_extension(name, {self.FILE!r}) + ''' + + try: + pipe = self._pipe + except AttributeError: + r, w = pipe = self._pipe = os.pipe() + self.addCleanup(os.close, r) + self.addCleanup(os.close, w) + + snapshot = TestSinglePhaseSnapshot.from_subinterp( + name, + interpid, + pipe=pipe, + import_first=True, + postscript=postscript, + postcleanup=postcleanup, + ) + + return types.SimpleNamespace( + name=name, + module=None, + snapshot=snapshot, + ) + + # checks + + def check_common(self, loaded): + isolated = False + + mod = loaded.module + if not mod: + # It came from a subinterpreter. + isolated = True + mod = loaded.snapshot.module + # mod.__name__ might not match, but the spec will. + self.assertEqual(mod.__spec__.name, loaded.name) + self.assertEqual(mod.__file__, self.FILE) + self.assertEqual(mod.__spec__.origin, self.FILE) + if not isolated: + self.assertTrue(issubclass(mod.error, Exception)) + self.assertEqual(mod.int_const, 1969) + self.assertEqual(mod.str_const, 'something different') + self.assertIsInstance(mod._module_initialized, float) + self.assertGreater(mod._module_initialized, 0) + + snap = loaded.snapshot + self.assertEqual(snap.summed, 3) + if snap.state_initialized is not None: + self.assertIsInstance(snap.state_initialized, float) + self.assertGreater(snap.state_initialized, 0) + if isolated: + # The "looked up" module is interpreter-specific + # (interp->imports.modules_by_index was set for the module). + self.assertEqual(snap.lookedup_id, snap.id) + self.assertEqual(snap.cached_id, snap.id) + with self.assertRaises(AttributeError): + snap.spam + else: + self.assertIs(snap.lookedup, mod) + self.assertIs(snap.cached, mod) + + def check_direct(self, loaded): + # The module has its own PyModuleDef, with a matching name. 
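+ # Both the name and the self-lookup should point back at this exact
+ # module object.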
+ self.assertEqual(loaded.module.__name__, loaded.name) + self.assertIs(loaded.snapshot.lookedup, loaded.module) + + def check_indirect(self, loaded, orig): + # The module re-uses another's PyModuleDef, with a different name. + assert orig is not loaded.module + assert orig.__name__ != loaded.name + self.assertNotEqual(loaded.module.__name__, loaded.name) + self.assertIs(loaded.snapshot.lookedup, loaded.module) + + def check_basic(self, loaded, expected_init_count): + # m_size == -1 + # The module loads fresh the first time and copies m_copy after. + snap = loaded.snapshot + self.assertIsNot(snap.state_initialized, None) + self.assertIsInstance(snap.init_count, int) + self.assertGreater(snap.init_count, 0) + self.assertEqual(snap.init_count, expected_init_count) + + def check_with_reinit(self, loaded): + # m_size >= 0 + # The module loads fresh every time. + pass + + def check_fresh(self, loaded): + """ + The module had not been loaded before (at least since fully reset). + """ + snap = loaded.snapshot + # The module's init func was run. + # A copy of the module's __dict__ was stored in def->m_base.m_copy. + # The previous m_copy was deleted first. + # _PyRuntime.imports.extensions was set. + self.assertEqual(snap.init_count, 1) + # The global state was initialized. + # The module attrs were initialized from that state. + self.assertEqual(snap.module._module_initialized, + snap.state_initialized) + + def check_semi_fresh(self, loaded, base, prev): + """ + The module had been loaded before and then reset + (but the module global state wasn't). + """ + snap = loaded.snapshot + # The module's init func was run again. + # A copy of the module's __dict__ was stored in def->m_base.m_copy. + # The previous m_copy was deleted first. + # The module globals did not get reset. + self.assertNotEqual(snap.id, base.snapshot.id) + self.assertNotEqual(snap.id, prev.snapshot.id) + self.assertEqual(snap.init_count, prev.snapshot.init_count + 1) + # The global state was updated. + # The module attrs were initialized from that state. + self.assertEqual(snap.module._module_initialized, + snap.state_initialized) + self.assertNotEqual(snap.state_initialized, + base.snapshot.state_initialized) + self.assertNotEqual(snap.state_initialized, + prev.snapshot.state_initialized) + + def check_copied(self, loaded, base): + """ + The module had been loaded before and never reset. + """ + snap = loaded.snapshot + # The module's init func was not run again. + # The interpreter copied m_copy, as set by the other interpreter, + # with objects owned by the other interpreter. + # The module globals did not get reset. + self.assertNotEqual(snap.id, base.snapshot.id) + self.assertEqual(snap.init_count, base.snapshot.init_count) + # The global state was not updated since the init func did not run. + # The module attrs were not directly initialized from that state. + # The state and module attrs still match the previous loading. 
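+ # Both timestamps should therefore still match the values recorded
+ # at the original load.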
+ self.assertEqual(snap.module._module_initialized, + snap.state_initialized) + self.assertEqual(snap.state_initialized, + base.snapshot.state_initialized) + + ######################### + # the tests + + def test_cleared_globals(self): + loaded = self.load(self.NAME) + _testsinglephase = loaded.module + init_before = _testsinglephase.state_initialized() + + _testsinglephase._clear_globals() + init_after = _testsinglephase.state_initialized() + init_count = _testsinglephase.initialized_count() + + self.assertGreater(init_before, 0) + self.assertEqual(init_after, 0) + self.assertEqual(init_count, -1) + + def test_variants(self): + # Exercise the most meaningful variants described in Python/import.c. + self.maxDiff = None + + # Check the "basic" module. + + name = self.NAME + expected_init_count = 1 + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.check_direct(loaded) + self.check_basic(loaded, expected_init_count) + basic = loaded.module + + # Check its indirect variants. + + name = f'{self.NAME}_basic_wrapper' + self.add_module_cleanup(name) + expected_init_count += 1 + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.check_indirect(loaded, basic) + self.check_basic(loaded, expected_init_count) + + # Currently PyState_AddModule() always replaces the cached module. + self.assertIs(basic.look_up_self(), loaded.module) + self.assertEqual(basic.initialized_count(), expected_init_count) + + # The cached module shouldn't change after this point. + basic_lookedup = loaded.module + + # Check its direct variant. + + name = f'{self.NAME}_basic_copy' + self.add_module_cleanup(name) + expected_init_count += 1 + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.check_direct(loaded) + self.check_basic(loaded, expected_init_count) + + # This should change the cached module for _testsinglephase. + self.assertIs(basic.look_up_self(), basic_lookedup) + self.assertEqual(basic.initialized_count(), expected_init_count) + + # Check the non-basic variant that has no state. + + name = f'{self.NAME}_with_reinit' + self.add_module_cleanup(name) + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.assertIs(loaded.snapshot.state_initialized, None) + self.check_direct(loaded) + self.check_with_reinit(loaded) + + # This should change the cached module for _testsinglephase. + self.assertIs(basic.look_up_self(), basic_lookedup) + self.assertEqual(basic.initialized_count(), expected_init_count) + + # Check the basic variant that has state. + + name = f'{self.NAME}_with_state' + self.add_module_cleanup(name) + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.assertIsNot(loaded.snapshot.state_initialized, None) + self.check_direct(loaded) + self.check_with_reinit(loaded) + + # This should change the cached module for _testsinglephase. + self.assertIs(basic.look_up_self(), basic_lookedup) + self.assertEqual(basic.initialized_count(), expected_init_count) + + def test_basic_reloaded(self): + # m_copy is copied into the existing module object. + # Global state is not changed. 
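+ # Reloading should therefore hand back the very same module object,
+ # with its original __dict__ intact.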
+ self.maxDiff = None + + for name in [ + self.NAME, # the "basic" module + f'{self.NAME}_basic_wrapper', # the indirect variant + f'{self.NAME}_basic_copy', # the direct variant + ]: + self.add_module_cleanup(name) + with self.subTest(name): + loaded = self.load(name) + reloaded = self.re_load(name, loaded.module) + + self.check_common(loaded) + self.check_common(reloaded) + + # Make sure the original __dict__ did not get replaced. + self.assertEqual(id(loaded.module.__dict__), + loaded.snapshot.ns_id) + self.assertEqual(loaded.snapshot.ns.__dict__, + loaded.module.__dict__) + + self.assertEqual(reloaded.module.__spec__.name, reloaded.name) + self.assertEqual(reloaded.module.__name__, + reloaded.snapshot.ns.__name__) + + self.assertIs(reloaded.module, loaded.module) + self.assertIs(reloaded.module.__dict__, loaded.module.__dict__) + # It only happens to be the same but that's good enough here. + # We really just want to verify that the re-loaded attrs + # didn't change. + self.assertIs(reloaded.snapshot.lookedup, + loaded.snapshot.lookedup) + self.assertEqual(reloaded.snapshot.state_initialized, + loaded.snapshot.state_initialized) + self.assertEqual(reloaded.snapshot.init_count, + loaded.snapshot.init_count) + + self.assertIs(reloaded.snapshot.cached, reloaded.module) + + def test_with_reinit_reloaded(self): + # The module's m_init func is run again. + self.maxDiff = None + + # Keep a reference around. + basic = self.load(self.NAME) + + for name in [ + f'{self.NAME}_with_reinit', # m_size == 0 + f'{self.NAME}_with_state', # m_size > 0 + ]: + self.add_module_cleanup(name) + with self.subTest(name): + loaded = self.load(name) + reloaded = self.re_load(name, loaded.module) + + self.check_common(loaded) + self.check_common(reloaded) + + # Make sure the original __dict__ did not get replaced. + self.assertEqual(id(loaded.module.__dict__), + loaded.snapshot.ns_id) + self.assertEqual(loaded.snapshot.ns.__dict__, + loaded.module.__dict__) + + self.assertEqual(reloaded.module.__spec__.name, reloaded.name) + self.assertEqual(reloaded.module.__name__, + reloaded.snapshot.ns.__name__) + + self.assertIsNot(reloaded.module, loaded.module) + self.assertNotEqual(reloaded.module.__dict__, + loaded.module.__dict__) + self.assertIs(reloaded.snapshot.lookedup, reloaded.module) + if loaded.snapshot.state_initialized is None: + self.assertIs(reloaded.snapshot.state_initialized, None) + else: + self.assertGreater(reloaded.snapshot.state_initialized, + loaded.snapshot.state_initialized) + + self.assertIs(reloaded.snapshot.cached, reloaded.module) + + # Currently, for every single-phrase init module loaded + # in multiple interpreters, those interpreters share a + # PyModuleDef for that object, which can be a problem. + # Also, we test with a single-phase module that has global state, + # which is shared by all interpreters. + + @requires_subinterpreters + def test_basic_multiple_interpreters_main_no_reset(self): + # without resetting; already loaded in main interpreter + + # At this point: + # * alive in 0 interpreters + # * module def may or may not be loaded already + # * module def not in _PyRuntime.imports.extensions + # * mod init func has not run yet (since reset, at least) + # * m_copy not set (hasn't been loaded yet or already cleared) + # * module's global state has not been initialized yet + # (or already cleared) + + main_loaded = self.load(self.NAME) + _testsinglephase = main_loaded.module + # Attrs set after loading are not in m_copy. 
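+ # (An attribute added here should not show up in the copies imported
+ # by other interpreters.)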
+ _testsinglephase.spam = 'spam, spam, spam, spam, eggs, and spam' + + self.check_common(main_loaded) + self.check_fresh(main_loaded) + + interpid1 = self.add_subinterpreter() + interpid2 = self.add_subinterpreter() + + # At this point: + # * alive in 1 interpreter (main) + # * module def in _PyRuntime.imports.extensions + # * mod init func ran for the first time (since reset, at least) + # * m_copy was copied from the main interpreter (was NULL) + # * module's global state was initialized + + # Use an interpreter that gets destroyed right away. + loaded = self.import_in_subinterp() + self.check_common(loaded) + self.check_copied(loaded, main_loaded) + + # At this point: + # * alive in 1 interpreter (main) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy is NULL (claered when the interpreter was destroyed) + # (was from main interpreter) + # * module's global state was updated, not reset + + # Use a subinterpreter that sticks around. + loaded = self.import_in_subinterp(interpid1) + self.check_common(loaded) + self.check_copied(loaded, main_loaded) + + # At this point: + # * alive in 2 interpreters (main, interp1) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy was copied from interp1 + # * module's global state was updated, not reset + + # Use a subinterpreter while the previous one is still alive. + loaded = self.import_in_subinterp(interpid2) + self.check_common(loaded) + self.check_copied(loaded, main_loaded) + + # At this point: + # * alive in 3 interpreters (main, interp1, interp2) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy was copied from interp2 (was from interp1) + # * module's global state was updated, not reset + + @requires_subinterpreters + def test_basic_multiple_interpreters_deleted_no_reset(self): + # without resetting; already loaded in a deleted interpreter + + # At this point: + # * alive in 0 interpreters + # * module def may or may not be loaded already + # * module def not in _PyRuntime.imports.extensions + # * mod init func has not run yet (since reset, at least) + # * m_copy not set (hasn't been loaded yet or already cleared) + # * module's global state has not been initialized yet + # (or already cleared) + + interpid1 = self.add_subinterpreter() + interpid2 = self.add_subinterpreter() + + # First, load in the main interpreter but then completely clear it. + loaded_main = self.load(self.NAME) + loaded_main.module._clear_globals() + _testinternalcapi.clear_extension(self.NAME, self.FILE) + + # At this point: + # * alive in 0 interpreters + # * module def loaded already + # * module def was in _PyRuntime.imports.extensions, but cleared + # * mod init func ran for the first time (since reset, at least) + # * m_copy was set, but cleared (was NULL) + # * module's global state was initialized but cleared + + # Start with an interpreter that gets destroyed right away. + base = self.import_in_subinterp(postscript=''' + # Attrs set after loading are not in m_copy. + mod.spam = 'spam, spam, mash, spam, eggs, and spam' + ''') + self.check_common(base) + self.check_fresh(base) + + # At this point: + # * alive in 0 interpreters + # * module def in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy is NULL (claered when the interpreter was destroyed) + # * module's global state was initialized, not reset + + # Use a subinterpreter that sticks around. 
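+ # Importing in interp1 should re-run the init func, since m_copy was
+ # cleared when the previous interpreter was destroyed;
+ # check_semi_fresh() below verifies that.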
+ loaded_interp1 = self.import_in_subinterp(interpid1) + self.check_common(loaded_interp1) + self.check_semi_fresh(loaded_interp1, loaded_main, base) + + # At this point: + # * alive in 1 interpreter (interp1) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy was copied from interp1 (was NULL) + # * module's global state was updated, not reset + + # Use a subinterpreter while the previous one is still alive. + loaded_interp2 = self.import_in_subinterp(interpid2) + self.check_common(loaded_interp2) + self.check_copied(loaded_interp2, loaded_interp1) + + # At this point: + # * alive in 2 interpreters (interp1, interp2) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy was copied from interp2 (was from interp1) + # * module's global state was updated, not reset + + @requires_subinterpreters + def test_basic_multiple_interpreters_reset_each(self): + # resetting between each interpreter + + # At this point: + # * alive in 0 interpreters + # * module def may or may not be loaded already + # * module def not in _PyRuntime.imports.extensions + # * mod init func has not run yet (since reset, at least) + # * m_copy not set (hasn't been loaded yet or already cleared) + # * module's global state has not been initialized yet + # (or already cleared) + + interpid1 = self.add_subinterpreter() + interpid2 = self.add_subinterpreter() + + # Use an interpreter that gets destroyed right away. + loaded = self.import_in_subinterp( + postscript=''' + # Attrs set after loading are not in m_copy. + mod.spam = 'spam, spam, mash, spam, eggs, and spam' + ''', + postcleanup=True, + ) + self.check_common(loaded) + self.check_fresh(loaded) + + # At this point: + # * alive in 0 interpreters + # * module def in _PyRuntime.imports.extensions + # * mod init func ran for the first time (since reset, at least) + # * m_copy is NULL (claered when the interpreter was destroyed) + # * module's global state was initialized, not reset + + # Use a subinterpreter that sticks around. + loaded = self.import_in_subinterp(interpid1, postcleanup=True) + self.check_common(loaded) + self.check_fresh(loaded) + + # At this point: + # * alive in 1 interpreter (interp1) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy was copied from interp1 (was NULL) + # * module's global state was initialized, not reset + + # Use a subinterpreter while the previous one is still alive. + loaded = self.import_in_subinterp(interpid2, postcleanup=True) + self.check_common(loaded) + self.check_fresh(loaded) + + # At this point: + # * alive in 2 interpreters (interp2, interp2) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy was copied from interp2 (was from interp1) + # * module's global state was initialized, not reset + if __name__ == '__main__': # Test needs to be a package, so we can do relative imports. diff --git a/Lib/test/test_importlib/_context.py b/Lib/test/test_importlib/_context.py new file mode 100644 index 00000000000000..8a53eb55d1503b --- /dev/null +++ b/Lib/test/test_importlib/_context.py @@ -0,0 +1,13 @@ +import contextlib + + +# from jaraco.context 4.3 +class suppress(contextlib.suppress, contextlib.ContextDecorator): + """ + A version of contextlib.suppress with decorator support. + + >>> @suppress(KeyError) + ... def key_error(): + ... 
{}[''] + >>> key_error() + """ diff --git a/Lib/test/test_importlib/_path.py b/Lib/test/test_importlib/_path.py new file mode 100644 index 00000000000000..71a704389b986e --- /dev/null +++ b/Lib/test/test_importlib/_path.py @@ -0,0 +1,109 @@ +# from jaraco.path 3.5 + +import functools +import pathlib +from typing import Dict, Union + +try: + from typing import Protocol, runtime_checkable +except ImportError: # pragma: no cover + # Python 3.7 + from typing_extensions import Protocol, runtime_checkable # type: ignore + + +FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore + + +@runtime_checkable +class TreeMaker(Protocol): + def __truediv__(self, *args, **kwargs): + ... # pragma: no cover + + def mkdir(self, **kwargs): + ... # pragma: no cover + + def write_text(self, content, **kwargs): + ... # pragma: no cover + + def write_bytes(self, content): + ... # pragma: no cover + + +def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker: + return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore + + +def build( + spec: FilesSpec, + prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore +): + """ + Build a set of files/directories, as described by the spec. + + Each key represents a pathname, and the value represents + the content. Content may be a nested directory. + + >>> spec = { + ... 'README.txt': "A README file", + ... "foo": { + ... "__init__.py": "", + ... "bar": { + ... "__init__.py": "", + ... }, + ... "baz.py": "# Some code", + ... } + ... } + >>> target = getfixture('tmp_path') + >>> build(spec, target) + >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8') + '# Some code' + """ + for name, contents in spec.items(): + create(contents, _ensure_tree_maker(prefix) / name) + + +@functools.singledispatch +def create(content: Union[str, bytes, FilesSpec], path): + path.mkdir(exist_ok=True) + build(content, prefix=path) # type: ignore + + +@create.register +def _(content: bytes, path): + path.write_bytes(content) + + +@create.register +def _(content: str, path): + path.write_text(content, encoding='utf-8') + + +@create.register +def _(content: str, path): + path.write_text(content, encoding='utf-8') + + +class Recording: + """ + A TreeMaker object that records everything that would be written. + + >>> r = Recording() + >>> build({'foo': {'foo1.txt': 'yes'}, 'bar.txt': 'abc'}, r) + >>> r.record + ['foo/foo1.txt', 'bar.txt'] + """ + + def __init__(self, loc=pathlib.PurePosixPath(), record=None): + self.loc = loc + self.record = record if record is not None else [] + + def __truediv__(self, other): + return Recording(self.loc / other, self.record) + + def write_text(self, content, **kwargs): + self.record.append(str(self.loc)) + + write_bytes = write_text + + def mkdir(self, **kwargs): + return diff --git a/Lib/test/test_importlib/builtin/test_finder.py b/Lib/test/test_importlib/builtin/test_finder.py index a4869e07b9c0c2..81dc5a3699d952 100644 --- a/Lib/test/test_importlib/builtin/test_finder.py +++ b/Lib/test/test_importlib/builtin/test_finder.py @@ -37,13 +37,6 @@ def test_failure(self): spec = self.machinery.BuiltinImporter.find_spec(name) self.assertIsNone(spec) - def test_ignore_path(self): - # The value for 'path' should always trigger a failed import. 
- with util.uncache(util.BUILTINS.good_name): - spec = self.machinery.BuiltinImporter.find_spec(util.BUILTINS.good_name, - ['pkg']) - self.assertIsNone(spec) - (Frozen_FindSpecTests, Source_FindSpecTests @@ -77,16 +70,6 @@ def test_failure(self): loader = self.machinery.BuiltinImporter.find_module('importlib') self.assertIsNone(loader) - def test_ignore_path(self): - # The value for 'path' should always trigger a failed import. - with util.uncache(util.BUILTINS.good_name): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - loader = self.machinery.BuiltinImporter.find_module( - util.BUILTINS.good_name, - ['pkg']) - self.assertIsNone(loader) - (Frozen_FinderTests, Source_FinderTests diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py index e7be77b3957c67..a364a977bce781 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/fixtures.py @@ -10,7 +10,10 @@ from test.support.os_helper import FS_NONASCII from test.support import requires_zlib -from typing import Dict, Union + +from . import _path +from ._path import FilesSpec + try: from importlib import resources # type: ignore @@ -83,13 +86,8 @@ def setUp(self): self.fixtures.enter_context(self.add_sys_path(self.site_dir)) -# Except for python/mypy#731, prefer to define -# FilesDef = Dict[str, Union['FilesDef', str]] -FilesDef = Dict[str, Union[Dict[str, Union[Dict[str, str], str]], str]] - - class DistInfoPkg(OnSysPath, SiteDir): - files: FilesDef = { + files: FilesSpec = { "distinfo_pkg-1.0.0.dist-info": { "METADATA": """ Name: distinfo-pkg @@ -131,7 +129,7 @@ def make_uppercase(self): class DistInfoPkgWithDot(OnSysPath, SiteDir): - files: FilesDef = { + files: FilesSpec = { "pkg_dot-1.0.0.dist-info": { "METADATA": """ Name: pkg.dot @@ -146,7 +144,7 @@ def setUp(self): class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir): - files: FilesDef = { + files: FilesSpec = { "pkg.dot-1.0.0.dist-info": { "METADATA": """ Name: pkg.dot @@ -173,7 +171,7 @@ def setUp(self): class EggInfoPkg(OnSysPath, SiteDir): - files: FilesDef = { + files: FilesSpec = { "egginfo_pkg.egg-info": { "PKG-INFO": """ Name: egginfo-pkg @@ -212,8 +210,99 @@ def setUp(self): build_files(EggInfoPkg.files, prefix=self.site_dir) +class EggInfoPkgPipInstalledNoToplevel(OnSysPath, SiteDir): + files: FilesSpec = { + "egg_with_module_pkg.egg-info": { + "PKG-INFO": "Name: egg_with_module-pkg", + # SOURCES.txt is made from the source archive, and contains files + # (setup.py) that are not present after installation. + "SOURCES.txt": """ + egg_with_module.py + setup.py + egg_with_module_pkg.egg-info/PKG-INFO + egg_with_module_pkg.egg-info/SOURCES.txt + egg_with_module_pkg.egg-info/top_level.txt + """, + # installed-files.txt is written by pip, and is a strictly more + # accurate source than SOURCES.txt as to the installed contents of + # the package. 
+ "installed-files.txt": """ + ../egg_with_module.py + PKG-INFO + SOURCES.txt + top_level.txt + """, + # missing top_level.txt (to trigger fallback to installed-files.txt) + }, + "egg_with_module.py": """ + def main(): + print("hello world") + """, + } + + def setUp(self): + super().setUp() + build_files(EggInfoPkgPipInstalledNoToplevel.files, prefix=self.site_dir) + + +class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteDir): + files: FilesSpec = { + "egg_with_no_modules_pkg.egg-info": { + "PKG-INFO": "Name: egg_with_no_modules-pkg", + # SOURCES.txt is made from the source archive, and contains files + # (setup.py) that are not present after installation. + "SOURCES.txt": """ + setup.py + egg_with_no_modules_pkg.egg-info/PKG-INFO + egg_with_no_modules_pkg.egg-info/SOURCES.txt + egg_with_no_modules_pkg.egg-info/top_level.txt + """, + # installed-files.txt is written by pip, and is a strictly more + # accurate source than SOURCES.txt as to the installed contents of + # the package. + "installed-files.txt": """ + PKG-INFO + SOURCES.txt + top_level.txt + """, + # top_level.txt correctly reflects that no modules are installed + "top_level.txt": b"\n", + }, + } + + def setUp(self): + super().setUp() + build_files(EggInfoPkgPipInstalledNoModules.files, prefix=self.site_dir) + + +class EggInfoPkgSourcesFallback(OnSysPath, SiteDir): + files: FilesSpec = { + "sources_fallback_pkg.egg-info": { + "PKG-INFO": "Name: sources_fallback-pkg", + # SOURCES.txt is made from the source archive, and contains files + # (setup.py) that are not present after installation. + "SOURCES.txt": """ + sources_fallback.py + setup.py + sources_fallback_pkg.egg-info/PKG-INFO + sources_fallback_pkg.egg-info/SOURCES.txt + """, + # missing installed-files.txt (i.e. not installed by pip) and + # missing top_level.txt (to trigger fallback to SOURCES.txt) + }, + "sources_fallback.py": """ + def main(): + print("hello world") + """, + } + + def setUp(self): + super().setUp() + build_files(EggInfoPkgSourcesFallback.files, prefix=self.site_dir) + + class EggInfoFile(OnSysPath, SiteDir): - files: FilesDef = { + files: FilesSpec = { "egginfo_file.egg-info": """ Metadata-Version: 1.0 Name: egginfo_file @@ -233,38 +322,22 @@ def setUp(self): build_files(EggInfoFile.files, prefix=self.site_dir) -def build_files(file_defs, prefix=pathlib.Path()): - """Build a set of files/directories, as described by the +# dedent all text strings before writing +orig = _path.create.registry[str] +_path.create.register(str, lambda content, path: orig(DALS(content), path)) - file_defs dictionary. Each key/value pair in the dictionary is - interpreted as a filename/contents pair. If the contents value is a - dictionary, a directory is created, and the dictionary interpreted - as the files within it, recursively. 
- For example: +build_files = _path.build - {"README.txt": "A README file", - "foo": { - "__init__.py": "", - "bar": { - "__init__.py": "", - }, - "baz.py": "# Some code", - } - } - """ - for name, contents in file_defs.items(): - full_name = prefix / name - if isinstance(contents, dict): - full_name.mkdir() - build_files(contents, prefix=full_name) - else: - if isinstance(contents, bytes): - with full_name.open('wb') as f: - f.write(contents) - else: - with full_name.open('w', encoding='utf-8') as f: - f.write(DALS(contents)) + +def build_record(file_defs): + return ''.join(f'{name},,\n' for name in record_names(file_defs)) + + +def record_names(file_defs): + recording = _path.Recording() + _path.build(file_defs, recording) + return recording.record class FileBuilder: diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py index 30b68b6ae7d86e..46cd2b696d4cc8 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/test_main.py @@ -1,7 +1,10 @@ import re import pickle import unittest +import warnings import importlib.metadata +import contextlib +import itertools try: import pyfakefs.fake_filesystem_unittest as ffs @@ -9,6 +12,7 @@ from .stubs import fake_filesystem_unittest as ffs from . import fixtures +from ._context import suppress from importlib.metadata import ( Distribution, EntryPoint, @@ -22,6 +26,13 @@ ) +@contextlib.contextmanager +def suppress_known_deprecation(): + with warnings.catch_warnings(record=True) as ctx: + warnings.simplefilter('default', category=DeprecationWarning) + yield ctx + + class BasicTests(fixtures.DistInfoPkg, unittest.TestCase): version_pattern = r'\d+\.\d+(\.\d)?' @@ -37,7 +48,7 @@ def test_for_name_does_not_exist(self): def test_package_not_found_mentions_metadata(self): """ When a package is not found, that could indicate that the - packgae is not installed or that it is installed without + package is not installed or that it is installed without metadata. Ensure the exception mentions metadata to help guide users toward the cause. See #124. 
""" @@ -46,8 +57,12 @@ def test_package_not_found_mentions_metadata(self): assert "metadata" in str(ctx.exception) - def test_new_style_classes(self): - self.assertIsInstance(Distribution, type) + # expected to fail until ABC is enforced + @suppress(AssertionError) + @suppress_known_deprecation() + def test_abc_enforced(self): + with self.assertRaises(TypeError): + type('DistributionSubclass', (Distribution,), {})() @fixtures.parameterize( dict(name=None), @@ -172,11 +187,21 @@ def test_metadata_loads_egg_info(self): assert meta['Description'] == 'pôrˈtend' -class DiscoveryTests(fixtures.EggInfoPkg, fixtures.DistInfoPkg, unittest.TestCase): +class DiscoveryTests( + fixtures.EggInfoPkg, + fixtures.EggInfoPkgPipInstalledNoToplevel, + fixtures.EggInfoPkgPipInstalledNoModules, + fixtures.EggInfoPkgSourcesFallback, + fixtures.DistInfoPkg, + unittest.TestCase, +): def test_package_discovery(self): dists = list(distributions()) assert all(isinstance(dist, Distribution) for dist in dists) assert any(dist.metadata['Name'] == 'egginfo-pkg' for dist in dists) + assert any(dist.metadata['Name'] == 'egg_with_module-pkg' for dist in dists) + assert any(dist.metadata['Name'] == 'egg_with_no_modules-pkg' for dist in dists) + assert any(dist.metadata['Name'] == 'sources_fallback-pkg' for dist in dists) assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists) def test_invalid_usage(self): @@ -324,3 +349,79 @@ def test_packages_distributions_neither_toplevel_nor_files(self): prefix=self.site_dir, ) packages_distributions() + + def test_packages_distributions_all_module_types(self): + """ + Test top-level modules detected on a package without 'top-level.txt'. + """ + suffixes = importlib.machinery.all_suffixes() + metadata = dict( + METADATA=""" + Name: all_distributions + Version: 1.0.0 + """, + ) + files = { + 'all_distributions-1.0.0.dist-info': metadata, + } + for i, suffix in enumerate(suffixes): + files.update( + { + f'importable-name {i}{suffix}': '', + f'in_namespace_{i}': { + f'mod{suffix}': '', + }, + f'in_package_{i}': { + '__init__.py': '', + f'mod{suffix}': '', + }, + } + ) + metadata.update(RECORD=fixtures.build_record(files)) + fixtures.build_files(files, prefix=self.site_dir) + + distributions = packages_distributions() + + for i in range(len(suffixes)): + assert distributions[f'importable-name {i}'] == ['all_distributions'] + assert distributions[f'in_namespace_{i}'] == ['all_distributions'] + assert distributions[f'in_package_{i}'] == ['all_distributions'] + + assert not any(name.endswith('.dist-info') for name in distributions) + + +class PackagesDistributionsEggTest( + fixtures.EggInfoPkg, + fixtures.EggInfoPkgPipInstalledNoToplevel, + fixtures.EggInfoPkgPipInstalledNoModules, + fixtures.EggInfoPkgSourcesFallback, + unittest.TestCase, +): + def test_packages_distributions_on_eggs(self): + """ + Test old-style egg packages with a variation of 'top_level.txt', + 'SOURCES.txt', and 'installed-files.txt', available. 
+ """ + distributions = packages_distributions() + + def import_names_from_package(package_name): + return { + import_name + for import_name, package_names in distributions.items() + if package_name in package_names + } + + # egginfo-pkg declares one import ('mod') via top_level.txt + assert import_names_from_package('egginfo-pkg') == {'mod'} + + # egg_with_module-pkg has one import ('egg_with_module') inferred from + # installed-files.txt (top_level.txt is missing) + assert import_names_from_package('egg_with_module-pkg') == {'egg_with_module'} + + # egg_with_no_modules-pkg should not be associated with any import names + # (top_level.txt is empty, and installed-files.txt has no .py files) + assert import_names_from_package('egg_with_no_modules-pkg') == set() + + # sources_fallback-pkg has one import ('sources_fallback') inferred from + # SOURCES.txt (top_level.txt and installed-files.txt is missing) + assert import_names_from_package('sources_fallback-pkg') == {'sources_fallback'} diff --git a/Lib/test/test_importlib/test_metadata_api.py b/Lib/test/test_importlib/test_metadata_api.py index 71c47e62d27124..33c6e85ee94753 100644 --- a/Lib/test/test_importlib/test_metadata_api.py +++ b/Lib/test/test_importlib/test_metadata_api.py @@ -27,12 +27,14 @@ def suppress_known_deprecation(): class APITests( fixtures.EggInfoPkg, + fixtures.EggInfoPkgPipInstalledNoToplevel, + fixtures.EggInfoPkgPipInstalledNoModules, + fixtures.EggInfoPkgSourcesFallback, fixtures.DistInfoPkg, fixtures.DistInfoPkgWithDot, fixtures.EggInfoFile, unittest.TestCase, ): - version_pattern = r'\d+\.\d+(\.\d)?' def test_retrieves_version_of_self(self): @@ -63,15 +65,28 @@ def test_prefix_not_matched(self): distribution(prefix) def test_for_top_level(self): - self.assertEqual( - distribution('egginfo-pkg').read_text('top_level.txt').strip(), 'mod' - ) + tests = [ + ('egginfo-pkg', 'mod'), + ('egg_with_no_modules-pkg', ''), + ] + for pkg_name, expect_content in tests: + with self.subTest(pkg_name): + self.assertEqual( + distribution(pkg_name).read_text('top_level.txt').strip(), + expect_content, + ) def test_read_text(self): - top_level = [ - path for path in files('egginfo-pkg') if path.name == 'top_level.txt' - ][0] - self.assertEqual(top_level.read_text(), 'mod\n') + tests = [ + ('egginfo-pkg', 'mod\n'), + ('egg_with_no_modules-pkg', '\n'), + ] + for pkg_name, expect_content in tests: + with self.subTest(pkg_name): + top_level = [ + path for path in files(pkg_name) if path.name == 'top_level.txt' + ][0] + self.assertEqual(top_level.read_text(), expect_content) def test_entry_points(self): eps = entry_points() @@ -137,6 +152,28 @@ def test_metadata_for_this_package(self): classifiers = md.get_all('Classifier') assert 'Topic :: Software Development :: Libraries' in classifiers + def test_missing_key_legacy(self): + """ + Requesting a missing key will still return None, but warn. + """ + md = metadata('distinfo-pkg') + with suppress_known_deprecation(): + assert md['does-not-exist'] is None + + def test_get_key(self): + """ + Getting a key gets the key. + """ + md = metadata('egginfo-pkg') + assert md.get('Name') == 'egginfo-pkg' + + def test_get_missing_key(self): + """ + Requesting a missing key will return None. 
+ """ + md = metadata('distinfo-pkg') + assert md.get('does-not-exist') is None + @staticmethod def _test_files(files): root = files[0].root @@ -159,6 +196,9 @@ def test_files_dist_info(self): def test_files_egg_info(self): self._test_files(files('egginfo-pkg')) + self._test_files(files('egg_with_module-pkg')) + self._test_files(files('egg_with_no_modules-pkg')) + self._test_files(files('sources_fallback-pkg')) def test_version_egg_info_file(self): self.assertEqual(version('egginfo-file'), '0.1') diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py index 9032fd18d3f95b..e348733f6ce3c3 100644 --- a/Lib/test/test_importlib/util.py +++ b/Lib/test/test_importlib/util.py @@ -131,9 +131,8 @@ def uncache(*names): """ for name in names: - if name in ('sys', 'marshal', 'imp'): - raise ValueError( - "cannot uncache {0}".format(name)) + if name in ('sys', 'marshal'): + raise ValueError("cannot uncache {}".format(name)) try: del sys.modules[name] except KeyError: diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 0511b9e60e7b6c..e118114186dec3 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -430,7 +430,7 @@ def __init__(self, *args, **kwargs): git.abuse(7, 8, 9) def test_abuse_done(self): - self.istest(inspect.istraceback, 'git.ex[2]') + self.istest(inspect.istraceback, 'git.ex.__traceback__') self.istest(inspect.isframe, 'mod.fr') def test_stack(self): @@ -1820,8 +1820,7 @@ def test_errors(self): self.assertEqualException(f, '2, 3, 4') self.assertEqualException(f, '1, 2, 3, a=1') self.assertEqualException(f, '2, 3, 4, c=5') - # XXX: success of this one depends on dict order - ## self.assertEqualException(f, '2, 3, 4, a=1, c=5') + self.assertEqualException(f, '2, 3, 4, a=1, c=5') # f got an unexpected keyword argument self.assertEqualException(f, 'c=2') self.assertEqualException(f, '2, c=3') @@ -1832,17 +1831,19 @@ def test_errors(self): self.assertEqualException(f, '1, a=2') self.assertEqualException(f, '1, **{"a":2}') self.assertEqualException(f, '1, 2, b=3') - # XXX: Python inconsistency - # - for functions and bound methods: unexpected keyword 'c' - # - for unbound methods: multiple values for keyword 'a' - #self.assertEqualException(f, '1, c=3, a=2') + self.assertEqualException(f, '1, c=3, a=2') # issue11256: f3 = self.makeCallable('**c') self.assertEqualException(f3, '1, 2') self.assertEqualException(f3, '1, 2, a=1, b=2') f4 = self.makeCallable('*, a, b=0') - self.assertEqualException(f3, '1, 2') - self.assertEqualException(f3, '1, 2, a=1, b=2') + self.assertEqualException(f4, '1, 2') + self.assertEqualException(f4, '1, 2, a=1, b=2') + self.assertEqualException(f4, 'a=1, a=3') + self.assertEqualException(f4, 'a=1, c=3') + self.assertEqualException(f4, 'a=1, a=3, b=4') + self.assertEqualException(f4, 'a=1, b=2, a=3, b=4') + self.assertEqualException(f4, 'a=1, a=2, a=3, b=4') # issue #20816: getcallargs() fails to iterate over non-existent # kwonlydefaults and raises a wrong TypeError @@ -2462,18 +2463,43 @@ def test_signature_object(self): self.assertEqual(str(S()), '()') self.assertEqual(repr(S().parameters), 'mappingproxy(OrderedDict())') - def test(po, pk, pod=42, pkd=100, *args, ko, **kwargs): + def test(po, /, pk, pkd=100, *args, ko, kod=10, **kwargs): pass + sig = inspect.signature(test) - po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY) - pod = sig.parameters['pod'].replace(kind=P.POSITIONAL_ONLY) + self.assertTrue(repr(sig).startswith('<Signature')) + self.assertTrue('(po, /, pk' in repr(sig)) + + # We need two 
functions, because it is impossible to represent + # all param kinds in a single one. + def test2(pod=42, /): + pass + + sig2 = inspect.signature(test2) + self.assertTrue(repr(sig2).startswith('<Signature')) + self.assertTrue('(pod=42, /)' in repr(sig2)) + + po = sig.parameters['po'] + pod = sig2.parameters['pod'] pk = sig.parameters['pk'] pkd = sig.parameters['pkd'] args = sig.parameters['args'] ko = sig.parameters['ko'] + kod = sig.parameters['kod'] kwargs = sig.parameters['kwargs'] S((po, pk, args, ko, kwargs)) + S((po, pk, ko, kod)) + S((po, pod, ko)) + S((po, pod, kod)) + S((pod, ko, kod)) + S((pod, kod)) + S((pod, args, kod, kwargs)) + # keyword-only parameters without default values + # can follow keyword-only parameters with default values: + S((kod, ko)) + S((kod, ko, kwargs)) + S((args, kod, ko)) with self.assertRaisesRegex(ValueError, 'wrong parameter order'): S((pk, po, args, ko, kwargs)) @@ -2494,15 +2520,18 @@ def test(po, pk, pod=42, pkd=100, *args, ko, **kwargs): with self.assertRaisesRegex(ValueError, 'follows default argument'): S((pod, po)) + with self.assertRaisesRegex(ValueError, 'follows default argument'): + S((pod, pk)) + + with self.assertRaisesRegex(ValueError, 'follows default argument'): + S((po, pod, pk)) + with self.assertRaisesRegex(ValueError, 'follows default argument'): S((po, pkd, pk)) with self.assertRaisesRegex(ValueError, 'follows default argument'): S((pkd, pk)) - self.assertTrue(repr(sig).startswith('<Signature')) - self.assertTrue('(po, pk' in repr(sig)) - def test_signature_object_pickle(self): def foo(a, b, *, c:1={}, **kw) -> {42:'ham'}: pass foo_partial = functools.partial(foo, a=1) @@ -2872,8 +2901,6 @@ def foo(cls, *, arg): def test_signature_on_partial(self): from functools import partial - Parameter = inspect.Parameter - def test(): pass @@ -2988,8 +3015,6 @@ def test(a, b, c:int) -> 42: ((('c', ..., int, "positional_or_keyword"),), 42)) - psig = inspect.signature(partial(partial(test, 1), 2)) - def foo(a): return a _foo = partial(partial(foo, a=10), a=20) @@ -3044,14 +3069,9 @@ def foo(a=1, b=2, c=3): self.assertEqual(_foo(*ba.args, **ba.kwargs), (12, 10, 20)) - def foo(a, b, c, d, **kwargs): + def foo(a, b, /, c, d, **kwargs): pass sig = inspect.signature(foo) - params = sig.parameters.copy() - params['a'] = params['a'].replace(kind=Parameter.POSITIONAL_ONLY) - params['b'] = params['b'].replace(kind=Parameter.POSITIONAL_ONLY) - foo.__signature__ = inspect.Signature(params.values()) - sig = inspect.signature(foo) self.assertEqual(str(sig), '(a, b, /, c, d, **kwargs)') self.assertEqual(self.signature(partial(foo, 1)), @@ -3556,14 +3576,9 @@ def test_signature_str_positional_only(self): P = inspect.Parameter S = inspect.Signature - def test(a_po, *, b, **kwargs): + def test(a_po, /, *, b, **kwargs): return a_po, kwargs - sig = inspect.signature(test) - new_params = list(sig.parameters.values()) - new_params[0] = new_params[0].replace(kind=P.POSITIONAL_ONLY) - test.__signature__ = sig.replace(parameters=new_params) - self.assertEqual(str(inspect.signature(test)), '(a_po, /, *, b, **kwargs)') @@ -3593,6 +3608,14 @@ def test() -> 42: self.assertEqual(sig.return_annotation, 42) self.assertEqual(sig, inspect.signature(test)) + def test_signature_replaced(self): + def test(): + pass + + spam_param = inspect.Parameter('spam', inspect.Parameter.POSITIONAL_ONLY) + sig = test.__signature__ = inspect.Signature(parameters=(spam_param,)) + self.assertEqual(sig, inspect.signature(test)) + def test_signature_on_mangled_parameters(self): class Spam: def 
foo(self, __p1:1=2, *, __p2:2=3): @@ -4155,18 +4178,9 @@ def test(a, *args, b, z=100, **kwargs): self.assertEqual(ba.args, (10, 20)) def test_signature_bind_positional_only(self): - P = inspect.Parameter - - def test(a_po, b_po, c_po=3, foo=42, *, bar=50, **kwargs): + def test(a_po, b_po, c_po=3, /, foo=42, *, bar=50, **kwargs): return a_po, b_po, c_po, foo, bar, kwargs - sig = inspect.signature(test) - new_params = collections.OrderedDict(tuple(sig.parameters.items())) - for name in ('a_po', 'b_po', 'c_po'): - new_params[name] = new_params[name].replace(kind=P.POSITIONAL_ONLY) - new_sig = sig.replace(parameters=new_params.values()) - test.__signature__ = new_sig - self.assertEqual(self.call(test, 1, 2, 4, 5, bar=6), (1, 2, 4, 5, 6, {})) @@ -4587,7 +4601,6 @@ def test_qualname_source(self): self.assertEqual(err, b'') def test_builtins(self): - module = importlib.import_module('unittest') _, out, err = assert_python_failure('-m', 'inspect', 'sys') lines = err.decode().splitlines() diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py index 334fea0774be51..5545ee39d8e942 100644 --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -155,6 +155,8 @@ def test_basic(self): self.assertEqual(int(' 0O123 ', 0), 83) self.assertEqual(int(' 0X123 ', 0), 291) self.assertEqual(int(' 0B100 ', 0), 4) + with self.assertRaises(ValueError): + int('010', 0) # without base still base 10 self.assertEqual(int('0123'), 123) @@ -221,6 +223,24 @@ def test_basic(self): self.assertEqual(int('2br45qc', 35), 4294967297) self.assertEqual(int('1z141z5', 36), 4294967297) + def test_invalid_signs(self): + with self.assertRaises(ValueError): + int('+') + with self.assertRaises(ValueError): + int('-') + with self.assertRaises(ValueError): + int('- 1') + with self.assertRaises(ValueError): + int('+ 1') + with self.assertRaises(ValueError): + int(' + 1 ') + + def test_unicode(self): + self.assertEqual(int("१२३४५६७८९०1234567890"), 12345678901234567890) + self.assertEqual(int('١٢٣٤٥٦٧٨٩٠'), 1234567890) + self.assertEqual(int("१२३४५६७८९०1234567890", 0), 12345678901234567890) + self.assertEqual(int('١٢٣٤٥٦٧٨٩٠', 0), 1234567890) + def test_underscores(self): for lit in VALID_UNDERSCORE_LITERALS: if any(ch in lit for ch in '.eEjJ'): diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index 7014bc97100cb4..9fe559d4b7eed5 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -1846,7 +1846,7 @@ def batched_recipe(iterable, n): if n < 1: raise ValueError('n must be at least one') it = iter(iterable) - while (batch := tuple(islice(it, n))): + while batch := tuple(islice(it, n)): yield batch for iterable, n in product( diff --git a/Lib/test/test_launcher.py b/Lib/test/test_launcher.py index 2f35eaf08a2dc9..362b507d158288 100644 --- a/Lib/test/test_launcher.py +++ b/Lib/test/test_launcher.py @@ -394,17 +394,17 @@ def test_filter_to_company_with_default(self): def test_filter_to_tag(self): company = "PythonTestSuite" - data = self.run_py([f"-V:3.100"]) + data = self.run_py(["-V:3.100"]) self.assertEqual("X.Y.exe", data["LaunchCommand"]) self.assertEqual(company, data["env.company"]) self.assertEqual("3.100", data["env.tag"]) - data = self.run_py([f"-V:3.100-32"]) + data = self.run_py(["-V:3.100-32"]) self.assertEqual("X.Y-32.exe", data["LaunchCommand"]) self.assertEqual(company, data["env.company"]) self.assertEqual("3.100-32", data["env.tag"]) - data = self.run_py([f"-V:3.100-arm64"]) + data = self.run_py(["-V:3.100-arm64"]) self.assertEqual("X.Y-arm64.exe -X fake_arg_for_test", 
data["LaunchCommand"]) self.assertEqual(company, data["env.company"]) self.assertEqual("3.100-arm64", data["env.tag"]) @@ -421,7 +421,7 @@ def test_filter_to_company_and_tag(self): def test_filter_with_single_install(self): company = "PythonTestSuite1" data = self.run_py( - [f"-V:Nonexistent"], + ["-V:Nonexistent"], env={"PYLAUNCHER_LIMIT_TO_COMPANY": company}, expect_returncode=103, ) @@ -500,7 +500,7 @@ def test_py_default_short_argv0(self): data = self.run_py(["--version"], argv=f'{argv0} --version') self.assertEqual("PythonTestSuite", data["SearchInfo.company"]) self.assertEqual("3.100", data["SearchInfo.tag"]) - self.assertEqual(f'X.Y.exe --version', data["stdout"].strip()) + self.assertEqual("X.Y.exe --version", data["stdout"].strip()) def test_py_default_in_list(self): data = self.run_py(["-0"], env=TEST_PY_ENV) @@ -662,7 +662,7 @@ def test_install(self): self.assertIn("9PJPW5LDXLZ5", cmd) def test_literal_shebang_absolute(self): - with self.script(f"#! C:/some_random_app -witharg") as script: + with self.script("#! C:/some_random_app -witharg") as script: data = self.run_py([script]) self.assertEqual( f"C:\\some_random_app -witharg {script}", @@ -670,7 +670,7 @@ def test_literal_shebang_absolute(self): ) def test_literal_shebang_relative(self): - with self.script(f"#! ..\\some_random_app -witharg") as script: + with self.script("#! ..\\some_random_app -witharg") as script: data = self.run_py([script]) self.assertEqual( f"{script.parent.parent}\\some_random_app -witharg {script}", @@ -678,14 +678,14 @@ def test_literal_shebang_relative(self): ) def test_literal_shebang_quoted(self): - with self.script(f'#! "some random app" -witharg') as script: + with self.script('#! "some random app" -witharg') as script: data = self.run_py([script]) self.assertEqual( f'"{script.parent}\\some random app" -witharg {script}', data["stdout"].strip(), ) - with self.script(f'#! some" random "app -witharg') as script: + with self.script('#! some" random "app -witharg') as script: data = self.run_py([script]) self.assertEqual( f'"{script.parent}\\some random app" -witharg {script}', @@ -693,7 +693,7 @@ def test_literal_shebang_quoted(self): ) def test_literal_shebang_quoted_escape(self): - with self.script(f'#! some\\" random "app -witharg') as script: + with self.script('#! 
some\\" random "app -witharg') as script: data = self.run_py([script]) self.assertEqual( f'"{script.parent}\\some\\ random app" -witharg {script}', diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 072056d3722106..9176d8eeb56d01 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -1524,6 +1524,32 @@ class ConfigFileTest(BaseTest): kwargs={{"encoding": "utf-8"}} """ + + config9 = """ + [loggers] + keys=root + + [handlers] + keys=hand1 + + [formatters] + keys=form1 + + [logger_root] + level=WARNING + handlers=hand1 + + [handler_hand1] + class=StreamHandler + level=NOTSET + formatter=form1 + args=(sys.stdout,) + + [formatter_form1] + format=%(message)s ++ %(customfield)s + defaults={"customfield": "defaultvalue"} + """ + disable_test = """ [loggers] keys=root @@ -1687,6 +1713,16 @@ def test_config8_ok(self): handler = logging.root.handlers[0] self.addCleanup(closeFileHandler, handler, fn) + def test_config9_ok(self): + self.apply_config(self.config9) + formatter = logging.root.handlers[0].formatter + result = formatter.format(logging.makeLogRecord({'msg': 'test'})) + self.assertEqual(result, 'test ++ defaultvalue') + result = formatter.format(logging.makeLogRecord( + {'msg': 'test', 'customfield': "customvalue"})) + self.assertEqual(result, 'test ++ customvalue') + + def test_logger_disabling(self): self.apply_config(self.disable_test) logger = logging.getLogger('some_pristine_logger') @@ -2909,6 +2945,30 @@ class ConfigDictTest(BaseTest): }, } + # config0 but with default values for formatter. Skipped 15, it is defined + # in the test code. + config16 = { + 'version': 1, + 'formatters': { + 'form1' : { + 'format' : '%(message)s ++ %(customfield)s', + 'defaults': {"customfield": "defaultvalue"} + }, + }, + 'handlers' : { + 'hand1' : { + 'class' : 'logging.StreamHandler', + 'formatter' : 'form1', + 'level' : 'NOTSET', + 'stream' : 'ext://sys.stdout', + }, + }, + 'root' : { + 'level' : 'WARNING', + 'handlers' : ['hand1'], + }, + } + bad_format = { "version": 1, "formatters": { @@ -3021,7 +3081,7 @@ class ConfigDictTest(BaseTest): } } - # Configuration with custom function and 'validate' set to False + # Configuration with custom function, 'validate' set to False and no defaults custom_formatter_with_function = { 'version': 1, 'formatters': { @@ -3048,6 +3108,33 @@ class ConfigDictTest(BaseTest): } } + # Configuration with custom function, and defaults + custom_formatter_with_defaults = { + 'version': 1, + 'formatters': { + 'form1': { + '()': formatFunc, + 'format': '%(levelname)s:%(name)s:%(message)s:%(customfield)s', + 'defaults': {"customfield": "myvalue"} + }, + }, + 'handlers' : { + 'hand1' : { + 'class': 'logging.StreamHandler', + 'formatter': 'form1', + 'level': 'NOTSET', + 'stream': 'ext://sys.stdout', + }, + }, + "loggers": { + "my_test_logger_custom_formatter": { + "level": "DEBUG", + "handlers": ["hand1"], + "propagate": "true" + } + } + } + config_queue_handler = { 'version': 1, 'handlers' : { @@ -3349,6 +3436,22 @@ def test_config15_ok(self): handler = logging.root.handlers[0] self.addCleanup(closeFileHandler, handler, fn) + def test_config16_ok(self): + self.apply_config(self.config16) + h = logging._handlers['hand1'] + + # Custom value + result = h.formatter.format(logging.makeLogRecord( + {'msg': 'Hello', 'customfield': 'customvalue'})) + self.assertEqual(result, 'Hello ++ customvalue') + + # Default value + result = h.formatter.format(logging.makeLogRecord( + {'msg': 'Hello'})) + self.assertEqual(result, 'Hello ++ defaultvalue') + + + def 
setup_via_listener(self, text, verify=None): text = text.encode("utf-8") # Ask for a randomly assigned port (by using port 0) @@ -3516,6 +3619,9 @@ def test_custom_formatter_class_with_validate3(self): def test_custom_formatter_function_with_validate(self): self.assertRaises(ValueError, self.apply_config, self.custom_formatter_with_function) + def test_custom_formatter_function_with_defaults(self): + self.assertRaises(ValueError, self.apply_config, self.custom_formatter_with_defaults) + def test_baseconfig(self): d = { 'atuple': (1, 2, 3), @@ -5097,8 +5203,7 @@ def test_encoding_errors_none(self): message = [] def dummy_handle_error(record): - _, v, _ = sys.exc_info() - message.append(str(v)) + message.append(str(sys.exception())) handler.handleError = dummy_handle_error logging.debug('The Øresund Bridge joins Copenhagen to Malmö') diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py index 07c2764dfd1b2f..4c592eaf34da23 100644 --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -31,7 +31,7 @@ def _check_sample(self, msg): # Inspect a mailbox.Message representation of the sample message self.assertIsInstance(msg, email.message.Message) self.assertIsInstance(msg, mailbox.Message) - for key, value in _sample_headers.items(): + for key, value in _sample_headers: self.assertIn(value, msg.get_all(key)) self.assertTrue(msg.is_multipart()) self.assertEqual(len(msg.get_payload()), len(_sample_payloads)) @@ -2264,30 +2264,31 @@ def test_nonempty_maildir_both(self): _bytes_sample_message = _sample_message.encode('ascii') -_sample_headers = { - "Return-Path":"<gkj@gregorykjohnson.com>", - "X-Original-To":"gkj+person@localhost", - "Delivered-To":"gkj+person@localhost", - "Received":"""from localhost (localhost [127.0.0.1]) +_sample_headers = [ + ("Return-Path", "<gkj@gregorykjohnson.com>"), + ("X-Original-To", "gkj+person@localhost"), + ("Delivered-To", "gkj+person@localhost"), + ("Received", """from localhost (localhost [127.0.0.1]) by andy.gregorykjohnson.com (Postfix) with ESMTP id 356ED9DD17 - for <gkj+person@localhost>; Wed, 13 Jul 2005 17:23:16 -0400 (EDT)""", - "Delivered-To":"gkj@sundance.gregorykjohnson.com", - "Received":"""from localhost [127.0.0.1] + for <gkj+person@localhost>; Wed, 13 Jul 2005 17:23:16 -0400 (EDT)"""), + ("Delivered-To", "gkj@sundance.gregorykjohnson.com"), + ("Received", """from localhost [127.0.0.1] by localhost with POP3 (fetchmail-6.2.5) - for gkj+person@localhost (single-drop); Wed, 13 Jul 2005 17:23:16 -0400 (EDT)""", - "Received":"""from andy.gregorykjohnson.com (andy.gregorykjohnson.com [64.32.235.228]) + for gkj+person@localhost (single-drop); Wed, 13 Jul 2005 17:23:16 -0400 (EDT)"""), + ("Received", """from andy.gregorykjohnson.com (andy.gregorykjohnson.com [64.32.235.228]) by sundance.gregorykjohnson.com (Postfix) with ESMTP id 5B056316746 - for <gkj@gregorykjohnson.com>; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)""", - "Received":"""by andy.gregorykjohnson.com (Postfix, from userid 1000) - id 490CD9DD17; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)""", - "Date":"Wed, 13 Jul 2005 17:23:11 -0400", - "From":""""Gregory K. 
Johnson" <gkj@gregorykjohnson.com>""", - "To":"gkj@gregorykjohnson.com", - "Subject":"Sample message", - "Mime-Version":"1.0", - "Content-Type":"""multipart/mixed; boundary="NMuMz9nt05w80d4+\"""", - "Content-Disposition":"inline", - "User-Agent": "Mutt/1.5.9i" } + for <gkj@gregorykjohnson.com>; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)"""), + ("Received", """by andy.gregorykjohnson.com (Postfix, from userid 1000) + id 490CD9DD17; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)"""), + ("Date", "Wed, 13 Jul 2005 17:23:11 -0400"), + ("From", """"Gregory K. Johnson" <gkj@gregorykjohnson.com>"""), + ("To", "gkj@gregorykjohnson.com"), + ("Subject", "Sample message"), + ("Mime-Version", "1.0"), + ("Content-Type", """multipart/mixed; boundary="NMuMz9nt05w80d4+\""""), + ("Content-Disposition", "inline"), + ("User-Agent", "Mutt/1.5.9i"), +] _sample_payloads = ("""This is a sample message. diff --git a/Lib/test/test_mailcap.py b/Lib/test/test_mailcap.py index 819dc80a266433..8a94b0cb1f27c7 100644 --- a/Lib/test/test_mailcap.py +++ b/Lib/test/test_mailcap.py @@ -127,7 +127,6 @@ def test_subst(self): (["", "audio/*", "foo.txt"], ""), (["echo foo", "audio/*", "foo.txt"], "echo foo"), (["echo %s", "audio/*", "foo.txt"], "echo foo.txt"), - (["echo %t", "audio/*", "foo.txt"], None), (["echo %t", "audio/wav", "foo.txt"], "echo audio/wav"), (["echo \\%t", "audio/*", "foo.txt"], "echo %t"), (["echo foo", "audio/*", "foo.txt", plist], "echo foo"), @@ -210,9 +209,6 @@ def test_findmatch(self): ([c, "audio/basic"], {"key": "description", "filename": fname}, ('"An audio fragment"', audio_basic_entry)), - ([c, "audio/*"], - {"filename": fname}, - (None, None)), ([c, "audio/wav"], {"filename": fname}, ("/usr/local/bin/showaudio audio/wav", audio_entry)), @@ -245,6 +241,30 @@ def test_test(self): ] self._run_cases(cases) + def test_unsafe_mailcap_input(self): + with self.assertWarnsRegex(mailcap.UnsafeMailcapInput, + 'Refusing to substitute parameter.*' + 'into a shell command'): + unsafe_param = mailcap.subst("echo %{total}", + "audio/wav", + "foo.txt", + ["total=*"]) + self.assertEqual(unsafe_param, None) + + with self.assertWarnsRegex(mailcap.UnsafeMailcapInput, + 'Refusing to substitute MIME type' + '.*into a shell'): + unsafe_mimetype = mailcap.subst("echo %t", "audio/*", "foo.txt") + self.assertEqual(unsafe_mimetype, None) + + with self.assertWarnsRegex(mailcap.UnsafeMailcapInput, + 'Refusing to use mailcap with filename.*' + 'Use a safe temporary filename.'): + unsafe_filename = mailcap.findmatch(MAILCAPDICT, + "audio/wav", + filename="foo*.txt") + self.assertEqual(unsafe_filename, (None, None)) + def _run_cases(self, cases): for c in cases: self.assertEqual(mailcap.findmatch(*c[0], **c[1]), c[2]) diff --git a/Lib/test/test_module.py b/Lib/test/test_module.py index 70e4efea69359a..c7eb92290e1b6d 100644 --- a/Lib/test/test_module.py +++ b/Lib/test/test_module.py @@ -236,7 +236,7 @@ def test_module_repr_with_full_loader(self): # Yes, a class not an instance. 
m.__loader__ = FullLoader self.assertEqual( - repr(m), "<module 'foo' (<class 'test.test_module.FullLoader'>)>") + repr(m), f"<module 'foo' (<class '{__name__}.FullLoader'>)>") def test_module_repr_with_bare_loader_and_filename(self): m = ModuleType('foo') diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py new file mode 100644 index 00000000000000..738ace923cc523 --- /dev/null +++ b/Lib/test/test_monitoring.py @@ -0,0 +1,1115 @@ +"""Test suite for the sys.monitoring.""" + +import collections +import functools +import operator +import sys +import types +import unittest + + +PAIR = (0,1) + +def f1(): + pass + +def f2(): + len([]) + sys.getsizeof(0) + +def floop(): + for item in PAIR: + pass + +def gen(): + yield + yield + +def g1(): + for _ in gen(): + pass + +TEST_TOOL = 2 +TEST_TOOL2 = 3 +TEST_TOOL3 = 4 + +class MonitoringBasicTest(unittest.TestCase): + + def test_has_objects(self): + m = sys.monitoring + m.events + m.use_tool_id + m.free_tool_id + m.get_tool + m.get_events + m.set_events + m.get_local_events + m.set_local_events + m.register_callback + m.restart_events + m.DISABLE + m.MISSING + m.events.NO_EVENTS + + def test_tool(self): + sys.monitoring.use_tool_id(TEST_TOOL, "MonitoringTest.Tool") + self.assertEqual(sys.monitoring.get_tool(TEST_TOOL), "MonitoringTest.Tool") + sys.monitoring.set_events(TEST_TOOL, 15) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), 15) + sys.monitoring.set_events(TEST_TOOL, 0) + with self.assertRaises(ValueError): + sys.monitoring.set_events(TEST_TOOL, sys.monitoring.events.C_RETURN) + with self.assertRaises(ValueError): + sys.monitoring.set_events(TEST_TOOL, sys.monitoring.events.C_RAISE) + sys.monitoring.free_tool_id(TEST_TOOL) + self.assertEqual(sys.monitoring.get_tool(TEST_TOOL), None) + with self.assertRaises(ValueError): + sys.monitoring.set_events(TEST_TOOL, sys.monitoring.events.CALL) + + +class MonitoringTestBase: + + def setUp(self): + # Check that a previous test hasn't left monitoring on. + for tool in range(6): + self.assertEqual(sys.monitoring.get_events(tool), 0) + self.assertIs(sys.monitoring.get_tool(TEST_TOOL), None) + self.assertIs(sys.monitoring.get_tool(TEST_TOOL2), None) + self.assertIs(sys.monitoring.get_tool(TEST_TOOL3), None) + sys.monitoring.use_tool_id(TEST_TOOL, "test " + self.__class__.__name__) + sys.monitoring.use_tool_id(TEST_TOOL2, "test2 " + self.__class__.__name__) + sys.monitoring.use_tool_id(TEST_TOOL3, "test3 " + self.__class__.__name__) + + def tearDown(self): + # Check that test hasn't left monitoring on. 
+ for tool in range(6): + self.assertEqual(sys.monitoring.get_events(tool), 0) + sys.monitoring.free_tool_id(TEST_TOOL) + sys.monitoring.free_tool_id(TEST_TOOL2) + sys.monitoring.free_tool_id(TEST_TOOL3) + + +class MonitoringCountTest(MonitoringTestBase, unittest.TestCase): + + def check_event_count(self, func, event, expected): + + class Counter: + def __init__(self): + self.count = 0 + def __call__(self, *args): + self.count += 1 + + counter = Counter() + sys.monitoring.register_callback(TEST_TOOL, event, counter) + if event == E.C_RETURN or event == E.C_RAISE: + sys.monitoring.set_events(TEST_TOOL, E.CALL) + else: + sys.monitoring.set_events(TEST_TOOL, event) + self.assertEqual(counter.count, 0) + counter.count = 0 + func() + self.assertEqual(counter.count, expected) + prev = sys.monitoring.register_callback(TEST_TOOL, event, None) + counter.count = 0 + func() + self.assertEqual(counter.count, 0) + self.assertEqual(prev, counter) + sys.monitoring.set_events(TEST_TOOL, 0) + + def test_start_count(self): + self.check_event_count(f1, E.PY_START, 1) + + def test_resume_count(self): + self.check_event_count(g1, E.PY_RESUME, 2) + + def test_return_count(self): + self.check_event_count(f1, E.PY_RETURN, 1) + + def test_call_count(self): + self.check_event_count(f2, E.CALL, 3) + + def test_c_return_count(self): + self.check_event_count(f2, E.C_RETURN, 2) + + +E = sys.monitoring.events + +SIMPLE_EVENTS = [ + (E.PY_START, "start"), + (E.PY_RESUME, "resume"), + (E.PY_RETURN, "return"), + (E.PY_YIELD, "yield"), + (E.JUMP, "jump"), + (E.BRANCH, "branch"), + (E.RAISE, "raise"), + (E.PY_UNWIND, "unwind"), + (E.EXCEPTION_HANDLED, "exception_handled"), + (E.C_RAISE, "c_raise"), + (E.C_RETURN, "c_return"), +] + +SIMPLE_EVENT_SET = functools.reduce(operator.or_, [ev for (ev, _) in SIMPLE_EVENTS], 0) | E.CALL + + +def just_pass(): + pass + +just_pass.events = [ + "py_call", + "start", + "return", +] + +def just_raise(): + raise Exception + +just_raise.events = [ + 'py_call', + "start", + "raise", + "unwind", +] + +def just_call(): + len([]) + +just_call.events = [ + 'py_call', + "start", + "c_call", + "c_return", + "return", +] + +def caught(): + try: + 1/0 + except Exception: + pass + +caught.events = [ + 'py_call', + "start", + "raise", + "exception_handled", + "branch", + "return", +] + +def nested_call(): + just_pass() + +nested_call.events = [ + "py_call", + "start", + "py_call", + "start", + "return", + "return", +] + +PY_CALLABLES = (types.FunctionType, types.MethodType) + +class MonitoringEventsBase(MonitoringTestBase): + + def gather_events(self, func): + events = [] + for event, event_name in SIMPLE_EVENTS: + def record(*args, event_name=event_name): + events.append(event_name) + sys.monitoring.register_callback(TEST_TOOL, event, record) + def record_call(code, offset, obj, arg): + if isinstance(obj, PY_CALLABLES): + events.append("py_call") + else: + events.append("c_call") + sys.monitoring.register_callback(TEST_TOOL, E.CALL, record_call) + sys.monitoring.set_events(TEST_TOOL, SIMPLE_EVENT_SET) + events = [] + try: + func() + except: + pass + sys.monitoring.set_events(TEST_TOOL, 0) + #Remove the final event, the call to `sys.monitoring.set_events` + events = events[:-1] + return events + + def check_events(self, func, expected=None): + events = self.gather_events(func) + if expected is None: + expected = func.events + self.assertEqual(events, expected) + + +class MonitoringEventsTest(MonitoringEventsBase, unittest.TestCase): + + def test_just_pass(self): + self.check_events(just_pass) + + def 
test_just_raise(self): + try: + self.check_events(just_raise) + except Exception: + pass + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), 0) + + def test_just_call(self): + self.check_events(just_call) + + def test_caught(self): + self.check_events(caught) + + def test_nested_call(self): + self.check_events(nested_call) + +UP_EVENTS = (E.C_RETURN, E.C_RAISE, E.PY_RETURN, E.PY_UNWIND, E.PY_YIELD) +DOWN_EVENTS = (E.PY_START, E.PY_RESUME) + +from test.profilee import testfunc + +class SimulateProfileTest(MonitoringEventsBase, unittest.TestCase): + + def test_balanced(self): + events = self.gather_events(testfunc) + c = collections.Counter(events) + self.assertEqual(c["c_call"], c["c_return"]) + self.assertEqual(c["start"], c["return"] + c["unwind"]) + self.assertEqual(c["raise"], c["exception_handled"] + c["unwind"]) + + def test_frame_stack(self): + self.maxDiff = None + stack = [] + errors = [] + seen = set() + def up(*args): + frame = sys._getframe(1) + if not stack: + errors.append("empty") + else: + expected = stack.pop() + if frame != expected: + errors.append(f" Popping {frame} expected {expected}") + def down(*args): + frame = sys._getframe(1) + stack.append(frame) + seen.add(frame.f_code) + def call(code, offset, callable, arg): + if not isinstance(callable, PY_CALLABLES): + stack.append(sys._getframe(1)) + for event in UP_EVENTS: + sys.monitoring.register_callback(TEST_TOOL, event, up) + for event in DOWN_EVENTS: + sys.monitoring.register_callback(TEST_TOOL, event, down) + sys.monitoring.register_callback(TEST_TOOL, E.CALL, call) + sys.monitoring.set_events(TEST_TOOL, SIMPLE_EVENT_SET) + testfunc() + sys.monitoring.set_events(TEST_TOOL, 0) + self.assertEqual(errors, []) + self.assertEqual(stack, [sys._getframe()]) + self.assertEqual(len(seen), 9) + + +class CounterWithDisable: + + def __init__(self): + self.disable = False + self.count = 0 + + def __call__(self, *args): + self.count += 1 + if self.disable: + return sys.monitoring.DISABLE + + +class RecorderWithDisable: + + def __init__(self, events): + self.disable = False + self.events = events + + def __call__(self, code, event): + self.events.append(event) + if self.disable: + return sys.monitoring.DISABLE + + +class MontoringDisableAndRestartTest(MonitoringTestBase, unittest.TestCase): + + def test_disable(self): + try: + counter = CounterWithDisable() + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, counter) + sys.monitoring.set_events(TEST_TOOL, E.PY_START) + self.assertEqual(counter.count, 0) + counter.count = 0 + f1() + self.assertEqual(counter.count, 1) + counter.disable = True + counter.count = 0 + f1() + self.assertEqual(counter.count, 1) + counter.count = 0 + f1() + self.assertEqual(counter.count, 0) + sys.monitoring.set_events(TEST_TOOL, 0) + finally: + sys.monitoring.restart_events() + + def test_restart(self): + try: + counter = CounterWithDisable() + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, counter) + sys.monitoring.set_events(TEST_TOOL, E.PY_START) + counter.disable = True + f1() + counter.count = 0 + f1() + self.assertEqual(counter.count, 0) + sys.monitoring.restart_events() + counter.count = 0 + f1() + self.assertEqual(counter.count, 1) + sys.monitoring.set_events(TEST_TOOL, 0) + finally: + sys.monitoring.restart_events() + + +class MultipleMonitorsTest(MonitoringTestBase, unittest.TestCase): + + def test_two_same(self): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + counter1 = CounterWithDisable() + counter2 = CounterWithDisable() + 
sys.monitoring.register_callback(TEST_TOOL, E.PY_START, counter1) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_START, counter2) + sys.monitoring.set_events(TEST_TOOL, E.PY_START) + sys.monitoring.set_events(TEST_TOOL2, E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL2), E.PY_START) + self.assertEqual(sys.monitoring._all_events(), {'PY_START': (1 << TEST_TOOL) | (1 << TEST_TOOL2)}) + counter1.count = 0 + counter2.count = 0 + f1() + count1 = counter1.count + count2 = counter2.count + self.assertEqual((count1, count2), (1, 1)) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.set_events(TEST_TOOL2, 0) + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, None) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_START, None) + self.assertEqual(sys.monitoring._all_events(), {}) + + def test_three_same(self): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + counter1 = CounterWithDisable() + counter2 = CounterWithDisable() + counter3 = CounterWithDisable() + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, counter1) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_START, counter2) + sys.monitoring.register_callback(TEST_TOOL3, E.PY_START, counter3) + sys.monitoring.set_events(TEST_TOOL, E.PY_START) + sys.monitoring.set_events(TEST_TOOL2, E.PY_START) + sys.monitoring.set_events(TEST_TOOL3, E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL2), E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL3), E.PY_START) + self.assertEqual(sys.monitoring._all_events(), {'PY_START': (1 << TEST_TOOL) | (1 << TEST_TOOL2) | (1 << TEST_TOOL3)}) + counter1.count = 0 + counter2.count = 0 + counter3.count = 0 + f1() + count1 = counter1.count + count2 = counter2.count + count3 = counter3.count + self.assertEqual((count1, count2, count3), (1, 1, 1)) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.set_events(TEST_TOOL2, 0) + sys.monitoring.set_events(TEST_TOOL3, 0) + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, None) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_START, None) + sys.monitoring.register_callback(TEST_TOOL3, E.PY_START, None) + self.assertEqual(sys.monitoring._all_events(), {}) + + def test_two_different(self): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + counter1 = CounterWithDisable() + counter2 = CounterWithDisable() + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, counter1) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_RETURN, counter2) + sys.monitoring.set_events(TEST_TOOL, E.PY_START) + sys.monitoring.set_events(TEST_TOOL2, E.PY_RETURN) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL2), E.PY_RETURN) + self.assertEqual(sys.monitoring._all_events(), {'PY_START': 1 << TEST_TOOL, 'PY_RETURN': 1 << TEST_TOOL2}) + counter1.count = 0 + counter2.count = 0 + f1() + count1 = counter1.count + count2 = counter2.count + self.assertEqual((count1, count2), (1, 1)) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.set_events(TEST_TOOL2, 0) + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, None) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_RETURN, None) + self.assertEqual(sys.monitoring._all_events(), {}) + + def test_two_with_disable(self): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + 
counter1 = CounterWithDisable() + counter2 = CounterWithDisable() + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, counter1) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_START, counter2) + sys.monitoring.set_events(TEST_TOOL, E.PY_START) + sys.monitoring.set_events(TEST_TOOL2, E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL2), E.PY_START) + self.assertEqual(sys.monitoring._all_events(), {'PY_START': (1 << TEST_TOOL) | (1 << TEST_TOOL2)}) + counter1.count = 0 + counter2.count = 0 + counter1.disable = True + f1() + count1 = counter1.count + count2 = counter2.count + self.assertEqual((count1, count2), (1, 1)) + counter1.count = 0 + counter2.count = 0 + f1() + count1 = counter1.count + count2 = counter2.count + self.assertEqual((count1, count2), (0, 1)) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.set_events(TEST_TOOL2, 0) + sys.monitoring.register_callback(TEST_TOOL, E.PY_START, None) + sys.monitoring.register_callback(TEST_TOOL2, E.PY_START, None) + self.assertEqual(sys.monitoring._all_events(), {}) + sys.monitoring.restart_events() + +class LineMonitoringTest(MonitoringTestBase, unittest.TestCase): + + def test_lines_single(self): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + events = [] + recorder = RecorderWithDisable(events) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, recorder) + sys.monitoring.set_events(TEST_TOOL, E.LINE) + f1() + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, None) + start = LineMonitoringTest.test_lines_single.__code__.co_firstlineno + self.assertEqual(events, [start+7, 14, start+8]) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, None) + self.assertEqual(sys.monitoring._all_events(), {}) + sys.monitoring.restart_events() + + def test_lines_loop(self): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + events = [] + recorder = RecorderWithDisable(events) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, recorder) + sys.monitoring.set_events(TEST_TOOL, E.LINE) + floop() + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, None) + start = LineMonitoringTest.test_lines_loop.__code__.co_firstlineno + self.assertEqual(events, [start+7, 21, 22, 22, 21, start+8]) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, None) + self.assertEqual(sys.monitoring._all_events(), {}) + sys.monitoring.restart_events() + + def test_lines_two(self): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + events = [] + recorder = RecorderWithDisable(events) + events2 = [] + recorder2 = RecorderWithDisable(events2) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, recorder) + sys.monitoring.register_callback(TEST_TOOL2, E.LINE, recorder2) + sys.monitoring.set_events(TEST_TOOL, E.LINE); sys.monitoring.set_events(TEST_TOOL2, E.LINE) + f1() + sys.monitoring.set_events(TEST_TOOL, 0); sys.monitoring.set_events(TEST_TOOL2, 0) + sys.monitoring.register_callback(TEST_TOOL, E.LINE, None) + sys.monitoring.register_callback(TEST_TOOL2, E.LINE, None) + start = LineMonitoringTest.test_lines_two.__code__.co_firstlineno + expected = [start+10, 14, start+11] + self.assertEqual(events, expected) + self.assertEqual(events2, expected) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.set_events(TEST_TOOL2, 0) + 
sys.monitoring.register_callback(TEST_TOOL, E.LINE, None) + sys.monitoring.register_callback(TEST_TOOL2, E.LINE, None) + self.assertEqual(sys.monitoring._all_events(), {}) + sys.monitoring.restart_events() + + def check_lines(self, func, expected, tool=TEST_TOOL): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + events = [] + recorder = RecorderWithDisable(events) + sys.monitoring.register_callback(tool, E.LINE, recorder) + sys.monitoring.set_events(tool, E.LINE) + func() + sys.monitoring.set_events(tool, 0) + sys.monitoring.register_callback(tool, E.LINE, None) + lines = [ line - func.__code__.co_firstlineno for line in events[1:-1] ] + self.assertEqual(lines, expected) + finally: + sys.monitoring.set_events(tool, 0) + + + def test_linear(self): + + def func(): + line = 1 + line = 2 + line = 3 + line = 4 + line = 5 + + self.check_lines(func, [1,2,3,4,5]) + + def test_branch(self): + def func(): + if "true".startswith("t"): + line = 2 + line = 3 + else: + line = 5 + line = 6 + + self.check_lines(func, [1,2,3,6]) + + def test_try_except(self): + + def func1(): + try: + line = 2 + line = 3 + except: + line = 5 + line = 6 + + self.check_lines(func1, [1,2,3,6]) + + def func2(): + try: + line = 2 + raise 3 + except: + line = 5 + line = 6 + + self.check_lines(func2, [1,2,3,4,5,6]) + + +class ExceptionRecorder: + + event_type = E.RAISE + + def __init__(self, events): + self.events = events + + def __call__(self, code, offset, exc): + self.events.append(("raise", type(exc))) + +class CheckEvents(MonitoringTestBase, unittest.TestCase): + + def check_events(self, func, expected, tool=TEST_TOOL, recorders=(ExceptionRecorder,)): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + event_list = [] + all_events = 0 + for recorder in recorders: + ev = recorder.event_type + sys.monitoring.register_callback(tool, ev, recorder(event_list)) + all_events |= ev + sys.monitoring.set_events(tool, all_events) + func() + sys.monitoring.set_events(tool, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + self.assertEqual(event_list, expected) + finally: + sys.monitoring.set_events(tool, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + +class StopiterationRecorder(ExceptionRecorder): + + event_type = E.STOP_ITERATION + +class ExceptionMontoringTest(CheckEvents): + + recorder = ExceptionRecorder + + def test_simple_try_except(self): + + def func1(): + try: + line = 2 + raise KeyError + except: + line = 5 + line = 6 + + self.check_events(func1, [("raise", KeyError)]) + + def gen(): + yield 1 + return 2 + + def implicit_stop_iteration(): + for _ in gen(): + pass + + self.check_events(implicit_stop_iteration, [("raise", StopIteration)], recorders=(StopiterationRecorder,)) + +class LineRecorder: + + event_type = E.LINE + + + def __init__(self, events): + self.events = events + + def __call__(self, code, line): + self.events.append(("line", code.co_name, line - code.co_firstlineno)) + +class CallRecorder: + + event_type = E.CALL + + def __init__(self, events): + self.events = events + + def __call__(self, code, offset, func, arg): + self.events.append(("call", func.__name__, arg)) + +class CEventRecorder: + + def __init__(self, events): + self.events = events + + def __call__(self, code, offset, func, arg): + self.events.append((self.event_name, func.__name__, arg)) + +class CReturnRecorder(CEventRecorder): + + event_type = E.C_RETURN + event_name = "C return" + +class 
CRaiseRecorder(CEventRecorder): + + event_type = E.C_RAISE + event_name = "C raise" + +MANY_RECORDERS = ExceptionRecorder, CallRecorder, LineRecorder, CReturnRecorder, CRaiseRecorder + +class TestManyEvents(CheckEvents): + + def test_simple(self): + + def func1(): + line1 = 1 + line2 = 2 + line3 = 3 + + self.check_events(func1, recorders = MANY_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('call', 'func1', sys.monitoring.MISSING), + ('line', 'func1', 1), + ('line', 'func1', 2), + ('line', 'func1', 3), + ('line', 'check_events', 11), + ('call', 'set_events', 2)]) + + def test_c_call(self): + + def func2(): + line1 = 1 + [].append(2) + line3 = 3 + + self.check_events(func2, recorders = MANY_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('call', 'func2', sys.monitoring.MISSING), + ('line', 'func2', 1), + ('line', 'func2', 2), + ('call', 'append', [2]), + ('C return', 'append', [2]), + ('line', 'func2', 3), + ('line', 'check_events', 11), + ('call', 'set_events', 2)]) + + def test_try_except(self): + + def func3(): + try: + line = 2 + raise KeyError + except: + line = 5 + line = 6 + + self.check_events(func3, recorders = MANY_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('call', 'func3', sys.monitoring.MISSING), + ('line', 'func3', 1), + ('line', 'func3', 2), + ('line', 'func3', 3), + ('raise', KeyError), + ('line', 'func3', 4), + ('line', 'func3', 5), + ('line', 'func3', 6), + ('line', 'check_events', 11), + ('call', 'set_events', 2)]) + +class InstructionRecorder: + + event_type = E.INSTRUCTION + + def __init__(self, events): + self.events = events + + def __call__(self, code, offset): + # Filter out instructions in check_events to lower noise + if code.co_name != "check_events": + self.events.append(("instruction", code.co_name, offset)) + + +LINE_AND_INSTRUCTION_RECORDERS = InstructionRecorder, LineRecorder + +class TestLineAndInstructionEvents(CheckEvents): + maxDiff = None + + def test_simple(self): + + def func1(): + line1 = 1 + line2 = 2 + line3 = 3 + + self.check_events(func1, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('line', 'func1', 1), + ('instruction', 'func1', 2), + ('instruction', 'func1', 4), + ('line', 'func1', 2), + ('instruction', 'func1', 6), + ('instruction', 'func1', 8), + ('line', 'func1', 3), + ('instruction', 'func1', 10), + ('instruction', 'func1', 12), + ('instruction', 'func1', 14), + ('line', 'check_events', 11)]) + + def test_c_call(self): + + def func2(): + line1 = 1 + [].append(2) + line3 = 3 + + self.check_events(func2, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('line', 'func2', 1), + ('instruction', 'func2', 2), + ('instruction', 'func2', 4), + ('line', 'func2', 2), + ('instruction', 'func2', 6), + ('instruction', 'func2', 8), + ('instruction', 'func2', 28), + ('instruction', 'func2', 30), + ('instruction', 'func2', 38), + ('line', 'func2', 3), + ('instruction', 'func2', 40), + ('instruction', 'func2', 42), + ('instruction', 'func2', 44), + ('line', 'check_events', 11)]) + + def test_try_except(self): + + def func3(): + try: + line = 2 + raise KeyError + except: + line = 5 + line = 6 + + self.check_events(func3, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('line', 'func3', 1), + ('instruction', 'func3', 2), + ('line', 'func3', 2), + ('instruction', 'func3', 4), + ('instruction', 'func3', 6), + ('line', 'func3', 3), + ('instruction', 'func3', 8), + ('instruction', 'func3', 18), + 
('instruction', 'func3', 20), + ('line', 'func3', 4), + ('instruction', 'func3', 22), + ('line', 'func3', 5), + ('instruction', 'func3', 24), + ('instruction', 'func3', 26), + ('instruction', 'func3', 28), + ('line', 'func3', 6), + ('instruction', 'func3', 30), + ('instruction', 'func3', 32), + ('instruction', 'func3', 34), + ('line', 'check_events', 11)]) + +class TestInstallIncrementallly(MonitoringTestBase, unittest.TestCase): + + def check_events(self, func, must_include, tool=TEST_TOOL, recorders=(ExceptionRecorder,)): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + event_list = [] + all_events = 0 + for recorder in recorders: + all_events |= recorder.event_type + sys.monitoring.set_events(tool, all_events) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, recorder(event_list)) + func() + sys.monitoring.set_events(tool, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + for line in must_include: + self.assertIn(line, event_list) + finally: + sys.monitoring.set_events(tool, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + + @staticmethod + def func1(): + line1 = 1 + + MUST_INCLUDE_LI = [ + ('instruction', 'func1', 2), + ('line', 'func1', 1), + ('instruction', 'func1', 4), + ('instruction', 'func1', 6)] + + def test_line_then_instruction(self): + recorders = [ LineRecorder, InstructionRecorder ] + self.check_events(self.func1, + recorders = recorders, must_include = self.EXPECTED_LI) + + def test_instruction_then_line(self): + recorders = [ InstructionRecorder, LineRecorderLowNoise ] + self.check_events(self.func1, + recorders = recorders, must_include = self.EXPECTED_LI) + + @staticmethod + def func2(): + len(()) + + MUST_INCLUDE_CI = [ + ('instruction', 'func2', 2), + ('call', 'func2', sys.monitoring.MISSING), + ('call', 'len', ()), + ('instruction', 'func2', 12), + ('instruction', 'func2', 14)] + + + + def test_line_then_instruction(self): + recorders = [ CallRecorder, InstructionRecorder ] + self.check_events(self.func2, + recorders = recorders, must_include = self.MUST_INCLUDE_CI) + + def test_instruction_then_line(self): + recorders = [ InstructionRecorder, CallRecorder ] + self.check_events(self.func2, + recorders = recorders, must_include = self.MUST_INCLUDE_CI) + +class TestLocalEvents(MonitoringTestBase, unittest.TestCase): + + def check_events(self, func, expected, tool=TEST_TOOL, recorders=(ExceptionRecorder,)): + try: + self.assertEqual(sys.monitoring._all_events(), {}) + event_list = [] + all_events = 0 + for recorder in recorders: + ev = recorder.event_type + sys.monitoring.register_callback(tool, ev, recorder(event_list)) + all_events |= ev + sys.monitoring.set_local_events(tool, func.__code__, all_events) + func() + sys.monitoring.set_local_events(tool, func.__code__, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + self.assertEqual(event_list, expected) + finally: + sys.monitoring.set_local_events(tool, func.__code__, 0) + for recorder in recorders: + sys.monitoring.register_callback(tool, recorder.event_type, None) + + + def test_simple(self): + + def func1(): + line1 = 1 + line2 = 2 + line3 = 3 + + self.check_events(func1, recorders = MANY_RECORDERS, expected = [ + ('line', 'func1', 1), + ('line', 'func1', 2), + ('line', 'func1', 3)]) + + def test_c_call(self): + + def func2(): + line1 = 1 + [].append(2) + line3 = 3 + + self.check_events(func2, recorders = 
MANY_RECORDERS, expected = [ + ('line', 'func2', 1), + ('line', 'func2', 2), + ('call', 'append', [2]), + ('C return', 'append', [2]), + ('line', 'func2', 3)]) + + def test_try_except(self): + + def func3(): + try: + line = 2 + raise KeyError + except: + line = 5 + line = 6 + + self.check_events(func3, recorders = MANY_RECORDERS, expected = [ + ('line', 'func3', 1), + ('line', 'func3', 2), + ('line', 'func3', 3), + ('raise', KeyError), + ('line', 'func3', 4), + ('line', 'func3', 5), + ('line', 'func3', 6)]) + + +def line_from_offset(code, offset): + for start, end, line in code.co_lines(): + if start <= offset < end: + return line - code.co_firstlineno + return -1 + +class JumpRecorder: + + event_type = E.JUMP + name = "jump" + + def __init__(self, events): + self.events = events + + def __call__(self, code, from_, to): + from_line = line_from_offset(code, from_) + to_line = line_from_offset(code, to) + self.events.append((self.name, code.co_name, from_line, to_line)) + + +class BranchRecorder(JumpRecorder): + + event_type = E.BRANCH + name = "branch" + + +JUMP_AND_BRANCH_RECORDERS = JumpRecorder, BranchRecorder +JUMP_BRANCH_AND_LINE_RECORDERS = JumpRecorder, BranchRecorder, LineRecorder + +class TestBranchAndJumpEvents(CheckEvents): + maxDiff = None + + def test_loop(self): + + def func(): + x = 1 + for a in range(2): + if a: + x = 4 + else: + x = 6 + + self.check_events(func, recorders = JUMP_AND_BRANCH_RECORDERS, expected = [ + ('branch', 'func', 2, 2), + ('branch', 'func', 3, 6), + ('jump', 'func', 6, 2), + ('branch', 'func', 2, 2), + ('branch', 'func', 3, 4), + ('jump', 'func', 4, 2), + ('branch', 'func', 2, 2)]) + + + self.check_events(func, recorders = JUMP_BRANCH_AND_LINE_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('line', 'func', 1), + ('line', 'func', 2), + ('branch', 'func', 2, 2), + ('line', 'func', 3), + ('branch', 'func', 3, 6), + ('line', 'func', 6), + ('jump', 'func', 6, 2), + ('branch', 'func', 2, 2), + ('line', 'func', 3), + ('branch', 'func', 3, 4), + ('line', 'func', 4), + ('jump', 'func', 4, 2), + ('branch', 'func', 2, 2), + ('line', 'func', 2), + ('line', 'check_events', 11)]) + + +class TestSetGetEvents(MonitoringTestBase, unittest.TestCase): + + def test_global(self): + sys.monitoring.set_events(TEST_TOOL, E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), E.PY_START) + sys.monitoring.set_events(TEST_TOOL2, E.PY_START) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL2), E.PY_START) + sys.monitoring.set_events(TEST_TOOL, 0) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL), 0) + sys.monitoring.set_events(TEST_TOOL2,0) + self.assertEqual(sys.monitoring.get_events(TEST_TOOL2), 0) + + def test_local(self): + code = f1.__code__ + sys.monitoring.set_local_events(TEST_TOOL, code, E.PY_START) + self.assertEqual(sys.monitoring.get_local_events(TEST_TOOL, code), E.PY_START) + sys.monitoring.set_local_events(TEST_TOOL2, code, E.PY_START) + self.assertEqual(sys.monitoring.get_local_events(TEST_TOOL2, code), E.PY_START) + sys.monitoring.set_local_events(TEST_TOOL, code, 0) + self.assertEqual(sys.monitoring.get_local_events(TEST_TOOL, code), 0) + sys.monitoring.set_local_events(TEST_TOOL2, code, 0) + self.assertEqual(sys.monitoring.get_local_events(TEST_TOOL2, code), 0) + +class TestUninitialized(unittest.TestCase, MonitoringTestBase): + + @staticmethod + def f(): + pass + + def test_get_local_events_uninitialized(self): + self.assertEqual(sys.monitoring.get_local_events(TEST_TOOL, self.f.__code__), 0) diff --git 
a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index 08c8a7a1f94b95..0e57c165ca98ea 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -1,6 +1,7 @@ import inspect import ntpath import os +import string import sys import unittest import warnings @@ -168,6 +169,7 @@ def test_splitroot(self): # gh-81790: support device namespace, including UNC drives. tester('ntpath.splitroot("//?/c:")', ("//?/c:", "", "")) + tester('ntpath.splitroot("//./c:")', ("//./c:", "", "")) tester('ntpath.splitroot("//?/c:/")', ("//?/c:", "/", "")) tester('ntpath.splitroot("//?/c:/dir")', ("//?/c:", "/", "dir")) tester('ntpath.splitroot("//?/UNC")', ("//?/UNC", "", "")) @@ -178,8 +180,12 @@ def test_splitroot(self): tester('ntpath.splitroot("//?/VOLUME{00000000-0000-0000-0000-000000000000}/spam")', ('//?/VOLUME{00000000-0000-0000-0000-000000000000}', '/', 'spam')) tester('ntpath.splitroot("//?/BootPartition/")', ("//?/BootPartition", "/", "")) + tester('ntpath.splitroot("//./BootPartition/")', ("//./BootPartition", "/", "")) + tester('ntpath.splitroot("//./PhysicalDrive0")', ("//./PhysicalDrive0", "", "")) + tester('ntpath.splitroot("//./nul")', ("//./nul", "", "")) tester('ntpath.splitroot("\\\\?\\c:")', ("\\\\?\\c:", "", "")) + tester('ntpath.splitroot("\\\\.\\c:")', ("\\\\.\\c:", "", "")) tester('ntpath.splitroot("\\\\?\\c:\\")', ("\\\\?\\c:", "\\", "")) tester('ntpath.splitroot("\\\\?\\c:\\dir")', ("\\\\?\\c:", "\\", "dir")) tester('ntpath.splitroot("\\\\?\\UNC")', ("\\\\?\\UNC", "", "")) @@ -192,6 +198,9 @@ def test_splitroot(self): tester('ntpath.splitroot("\\\\?\\VOLUME{00000000-0000-0000-0000-000000000000}\\spam")', ('\\\\?\\VOLUME{00000000-0000-0000-0000-000000000000}', '\\', 'spam')) tester('ntpath.splitroot("\\\\?\\BootPartition\\")', ("\\\\?\\BootPartition", "\\", "")) + tester('ntpath.splitroot("\\\\.\\BootPartition\\")', ("\\\\.\\BootPartition", "\\", "")) + tester('ntpath.splitroot("\\\\.\\PhysicalDrive0")', ("\\\\.\\PhysicalDrive0", "", "")) + tester('ntpath.splitroot("\\\\.\\nul")', ("\\\\.\\nul", "", "")) # gh-96290: support partial/invalid UNC drives tester('ntpath.splitroot("//")', ("//", "", "")) # empty server & missing share @@ -299,6 +308,11 @@ def test_join(self): tester("ntpath.join('//computer/share', 'a', 'b')", '//computer/share\\a\\b') tester("ntpath.join('//computer/share', 'a/b')", '//computer/share\\a/b') + tester("ntpath.join('\\\\', 'computer')", '\\\\computer') + tester("ntpath.join('\\\\computer\\', 'share')", '\\\\computer\\share') + tester("ntpath.join('\\\\computer\\share\\', 'a')", '\\\\computer\\share\\a') + tester("ntpath.join('\\\\computer\\share\\a\\', 'b')", '\\\\computer\\share\\a\\b') + def test_normpath(self): tester("ntpath.normpath('A//////././//.//B')", r'A\B') tester("ntpath.normpath('A/./B')", r'A\B') @@ -374,6 +388,12 @@ def test_realpath_basic(self): self.assertPathEqual(ntpath.realpath(os.fsencode(ABSTFN + "1")), os.fsencode(ABSTFN)) + # gh-88013: call ntpath.realpath with binary drive name may raise a + # TypeError. The drive should not exist to reproduce the bug. 
+ drives = {f"{c}:\\" for c in string.ascii_uppercase} - set(os.listdrives()) + d = drives.pop().encode() + self.assertEqual(ntpath.realpath(d), d) + @os_helper.skip_unless_symlink @unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname') def test_realpath_strict(self): diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index e39b7260624899..57fed5d09fd7b8 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -1,6 +1,29 @@ import unittest +class TestLoadSuperAttrCache(unittest.TestCase): + def test_descriptor_not_double_executed_on_spec_fail(self): + calls = [] + class Descriptor: + def __get__(self, instance, owner): + calls.append((instance, owner)) + return lambda: 1 + + class C: + d = Descriptor() + + class D(C): + def f(self): + return super().d() + + d = D() + + self.assertEqual(d.f(), 1) # warmup + calls.clear() + self.assertEqual(d.f(), 1) # try to specialize + self.assertEqual(calls, [(d, D)]) + + class TestLoadAttrCache(unittest.TestCase): def test_descriptor_added_after_optimization(self): class Descriptor: diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 74ece3ffb4ed17..584cc05ca82a55 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -3214,6 +3214,14 @@ def kill_process(pid): @support.requires_subprocess() class SpawnTests(unittest.TestCase): + @staticmethod + def quote_args(args): + # On Windows, os.spawn* simply joins arguments with spaces: + # arguments need to be quoted + if os.name != 'nt': + return args + return [f'"{arg}"' if " " in arg.strip() else arg for arg in args] + def create_args(self, *, with_env=False, use_bytes=False): self.exitcode = 17 @@ -3234,115 +3242,118 @@ def create_args(self, *, with_env=False, use_bytes=False): with open(filename, "w", encoding="utf-8") as fp: fp.write(code) - args = [sys.executable, filename] + program = sys.executable + args = self.quote_args([program, filename]) if use_bytes: + program = os.fsencode(program) args = [os.fsencode(a) for a in args] self.env = {os.fsencode(k): os.fsencode(v) for k, v in self.env.items()} - return args + return program, args @requires_os_func('spawnl') def test_spawnl(self): - args = self.create_args() - exitcode = os.spawnl(os.P_WAIT, args[0], *args) + program, args = self.create_args() + exitcode = os.spawnl(os.P_WAIT, program, *args) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnle') def test_spawnle(self): - args = self.create_args(with_env=True) - exitcode = os.spawnle(os.P_WAIT, args[0], *args, self.env) + program, args = self.create_args(with_env=True) + exitcode = os.spawnle(os.P_WAIT, program, *args, self.env) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnlp') def test_spawnlp(self): - args = self.create_args() - exitcode = os.spawnlp(os.P_WAIT, args[0], *args) + program, args = self.create_args() + exitcode = os.spawnlp(os.P_WAIT, program, *args) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnlpe') def test_spawnlpe(self): - args = self.create_args(with_env=True) - exitcode = os.spawnlpe(os.P_WAIT, args[0], *args, self.env) + program, args = self.create_args(with_env=True) + exitcode = os.spawnlpe(os.P_WAIT, program, *args, self.env) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnv') def test_spawnv(self): - args = self.create_args() - exitcode = os.spawnv(os.P_WAIT, args[0], args) + program, args = self.create_args() + exitcode = os.spawnv(os.P_WAIT, program, args) self.assertEqual(exitcode, self.exitcode) # Test for PyUnicode_FSConverter() - 
exitcode = os.spawnv(os.P_WAIT, FakePath(args[0]), args) + exitcode = os.spawnv(os.P_WAIT, FakePath(program), args) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnve') def test_spawnve(self): - args = self.create_args(with_env=True) - exitcode = os.spawnve(os.P_WAIT, args[0], args, self.env) + program, args = self.create_args(with_env=True) + exitcode = os.spawnve(os.P_WAIT, program, args, self.env) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnvp') def test_spawnvp(self): - args = self.create_args() - exitcode = os.spawnvp(os.P_WAIT, args[0], args) + program, args = self.create_args() + exitcode = os.spawnvp(os.P_WAIT, program, args) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnvpe') def test_spawnvpe(self): - args = self.create_args(with_env=True) - exitcode = os.spawnvpe(os.P_WAIT, args[0], args, self.env) + program, args = self.create_args(with_env=True) + exitcode = os.spawnvpe(os.P_WAIT, program, args, self.env) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnv') def test_nowait(self): - args = self.create_args() - pid = os.spawnv(os.P_NOWAIT, args[0], args) + program, args = self.create_args() + pid = os.spawnv(os.P_NOWAIT, program, args) support.wait_process(pid, exitcode=self.exitcode) @requires_os_func('spawnve') def test_spawnve_bytes(self): # Test bytes handling in parse_arglist and parse_envlist (#28114) - args = self.create_args(with_env=True, use_bytes=True) - exitcode = os.spawnve(os.P_WAIT, args[0], args, self.env) + program, args = self.create_args(with_env=True, use_bytes=True) + exitcode = os.spawnve(os.P_WAIT, program, args, self.env) self.assertEqual(exitcode, self.exitcode) @requires_os_func('spawnl') def test_spawnl_noargs(self): - args = self.create_args() - self.assertRaises(ValueError, os.spawnl, os.P_NOWAIT, args[0]) - self.assertRaises(ValueError, os.spawnl, os.P_NOWAIT, args[0], '') + program, __ = self.create_args() + self.assertRaises(ValueError, os.spawnl, os.P_NOWAIT, program) + self.assertRaises(ValueError, os.spawnl, os.P_NOWAIT, program, '') @requires_os_func('spawnle') def test_spawnle_noargs(self): - args = self.create_args() - self.assertRaises(ValueError, os.spawnle, os.P_NOWAIT, args[0], {}) - self.assertRaises(ValueError, os.spawnle, os.P_NOWAIT, args[0], '', {}) + program, __ = self.create_args() + self.assertRaises(ValueError, os.spawnle, os.P_NOWAIT, program, {}) + self.assertRaises(ValueError, os.spawnle, os.P_NOWAIT, program, '', {}) @requires_os_func('spawnv') def test_spawnv_noargs(self): - args = self.create_args() - self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], ()) - self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], []) - self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], ('',)) - self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], ['']) + program, __ = self.create_args() + self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, program, ()) + self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, program, []) + self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, program, ('',)) + self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, program, ['']) @requires_os_func('spawnve') def test_spawnve_noargs(self): - args = self.create_args() - self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, args[0], (), {}) - self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, args[0], [], {}) - self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, args[0], ('',), {}) - self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, 
args[0], [''], {}) + program, __ = self.create_args() + self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, program, (), {}) + self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, program, [], {}) + self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, program, ('',), {}) + self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, program, [''], {}) def _test_invalid_env(self, spawn): - args = [sys.executable, '-c', 'pass'] + program = sys.executable + args = self.quote_args([program, '-c', 'pass']) # null character in the environment variable name newenv = os.environ.copy() newenv["FRUIT\0VEGETABLE"] = "cabbage" try: - exitcode = spawn(os.P_WAIT, args[0], args, newenv) + exitcode = spawn(os.P_WAIT, program, args, newenv) except ValueError: pass else: @@ -3352,7 +3363,7 @@ def _test_invalid_env(self, spawn): newenv = os.environ.copy() newenv["FRUIT"] = "orange\0VEGETABLE=cabbage" try: - exitcode = spawn(os.P_WAIT, args[0], args, newenv) + exitcode = spawn(os.P_WAIT, program, args, newenv) except ValueError: pass else: @@ -3362,7 +3373,7 @@ def _test_invalid_env(self, spawn): newenv = os.environ.copy() newenv["FRUIT=ORANGE"] = "lemon" try: - exitcode = spawn(os.P_WAIT, args[0], args, newenv) + exitcode = spawn(os.P_WAIT, program, args, newenv) except ValueError: pass else: @@ -3375,10 +3386,11 @@ def _test_invalid_env(self, spawn): fp.write('import sys, os\n' 'if os.getenv("FRUIT") != "orange=lemon":\n' ' raise AssertionError') - args = [sys.executable, filename] + + args = self.quote_args([program, filename]) newenv = os.environ.copy() newenv["FRUIT"] = "orange=lemon" - exitcode = spawn(os.P_WAIT, args[0], args, newenv) + exitcode = spawn(os.P_WAIT, program, args, newenv) self.assertEqual(exitcode, 0) @requires_os_func('spawnve') diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index f05dead5886743..76cfadeedcea84 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -13,6 +13,7 @@ from unittest import mock from test.support import import_helper +from test.support import set_recursion_limit from test.support import is_emscripten, is_wasi from test.support import os_helper from test.support.os_helper import TESTFN, FakePath @@ -23,118 +24,18 @@ grp = pwd = None -class _BaseFlavourTest(object): - - def _check_parse_parts(self, arg, expected): - f = self.cls._parse_parts - sep = self.flavour.sep - altsep = self.flavour.altsep - actual = f([x.replace('/', sep) for x in arg]) - self.assertEqual(actual, expected) - if altsep: - actual = f([x.replace('/', altsep) for x in arg]) - self.assertEqual(actual, expected) - - def test_parse_parts_common(self): - check = self._check_parse_parts - sep = self.flavour.sep - # Unanchored parts. - check([], ('', '', [])) - check(['a'], ('', '', ['a'])) - check(['a/'], ('', '', ['a'])) - check(['a', 'b'], ('', '', ['a', 'b'])) - # Expansion. - check(['a/b'], ('', '', ['a', 'b'])) - check(['a/b/'], ('', '', ['a', 'b'])) - check(['a', 'b/c', 'd'], ('', '', ['a', 'b', 'c', 'd'])) - # Collapsing and stripping excess slashes. - check(['a', 'b//c', 'd'], ('', '', ['a', 'b', 'c', 'd'])) - check(['a', 'b/c/', 'd'], ('', '', ['a', 'b', 'c', 'd'])) - # Eliminating standalone dots. - check(['.'], ('', '', [])) - check(['.', '.', 'b'], ('', '', ['b'])) - check(['a', '.', 'b'], ('', '', ['a', 'b'])) - check(['a', '.', '.'], ('', '', ['a'])) - # The first part is anchored. 
- check(['/a/b'], ('', sep, [sep, 'a', 'b'])) - check(['/a', 'b'], ('', sep, [sep, 'a', 'b'])) - check(['/a/', 'b'], ('', sep, [sep, 'a', 'b'])) - # Ignoring parts before an anchored part. - check(['a', '/b', 'c'], ('', sep, [sep, 'b', 'c'])) - check(['a', '/b', '/c'], ('', sep, [sep, 'c'])) - - -class PosixFlavourTest(_BaseFlavourTest, unittest.TestCase): - cls = pathlib.PurePosixPath - flavour = pathlib.PurePosixPath._flavour - - def test_parse_parts(self): - check = self._check_parse_parts - # Collapsing of excess leading slashes, except for the double-slash - # special case. - check(['//a', 'b'], ('', '//', ['//', 'a', 'b'])) - check(['///a', 'b'], ('', '/', ['/', 'a', 'b'])) - check(['////a', 'b'], ('', '/', ['/', 'a', 'b'])) - # Paths which look like NT paths aren't treated specially. - check(['c:a'], ('', '', ['c:a'])) - check(['c:\\a'], ('', '', ['c:\\a'])) - check(['\\a'], ('', '', ['\\a'])) - - -class NTFlavourTest(_BaseFlavourTest, unittest.TestCase): - cls = pathlib.PureWindowsPath - flavour = pathlib.PureWindowsPath._flavour - - def test_parse_parts(self): - check = self._check_parse_parts - # First part is anchored. - check(['c:'], ('c:', '', ['c:'])) - check(['c:/'], ('c:', '\\', ['c:\\'])) - check(['/'], ('', '\\', ['\\'])) - check(['c:a'], ('c:', '', ['c:', 'a'])) - check(['c:/a'], ('c:', '\\', ['c:\\', 'a'])) - check(['/a'], ('', '\\', ['\\', 'a'])) - # UNC paths. - check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) - check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) - check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c'])) - # Second part is anchored, so that the first part is ignored. - check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c'])) - check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])) - # UNC paths. - check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) - # Collapsing and stripping excess slashes. - check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd'])) - # UNC paths. - check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) - # Extended paths. - check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\'])) - check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a'])) - check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b'])) - # Extended UNC paths (format is "\\?\UNC\server\share"). - check(['//?/UNC/b/c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\'])) - check(['//?/UNC/b/c/d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd'])) - # Second part has a root but not drive. - check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c'])) - check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])) - check(['//?/Z:/a', '/b', 'c'], ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c'])) - # Joining with the same drive => the first path is appended to if - # the second path is relative. - check(['c:/a/b', 'c:x/y'], ('c:', '\\', ['c:\\', 'a', 'b', 'x', 'y'])) - check(['c:/a/b', 'c:/x/y'], ('c:', '\\', ['c:\\', 'x', 'y'])) - # Paths to files with NTFS alternate data streams - check(['./c:s'], ('', '', ['c:s'])) - check(['cc:s'], ('', '', ['cc:s'])) - check(['C:c:s'], ('C:', '', ['C:', 'c:s'])) - check(['C:/c:s'], ('C:', '\\', ['C:\\', 'c:s'])) - check(['D:a', './c:b'], ('D:', '', ['D:', 'a', 'c:b'])) - check(['D:/a', './c:b'], ('D:', '\\', ['D:\\', 'a', 'c:b'])) - - # # Tests for the pure classes. 
# +class _BasePurePathSubclass(object): + init_called = False + + def __init__(self, *args): + super().__init__(*args) + self.init_called = True + + class _BasePurePathTest(object): # Keys are canonical paths, values are list of tuples of arguments @@ -220,6 +121,61 @@ def test_str_subclass_common(self): self._check_str_subclass('a/b.txt') self._check_str_subclass('/a/b.txt') + def test_init_called_common(self): + class P(_BasePurePathSubclass, self.cls): + pass + p = P('foo', 'bar') + self.assertTrue((p / 'foo').init_called) + self.assertTrue(('foo' / p).init_called) + self.assertTrue(p.joinpath('foo').init_called) + self.assertTrue(p.with_name('foo').init_called) + self.assertTrue(p.with_stem('foo').init_called) + self.assertTrue(p.with_suffix('.foo').init_called) + self.assertTrue(p.relative_to('foo').init_called) + self.assertTrue(p.parent.init_called) + for parent in p.parents: + self.assertTrue(parent.init_called) + + def _get_drive_root_parts(self, parts): + path = self.cls(*parts) + return path.drive, path.root, path.parts + + def _check_drive_root_parts(self, arg, *expected): + sep = self.flavour.sep + actual = self._get_drive_root_parts([x.replace('/', sep) for x in arg]) + self.assertEqual(actual, expected) + if altsep := self.flavour.altsep: + actual = self._get_drive_root_parts([x.replace('/', altsep) for x in arg]) + self.assertEqual(actual, expected) + + def test_drive_root_parts_common(self): + check = self._check_drive_root_parts + sep = self.flavour.sep + # Unanchored parts. + check((), '', '', ()) + check(('a',), '', '', ('a',)) + check(('a/',), '', '', ('a',)) + check(('a', 'b'), '', '', ('a', 'b')) + # Expansion. + check(('a/b',), '', '', ('a', 'b')) + check(('a/b/',), '', '', ('a', 'b')) + check(('a', 'b/c', 'd'), '', '', ('a', 'b', 'c', 'd')) + # Collapsing and stripping excess slashes. + check(('a', 'b//c', 'd'), '', '', ('a', 'b', 'c', 'd')) + check(('a', 'b/c/', 'd'), '', '', ('a', 'b', 'c', 'd')) + # Eliminating standalone dots. + check(('.',), '', '', ()) + check(('.', '.', 'b'), '', '', ('b',)) + check(('a', '.', 'b'), '', '', ('a', 'b')) + check(('a', '.', '.'), '', '', ('a',)) + # The first part is anchored. + check(('/a/b',), '', sep, (sep, 'a', 'b')) + check(('/a', 'b'), '', sep, (sep, 'a', 'b')) + check(('/a/', 'b'), '', sep, (sep, 'a', 'b')) + # Ignoring parts before an anchored part. + check(('a', '/b', 'c'), '', sep, (sep, 'b', 'c')) + check(('a', '/b', '/c'), '', sep, (sep, 'c')) + def test_join_common(self): P = self.cls p = P('a/b') @@ -390,8 +346,6 @@ def test_parts_common(self): p = P('a/b') parts = p.parts self.assertEqual(parts, ('a', 'b')) - # The object gets reused. - self.assertIs(parts, p.parts) # When the path is absolute, the anchor is a separate part. p = P('/a/b') parts = p.parts @@ -744,6 +698,18 @@ def test_pickling_common(self): class PurePosixPathTest(_BasePurePathTest, unittest.TestCase): cls = pathlib.PurePosixPath + def test_drive_root_parts(self): + check = self._check_drive_root_parts + # Collapsing of excess leading slashes, except for the double-slash + # special case. + check(('//a', 'b'), '', '//', ('//', 'a', 'b')) + check(('///a', 'b'), '', '/', ('/', 'a', 'b')) + check(('////a', 'b'), '', '/', ('/', 'a', 'b')) + # Paths which look like NT paths aren't treated specially. 
+ check(('c:a',), '', '', ('c:a',)) + check(('c:\\a',), '', '', ('c:\\a',)) + check(('\\a',), '', '', ('\\a',)) + def test_root(self): P = self.cls self.assertEqual(P('/a/b').root, '/') @@ -834,6 +800,68 @@ class PureWindowsPathTest(_BasePurePathTest, unittest.TestCase): ], }) + def test_drive_root_parts(self): + check = self._check_drive_root_parts + # First part is anchored. + check(('c:',), 'c:', '', ('c:',)) + check(('c:/',), 'c:', '\\', ('c:\\',)) + check(('/',), '', '\\', ('\\',)) + check(('c:a',), 'c:', '', ('c:', 'a')) + check(('c:/a',), 'c:', '\\', ('c:\\', 'a')) + check(('/a',), '', '\\', ('\\', 'a')) + # UNC paths. + check(('//',), '\\\\', '', ('\\\\',)) + check(('//a',), '\\\\a', '', ('\\\\a',)) + check(('//a/',), '\\\\a\\', '', ('\\\\a\\',)) + check(('//a/b',), '\\\\a\\b', '\\', ('\\\\a\\b\\',)) + check(('//a/b/',), '\\\\a\\b', '\\', ('\\\\a\\b\\',)) + check(('//a/b/c',), '\\\\a\\b', '\\', ('\\\\a\\b\\', 'c')) + # Second part is anchored, so that the first part is ignored. + check(('a', 'Z:b', 'c'), 'Z:', '', ('Z:', 'b', 'c')) + check(('a', 'Z:/b', 'c'), 'Z:', '\\', ('Z:\\', 'b', 'c')) + # UNC paths. + check(('a', '//b/c', 'd'), '\\\\b\\c', '\\', ('\\\\b\\c\\', 'd')) + # Collapsing and stripping excess slashes. + check(('a', 'Z://b//c/', 'd/'), 'Z:', '\\', ('Z:\\', 'b', 'c', 'd')) + # UNC paths. + check(('a', '//b/c//', 'd'), '\\\\b\\c', '\\', ('\\\\b\\c\\', 'd')) + # Extended paths. + check(('//./c:',), '\\\\.\\c:', '', ('\\\\.\\c:',)) + check(('//?/c:/',), '\\\\?\\c:', '\\', ('\\\\?\\c:\\',)) + check(('//?/c:/a',), '\\\\?\\c:', '\\', ('\\\\?\\c:\\', 'a')) + check(('//?/c:/a', '/b'), '\\\\?\\c:', '\\', ('\\\\?\\c:\\', 'b')) + # Extended UNC paths (format is "\\?\UNC\server\share"). + check(('//?',), '\\\\?', '', ('\\\\?',)) + check(('//?/',), '\\\\?\\', '', ('\\\\?\\',)) + check(('//?/UNC',), '\\\\?\\UNC', '', ('\\\\?\\UNC',)) + check(('//?/UNC/',), '\\\\?\\UNC\\', '', ('\\\\?\\UNC\\',)) + check(('//?/UNC/b',), '\\\\?\\UNC\\b', '', ('\\\\?\\UNC\\b',)) + check(('//?/UNC/b/',), '\\\\?\\UNC\\b\\', '', ('\\\\?\\UNC\\b\\',)) + check(('//?/UNC/b/c',), '\\\\?\\UNC\\b\\c', '\\', ('\\\\?\\UNC\\b\\c\\',)) + check(('//?/UNC/b/c/',), '\\\\?\\UNC\\b\\c', '\\', ('\\\\?\\UNC\\b\\c\\',)) + check(('//?/UNC/b/c/d',), '\\\\?\\UNC\\b\\c', '\\', ('\\\\?\\UNC\\b\\c\\', 'd')) + # UNC device paths + check(('//./BootPartition/',), '\\\\.\\BootPartition', '\\', ('\\\\.\\BootPartition\\',)) + check(('//?/BootPartition/',), '\\\\?\\BootPartition', '\\', ('\\\\?\\BootPartition\\',)) + check(('//./PhysicalDrive0',), '\\\\.\\PhysicalDrive0', '', ('\\\\.\\PhysicalDrive0',)) + check(('//?/Volume{}/',), '\\\\?\\Volume{}', '\\', ('\\\\?\\Volume{}\\',)) + check(('//./nul',), '\\\\.\\nul', '', ('\\\\.\\nul',)) + # Second part has a root but not drive. + check(('a', '/b', 'c'), '', '\\', ('\\', 'b', 'c')) + check(('Z:/a', '/b', 'c'), 'Z:', '\\', ('Z:\\', 'b', 'c')) + check(('//?/Z:/a', '/b', 'c'), '\\\\?\\Z:', '\\', ('\\\\?\\Z:\\', 'b', 'c')) + # Joining with the same drive => the first path is appended to if + # the second path is relative. 
+ check(('c:/a/b', 'c:x/y'), 'c:', '\\', ('c:\\', 'a', 'b', 'x', 'y')) + check(('c:/a/b', 'c:/x/y'), 'c:', '\\', ('c:\\', 'x', 'y')) + # Paths to files with NTFS alternate data streams + check(('./c:s',), '', '', ('c:s',)) + check(('cc:s',), '', '', ('cc:s',)) + check(('C:c:s',), 'C:', '', ('C:', 'c:s')) + check(('C:/c:s',), 'C:', '\\', ('C:\\', 'c:s')) + check(('D:a', './c:b'), 'D:', '', ('D:', 'a', 'c:b')) + check(('D:/a', './c:b'), 'D:', '\\', ('D:\\', 'a', 'c:b')) + def test_str(self): p = self.cls('a/b/c') self.assertEqual(str(p), 'a\\b\\c') @@ -1360,6 +1388,13 @@ def test_join(self): self.assertEqual(pp, P('C:/a/b/dd:s')) pp = p.joinpath(P('E:d:s')) self.assertEqual(pp, P('E:d:s')) + # Joining onto a UNC path with no root + pp = P('//').joinpath('server') + self.assertEqual(pp, P('//server')) + pp = P('//server').joinpath('share') + self.assertEqual(pp, P('//server/share')) + pp = P('//./BootPartition').joinpath('Windows') + self.assertEqual(pp, P('//./BootPartition/Windows')) def test_div(self): # Basically the same as joinpath(). @@ -2667,20 +2702,20 @@ def setUp(self): del self.sub2_tree[1][:1] def test_walk_topdown(self): - all = list(self.walk_path.walk()) - - self.assertEqual(len(all), 4) - # We can't know which order SUB1 and SUB2 will appear in. - # Not flipped: TESTFN, SUB1, SUB11, SUB2 - # flipped: TESTFN, SUB2, SUB1, SUB11 - flipped = all[0][1][0] != "SUB1" - all[0][1].sort() - all[3 - 2 * flipped][-1].sort() - all[3 - 2 * flipped][1].sort() - self.assertEqual(all[0], (self.walk_path, ["SUB1", "SUB2"], ["tmp1"])) - self.assertEqual(all[1 + flipped], (self.sub1_path, ["SUB11"], ["tmp2"])) - self.assertEqual(all[2 + flipped], (self.sub11_path, [], [])) - self.assertEqual(all[3 - 2 * flipped], self.sub2_tree) + walker = self.walk_path.walk() + entry = next(walker) + entry[1].sort() # Ensure we visit SUB1 before SUB2 + self.assertEqual(entry, (self.walk_path, ["SUB1", "SUB2"], ["tmp1"])) + entry = next(walker) + self.assertEqual(entry, (self.sub1_path, ["SUB11"], ["tmp2"])) + entry = next(walker) + self.assertEqual(entry, (self.sub11_path, [], [])) + entry = next(walker) + entry[1].sort() + entry[2].sort() + self.assertEqual(entry, self.sub2_tree) + with self.assertRaises(StopIteration): + next(walker) def test_walk_prune(self, walk_path=None): if walk_path is None: @@ -2704,24 +2739,37 @@ def test_file_like_path(self): self.test_walk_prune(FakePath(self.walk_path).__fspath__()) def test_walk_bottom_up(self): - all = list(self.walk_path.walk( top_down=False)) - - self.assertEqual(len(all), 4, all) - # We can't know which order SUB1 and SUB2 will appear in. 
- # Not flipped: SUB11, SUB1, SUB2, TESTFN - # flipped: SUB2, SUB11, SUB1, TESTFN - flipped = all[3][1][0] != "SUB1" - all[3][1].sort() - all[2 - 2 * flipped][-1].sort() - all[2 - 2 * flipped][1].sort() - self.assertEqual(all[3], - (self.walk_path, ["SUB1", "SUB2"], ["tmp1"])) - self.assertEqual(all[flipped], - (self.sub11_path, [], [])) - self.assertEqual(all[flipped + 1], - (self.sub1_path, ["SUB11"], ["tmp2"])) - self.assertEqual(all[2 - 2 * flipped], - self.sub2_tree) + seen_testfn = seen_sub1 = seen_sub11 = seen_sub2 = False + for path, dirnames, filenames in self.walk_path.walk(top_down=False): + if path == self.walk_path: + self.assertFalse(seen_testfn) + self.assertTrue(seen_sub1) + self.assertTrue(seen_sub2) + self.assertEqual(sorted(dirnames), ["SUB1", "SUB2"]) + self.assertEqual(filenames, ["tmp1"]) + seen_testfn = True + elif path == self.sub1_path: + self.assertFalse(seen_testfn) + self.assertFalse(seen_sub1) + self.assertTrue(seen_sub11) + self.assertEqual(dirnames, ["SUB11"]) + self.assertEqual(filenames, ["tmp2"]) + seen_sub1 = True + elif path == self.sub11_path: + self.assertFalse(seen_sub1) + self.assertFalse(seen_sub11) + self.assertEqual(dirnames, []) + self.assertEqual(filenames, []) + seen_sub11 = True + elif path == self.sub2_path: + self.assertFalse(seen_testfn) + self.assertFalse(seen_sub2) + self.assertEqual(sorted(dirnames), sorted(self.sub2_tree[1])) + self.assertEqual(sorted(filenames), sorted(self.sub2_tree[2])) + seen_sub2 = True + else: + raise AssertionError(f"Unexpected path: {path}") + self.assertTrue(seen_testfn) @os_helper.skip_unless_symlink def test_walk_follow_symlinks(self): @@ -2793,6 +2841,18 @@ def test_walk_many_open_files(self): self.assertEqual(next(it), expected) path = path / 'd' + def test_walk_above_recursion_limit(self): + recursion_limit = 40 + # directory_depth > recursion_limit + directory_depth = recursion_limit + 10 + base = pathlib.Path(os_helper.TESTFN, 'deep') + path = pathlib.Path(base, *(['d'] * directory_depth)) + path.mkdir(parents=True) + + with set_recursion_limit(recursion_limit): + list(base.walk()) + list(base.walk(top_down=False)) + class PathTest(_BasePathTest, unittest.TestCase): cls = pathlib.Path diff --git a/Lib/test/test_patma.py b/Lib/test/test_patma.py index 0ed54079c99b30..3dbd19dfffd318 100644 --- a/Lib/test/test_patma.py +++ b/Lib/test/test_patma.py @@ -3165,6 +3165,19 @@ def f(command): # 0 self.assertListEqual(self._trace(f, "go x"), [1, 2, 3]) self.assertListEqual(self._trace(f, "spam"), [1, 2, 3]) + def test_unreachable_code(self): + def f(command): # 0 + match command: # 1 + case 1: # 2 + if False: # 3 + return 1 # 4 + case _: # 5 + if False: # 6 + return 0 # 7 + + self.assertListEqual(self._trace(f, 1), [1, 2, 3]) + self.assertListEqual(self._trace(f, 0), [1, 2, 5, 6]) + def test_parser_deeply_nested_patterns(self): # Deeply nested patterns can cause exponential backtracking when parsing. # See gh-93671 for more information. diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index d91bd0b2f03a0f..2f712a10257984 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -240,9 +240,11 @@ def test_pdb_breakpoint_commands(): >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE ... 'break 3', + ... 'break 4, +', ... 'disable 1', ... 'ignore 1 10', ... 'condition 1 1 < 2', + ... 'condition 1 1 <', ... 'break 4', ... 'break 4', ... 'break', @@ -264,6 +266,8 @@ def test_pdb_breakpoint_commands(): ... 'commands 10', # out of range ... 'commands a', # display help ... 
'commands 4', # already deleted + ... 'break 6, undefined', # condition causing `NameError` during evaluation + ... 'continue', # will stop, ignoring runtime error ... 'continue', ... ]): ... test_function() @@ -271,12 +275,16 @@ def test_pdb_breakpoint_commands(): -> print(1) (Pdb) break 3 Breakpoint 1 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:3 + (Pdb) break 4, + + *** Invalid condition +: SyntaxError: invalid syntax (Pdb) disable 1 Disabled breakpoint 1 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:3 (Pdb) ignore 1 10 Will ignore next 10 crossings of breakpoint 1. (Pdb) condition 1 1 < 2 New condition set for breakpoint 1. + (Pdb) condition 1 1 < + *** Invalid condition 1 <: SyntaxError: invalid syntax (Pdb) break 4 Breakpoint 2 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:4 (Pdb) break 4 @@ -331,8 +339,13 @@ def test_pdb_breakpoint_commands(): end (Pdb) commands 4 *** cannot set commands: Breakpoint 4 already deleted + (Pdb) break 6, undefined + Breakpoint 5 at <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>:6 (Pdb) continue 3 + > <doctest test.test_pdb.test_pdb_breakpoint_commands[0]>(6)test_function() + -> print(4) + (Pdb) continue 4 """ @@ -574,6 +587,165 @@ def test_pdb_whatis_command(): (Pdb) continue """ +def test_pdb_display_command(): + """Test display command + + >>> def test_function(): + ... a = 0 + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... a = 1 + ... a = 2 + ... a = 3 + ... a = 4 + + >>> with PdbTestInput([ # doctest: +ELLIPSIS + ... 'display +', + ... 'display', + ... 'display a', + ... 'n', + ... 'display', + ... 'undisplay a', + ... 'n', + ... 'display a', + ... 'undisplay', + ... 'display a < 1', + ... 'n', + ... 'display undefined', + ... 'continue', + ... ]): + ... test_function() + > <doctest test.test_pdb.test_pdb_display_command[0]>(4)test_function() + -> a = 1 + (Pdb) display + + *** Unable to display +: SyntaxError: invalid syntax + (Pdb) display + No expression is being displayed + (Pdb) display a + display a: 0 + (Pdb) n + > <doctest test.test_pdb.test_pdb_display_command[0]>(5)test_function() + -> a = 2 + display a: 1 [old: 0] + (Pdb) display + Currently displaying: + a: 1 + (Pdb) undisplay a + (Pdb) n + > <doctest test.test_pdb.test_pdb_display_command[0]>(6)test_function() + -> a = 3 + (Pdb) display a + display a: 2 + (Pdb) undisplay + (Pdb) display a < 1 + display a < 1: False + (Pdb) n + > <doctest test.test_pdb.test_pdb_display_command[0]>(7)test_function() + -> a = 4 + (Pdb) display undefined + display undefined: ** raised NameError: name 'undefined' is not defined ** + (Pdb) continue + """ + +def test_pdb_alias_command(): + """Test alias command + + >>> class A: + ... def __init__(self): + ... self.attr1 = 10 + ... self.attr2 = 'str' + ... def method(self): + ... pass + + >>> def test_function(): + ... o = A() + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... o.method() + + >>> with PdbTestInput([ # doctest: +ELLIPSIS + ... 'alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = {%1.__dict__[k]}")', + ... 'alias ps pi self', + ... 'pi o', + ... 's', + ... 'ps', + ... 'continue', + ... ]): + ... 
test_function() + > <doctest test.test_pdb.test_pdb_alias_command[1]>(4)test_function() + -> o.method() + (Pdb) alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = {%1.__dict__[k]}") + (Pdb) alias ps pi self + (Pdb) pi o + o.attr1 = 10 + o.attr2 = str + (Pdb) s + --Call-- + > <doctest test.test_pdb.test_pdb_alias_command[0]>(5)method() + -> def method(self): + (Pdb) ps + self.attr1 = 10 + self.attr2 = str + (Pdb) continue + """ + +def test_pdb_where_command(): + """Test where command + + >>> def g(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + + >>> def f(): + ... g(); + + >>> def test_function(): + ... f() + + >>> with PdbTestInput([ # doctest: +ELLIPSIS + ... 'w', + ... 'where', + ... 'u', + ... 'w', + ... 'continue', + ... ]): + ... test_function() + --Return-- + > <doctest test.test_pdb.test_pdb_where_command[0]>(2)g()->None + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) w + ... + <doctest test.test_pdb.test_pdb_where_command[3]>(8)<module>() + -> test_function() + <doctest test.test_pdb.test_pdb_where_command[2]>(2)test_function() + -> f() + <doctest test.test_pdb.test_pdb_where_command[1]>(2)f() + -> g(); + > <doctest test.test_pdb.test_pdb_where_command[0]>(2)g()->None + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) where + ... + <doctest test.test_pdb.test_pdb_where_command[3]>(8)<module>() + -> test_function() + <doctest test.test_pdb.test_pdb_where_command[2]>(2)test_function() + -> f() + <doctest test.test_pdb.test_pdb_where_command[1]>(2)f() + -> g(); + > <doctest test.test_pdb.test_pdb_where_command[0]>(2)g()->None + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) u + > <doctest test.test_pdb.test_pdb_where_command[1]>(2)f() + -> g(); + (Pdb) w + ... + <doctest test.test_pdb.test_pdb_where_command[3]>(8)<module>() + -> test_function() + <doctest test.test_pdb.test_pdb_where_command[2]>(2)test_function() + -> f() + > <doctest test.test_pdb.test_pdb_where_command[1]>(2)f() + -> g(); + <doctest test.test_pdb.test_pdb_where_command[0]>(2)g()->None + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) continue + """ + def test_post_mortem(): """Test post mortem traceback debugging. @@ -1503,6 +1675,51 @@ def test_pdb_issue_gh_101673(): (Pdb) continue """ +def test_pdb_issue_gh_103225(): + """See GH-103225 + + Make sure longlist uses 1-based line numbers in frames that correspond to a module + + >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE + ... 'longlist', + ... 'continue' + ... ]): + ... a = 1 + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... b = 2 + > <doctest test.test_pdb.test_pdb_issue_gh_103225[0]>(7)<module>() + -> b = 2 + (Pdb) longlist + 1 with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE + 2 'longlist', + 3 'continue' + 4 ]): + 5 a = 1 + 6 import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + 7 -> b = 2 + (Pdb) continue + """ + +def test_pdb_issue_gh_101517(): + """See GH-101517 + + Make sure pdb doesn't crash when the exception is caught in a try/except* block + + >>> def test_function(): + ... try: + ... raise KeyError + ... except* Exception as e: + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + + >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE + ... 'continue' + ... ]): + ... 
test_function() + > <doctest test.test_pdb.test_pdb_issue_gh_101517[0]>(5)test_function() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) continue + """ + @support.requires_subprocess() class PdbTestCase(unittest.TestCase): @@ -2179,6 +2396,12 @@ def _create_fake_frozen_module(): # verify that pdb found the source of the "frozen" function self.assertIn('x = "Sentinel string for gh-93696"', stdout, "Sentinel statement not found") + def test_non_utf8_encoding(self): + script_dir = os.path.join(os.path.dirname(__file__), 'encoded_modules') + for filename in os.listdir(script_dir): + if filename.endswith(".py"): + self._run_pdb([os.path.join(script_dir, filename)], 'q') + class ChecklineTests(unittest.TestCase): def setUp(self): linecache.clearcache() # Pdb.checkline() uses linecache.getline() diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index 9ff017da53c2b1..bf7fc421a9df0a 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -52,10 +52,6 @@ def check_jump_targets(self, code): tgt.opname == 'RETURN_VALUE'): self.fail(f'{instr.opname} at {instr.offset} ' f'jumps to {tgt.opname} at {tgt.offset}') - # JUMP_IF_*_OR_POP jump to conditional jump - if '_OR_POP' in instr.opname and 'JUMP_IF_' in tgt.opname: - self.fail(f'{instr.opname} at {instr.offset} ' - f'jumps to {tgt.opname} at {tgt.offset}') def check_lnotab(self, code): "Check that the lnotab byte offsets are sensible." @@ -384,38 +380,36 @@ def f(): def test_elim_jump_to_uncond_jump3(self): # Intentionally use two-line expressions to test issue37213. - # JUMP_IF_FALSE_OR_POP to JUMP_IF_FALSE_OR_POP --> JUMP_IF_FALSE_OR_POP to non-jump + # POP_JUMP_IF_FALSE to POP_JUMP_IF_FALSE --> POP_JUMP_IF_FALSE to non-jump def f(a, b, c): return ((a and b) and c) self.check_jump_targets(f) self.check_lnotab(f) - self.assertEqual(count_instr_recursively(f, 'JUMP_IF_FALSE_OR_POP'), 2) - # JUMP_IF_TRUE_OR_POP to JUMP_IF_TRUE_OR_POP --> JUMP_IF_TRUE_OR_POP to non-jump + self.assertEqual(count_instr_recursively(f, 'POP_JUMP_IF_FALSE'), 2) + # POP_JUMP_IF_TRUE to POP_JUMP_IF_TRUE --> POP_JUMP_IF_TRUE to non-jump def f(a, b, c): return ((a or b) or c) self.check_jump_targets(f) self.check_lnotab(f) - self.assertEqual(count_instr_recursively(f, 'JUMP_IF_TRUE_OR_POP'), 2) + self.assertEqual(count_instr_recursively(f, 'POP_JUMP_IF_TRUE'), 2) # JUMP_IF_FALSE_OR_POP to JUMP_IF_TRUE_OR_POP --> POP_JUMP_IF_FALSE to non-jump def f(a, b, c): return ((a and b) or c) self.check_jump_targets(f) self.check_lnotab(f) - self.assertNotInBytecode(f, 'JUMP_IF_FALSE_OR_POP') - self.assertInBytecode(f, 'JUMP_IF_TRUE_OR_POP') - self.assertInBytecode(f, 'POP_JUMP_IF_FALSE') - # JUMP_IF_TRUE_OR_POP to JUMP_IF_FALSE_OR_POP --> POP_JUMP_IF_TRUE to non-jump + self.assertEqual(count_instr_recursively(f, 'POP_JUMP_IF_FALSE'), 1) + self.assertEqual(count_instr_recursively(f, 'POP_JUMP_IF_TRUE'), 1) + # POP_JUMP_IF_TRUE to POP_JUMP_IF_FALSE --> POP_JUMP_IF_TRUE to non-jump def f(a, b, c): return ((a or b) and c) self.check_jump_targets(f) self.check_lnotab(f) - self.assertNotInBytecode(f, 'JUMP_IF_TRUE_OR_POP') - self.assertInBytecode(f, 'JUMP_IF_FALSE_OR_POP') - self.assertInBytecode(f, 'POP_JUMP_IF_TRUE') + self.assertEqual(count_instr_recursively(f, 'POP_JUMP_IF_FALSE'), 1) + self.assertEqual(count_instr_recursively(f, 'POP_JUMP_IF_TRUE'), 1) def test_elim_jump_to_uncond_jump4(self): def f(): @@ -816,7 +810,7 @@ def f(): self.assertInBytecode(f, 'LOAD_FAST', "a73") def test_setting_lineno_no_undefined(self): - code = 
textwrap.dedent(f"""\ + code = textwrap.dedent("""\ def f(): x = y = 2 if not x: @@ -848,7 +842,7 @@ def trace(frame, event, arg): self.assertEqual(f.__code__.co_code, co_code) def test_setting_lineno_one_undefined(self): - code = textwrap.dedent(f"""\ + code = textwrap.dedent("""\ def f(): x = y = 2 if not x: @@ -882,7 +876,7 @@ def trace(frame, event, arg): self.assertEqual(f.__code__.co_code, co_code) def test_setting_lineno_two_undefined(self): - code = textwrap.dedent(f"""\ + code = textwrap.dedent("""\ def f(): x = y = 2 if not x: diff --git a/Lib/test/test_pickle.py b/Lib/test/test_pickle.py index 80e7a4d23a4ba8..1a55da39bdc58d 100644 --- a/Lib/test/test_pickle.py +++ b/Lib/test/test_pickle.py @@ -293,6 +293,34 @@ class CustomCPicklerClass(_pickle.Pickler, AbstractCustomPicklerClass): pass pickler_class = CustomCPicklerClass + @support.cpython_only + class HeapTypesTests(unittest.TestCase): + def setUp(self): + pickler = _pickle.Pickler(io.BytesIO()) + unpickler = _pickle.Unpickler(io.BytesIO()) + + self._types = ( + _pickle.Pickler, + _pickle.Unpickler, + type(pickler.memo), + type(unpickler.memo), + + # We cannot test the _pickle.Pdata; + # there's no way to get to it. + ) + + def test_have_gc(self): + import gc + for tp in self._types: + with self.subTest(tp=tp): + self.assertTrue(gc.is_tracked(tp)) + + def test_immutable(self): + for tp in self._types: + with self.subTest(tp=tp): + with self.assertRaisesRegex(TypeError, "immutable"): + tp.foo = "bar" + @support.cpython_only class SizeofTests(unittest.TestCase): check_sizeof = support.check_sizeof diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py index 0cc99e0cc22763..4d9f5db3c6b3cf 100644 --- a/Lib/test/test_pkgutil.py +++ b/Lib/test/test_pkgutil.py @@ -541,14 +541,6 @@ def check_deprecated(self): "Python 3.12; use 'importlib' instead", DeprecationWarning)) - def test_importer_deprecated(self): - with self.check_deprecated(): - pkgutil.ImpImporter("") - - def test_loader_deprecated(self): - with self.check_deprecated(): - pkgutil.ImpLoader("", "", "", "") - def test_get_loader_avoids_emulation(self): with check_warnings() as w: self.assertIsNotNone(pkgutil.get_loader("sys")) diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 72942dda342418..216973350319fe 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -123,10 +123,6 @@ def test_sys_version(self): for input, output in ( ('2.4.3 (#1, Jun 21 2006, 13:54:21) \n[GCC 3.3.4 (pre 3.3.5 20040809)]', ('CPython', '2.4.3', '', '', '1', 'Jun 21 2006 13:54:21', 'GCC 3.3.4 (pre 3.3.5 20040809)')), - ('IronPython 1.0.60816 on .NET 2.0.50727.42', - ('IronPython', '1.0.60816', '', '', '', '', '.NET 2.0.50727.42')), - ('IronPython 1.0 (1.0.61005.1977) on .NET 2.0.50727.42', - ('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')), ('2.4.3 (truncation, date, t) \n[GCC]', ('CPython', '2.4.3', '', '', 'truncation', 'date t', 'GCC')), ('2.4.3 (truncation, date, ) \n[GCC]', @@ -161,20 +157,11 @@ def test_sys_version(self): ('r261:67515', 'Dec 6 2008 15:26:00'), 'GCC 4.0.1 (Apple Computer, Inc. 
build 5370)'), - ("IronPython 2.0 (2.0.0.0) on .NET 2.0.50727.3053", None, "cli") + ("3.10.8 (tags/v3.10.8:aaaf517424, Feb 14 2023, 16:28:12) [GCC 9.4.0]", + None, "linux") : - ("IronPython", "2.0.0", "", "", ("", ""), - ".NET 2.0.50727.3053"), - - ("2.6.1 (IronPython 2.6.1 (2.6.10920.0) on .NET 2.0.50727.1433)", None, "cli") - : - ("IronPython", "2.6.1", "", "", ("", ""), - ".NET 2.0.50727.1433"), - - ("2.7.4 (IronPython 2.7.4 (2.7.0.40) on Mono 4.0.30319.1 (32-bit))", None, "cli") - : - ("IronPython", "2.7.4", "", "", ("", ""), - "Mono 4.0.30319.1 (32-bit)"), + ('CPython', '3.10.8', '', '', + ('tags/v3.10.8:aaaf517424', 'Feb 14 2023 16:28:12'), 'GCC 9.4.0'), ("2.5 (trunk:6107, Mar 26 2009, 13:02:18) \n[Java HotSpot(TM) Client VM (\"Apple Computer, Inc.\")]", ('Jython', 'trunk', '6107'), "java1.5.0_16") @@ -205,6 +192,9 @@ def test_sys_version(self): self.assertEqual(platform.python_build(), info[4]) self.assertEqual(platform.python_compiler(), info[5]) + with self.assertRaises(ValueError): + platform._sys_version('2. 4.3 (truncation) \n[GCC]') + def test_system_alias(self): res = platform.system_alias( platform.system(), diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py index 6b457440be5430..b08ababa341cfe 100644 --- a/Lib/test/test_plistlib.py +++ b/Lib/test/test_plistlib.py @@ -925,7 +925,7 @@ def test_large_timestamp(self): # Issue #26709: 32-bit timestamp out of range for ts in -2**31-1, 2**31: with self.subTest(ts=ts): - d = (datetime.datetime.utcfromtimestamp(0) + + d = (datetime.datetime(1970, 1, 1, 0, 0) + datetime.timedelta(seconds=ts)) data = plistlib.dumps(d, fmt=plistlib.FMT_BINARY) self.assertEqual(plistlib.loads(data), d) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 77f42f7f9c937b..444f8abe4607b7 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -231,6 +231,9 @@ def test_register_at_fork(self): with self.assertRaises(TypeError, msg="Invalid arg was allowed"): # Ensure a combination of valid and invalid is an error. os.register_at_fork(before=None, after_in_parent=lambda: 3) + with self.assertRaises(TypeError, msg="At least one argument is required"): + # when no arg is passed + os.register_at_fork() with self.assertRaises(TypeError, msg="Invalid arg was allowed"): # Ensure a combination of valid and invalid is an error. 
os.register_at_fork(before=lambda: None, after_in_child='') diff --git a/Lib/test/test_profile.py b/Lib/test/test_profile.py index d97fe447c38b01..a1dfc9abbb8ef7 100644 --- a/Lib/test/test_profile.py +++ b/Lib/test/test_profile.py @@ -178,7 +178,7 @@ def main(): 8 63.976 7.997 79.960 9.995 profilee.py:98(subhelper)""" _ProfileOutput['print_callers'] = """\ :0(append) <- profilee.py:73(helper1)(4) 119.964 -:0(exc_info) <- profilee.py:73(helper1)(4) 119.964 +:0(exception) <- profilee.py:73(helper1)(4) 119.964 :0(hasattr) <- profilee.py:73(helper1)(4) 119.964 profilee.py:88(helper2)(8) 399.912 profilee.py:110(__getattr__) <- :0(hasattr)(12) 11.964 diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index baae4efc2ad789..ac49fbae847726 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -1120,6 +1120,160 @@ def test_fail_once(self): self.check_executed_tests(output, [testname], rerun={testname: "test_fail_once"}) + def test_rerun_setup_class_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + class ExampleTests(unittest.TestCase): + @classmethod + def setUpClass(self): + raise RuntimeError('Fail') + + def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: "ExampleTests"}) + + def test_rerun_teardown_class_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + class ExampleTests(unittest.TestCase): + @classmethod + def tearDownClass(self): + raise RuntimeError('Fail') + + def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: "ExampleTests"}) + + def test_rerun_setup_module_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + def setUpModule(): + raise RuntimeError('Fail') + + class ExampleTests(unittest.TestCase): + def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: testname}) + + def test_rerun_teardown_module_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + def tearDownModule(): + raise RuntimeError('Fail') + + class ExampleTests(unittest.TestCase): + def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: testname}) + + def test_rerun_setup_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + class ExampleTests(unittest.TestCase): + def setUp(self): + raise RuntimeError('Fail') + + def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: "test_success"}) + + def test_rerun_teardown_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + class ExampleTests(unittest.TestCase): + def tearDown(self): + 
raise RuntimeError('Fail') + + def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: "test_success"}) + + def test_rerun_async_setup_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + class ExampleTests(unittest.IsolatedAsyncioTestCase): + async def asyncSetUp(self): + raise RuntimeError('Fail') + + async def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: "test_success"}) + + def test_rerun_async_teardown_hook_failure(self): + # FAILURE then FAILURE + code = textwrap.dedent(""" + import unittest + + class ExampleTests(unittest.IsolatedAsyncioTestCase): + async def asyncTearDown(self): + raise RuntimeError('Fail') + + async def test_success(self): + return + """) + testname = self.create_test(code=code) + + output = self.run_tests("-w", testname, exitcode=EXITCODE_BAD_TEST) + self.check_executed_tests(output, testname, + failed=[testname], + rerun={testname: "test_success"}) + def test_no_tests_ran(self): code = textwrap.dedent(""" import unittest diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 8fe62216ecdca0..36f0b8a31a3715 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -23,6 +23,7 @@ unregister_unpack_format, get_unpack_formats, SameFileError, _GiveupOnFastCopy) import tarfile +import warnings import zipfile try: import posix @@ -32,6 +33,7 @@ from test import support from test.support import os_helper from test.support.os_helper import TESTFN, FakePath +from test.support import warnings_helper TESTFN2 = TESTFN + "2" TESTFN_SRC = TESTFN + "_SRC" @@ -195,7 +197,7 @@ def test_rmtree_works_on_bytes(self): shutil.rmtree(victim) @os_helper.skip_unless_symlink - def test_rmtree_fails_on_symlink(self): + def test_rmtree_fails_on_symlink_onerror(self): tmp = self.mkdtemp() dir_ = os.path.join(tmp, 'dir') os.mkdir(dir_) @@ -207,12 +209,32 @@ def test_rmtree_fails_on_symlink(self): errors = [] def onerror(*args): errors.append(args) - shutil.rmtree(link, onerror=onerror) + with self.assertWarns(DeprecationWarning): + shutil.rmtree(link, onerror=onerror) self.assertEqual(len(errors), 1) self.assertIs(errors[0][0], os.path.islink) self.assertEqual(errors[0][1], link) self.assertIsInstance(errors[0][2][1], OSError) + @os_helper.skip_unless_symlink + def test_rmtree_fails_on_symlink_onexc(self): + tmp = self.mkdtemp() + dir_ = os.path.join(tmp, 'dir') + os.mkdir(dir_) + link = os.path.join(tmp, 'link') + os.symlink(dir_, link) + self.assertRaises(OSError, shutil.rmtree, link) + self.assertTrue(os.path.exists(dir_)) + self.assertTrue(os.path.lexists(link)) + errors = [] + def onexc(*args): + errors.append(args) + shutil.rmtree(link, onexc=onexc) + self.assertEqual(len(errors), 1) + self.assertIs(errors[0][0], os.path.islink) + self.assertEqual(errors[0][1], link) + self.assertIsInstance(errors[0][2], OSError) + @os_helper.skip_unless_symlink def test_rmtree_works_on_symlinks(self): tmp = self.mkdtemp() @@ -236,7 +258,7 @@ def test_rmtree_works_on_symlinks(self): self.assertTrue(os.path.exists(file1)) @unittest.skipUnless(_winapi, 'only relevant on Windows') - def test_rmtree_fails_on_junctions(self): + def 
test_rmtree_fails_on_junctions_onerror(self): tmp = self.mkdtemp() dir_ = os.path.join(tmp, 'dir') os.mkdir(dir_) @@ -249,12 +271,33 @@ def test_rmtree_fails_on_junctions(self): errors = [] def onerror(*args): errors.append(args) - shutil.rmtree(link, onerror=onerror) + with self.assertWarns(DeprecationWarning): + shutil.rmtree(link, onerror=onerror) self.assertEqual(len(errors), 1) self.assertIs(errors[0][0], os.path.islink) self.assertEqual(errors[0][1], link) self.assertIsInstance(errors[0][2][1], OSError) + @unittest.skipUnless(_winapi, 'only relevant on Windows') + def test_rmtree_fails_on_junctions_onexc(self): + tmp = self.mkdtemp() + dir_ = os.path.join(tmp, 'dir') + os.mkdir(dir_) + link = os.path.join(tmp, 'link') + _winapi.CreateJunction(dir_, link) + self.addCleanup(os_helper.unlink, link) + self.assertRaises(OSError, shutil.rmtree, link) + self.assertTrue(os.path.exists(dir_)) + self.assertTrue(os.path.lexists(link)) + errors = [] + def onexc(*args): + errors.append(args) + shutil.rmtree(link, onexc=onexc) + self.assertEqual(len(errors), 1) + self.assertIs(errors[0][0], os.path.islink) + self.assertEqual(errors[0][1], link) + self.assertIsInstance(errors[0][2], OSError) + @unittest.skipUnless(_winapi, 'only relevant on Windows') def test_rmtree_works_on_junctions(self): tmp = self.mkdtemp() @@ -277,7 +320,7 @@ def test_rmtree_works_on_junctions(self): self.assertTrue(os.path.exists(dir3)) self.assertTrue(os.path.exists(file1)) - def test_rmtree_errors(self): + def test_rmtree_errors_onerror(self): # filename is guaranteed not to exist filename = tempfile.mktemp(dir=self.mkdtemp()) self.assertRaises(FileNotFoundError, shutil.rmtree, filename) @@ -298,7 +341,8 @@ def test_rmtree_errors(self): errors = [] def onerror(*args): errors.append(args) - shutil.rmtree(filename, onerror=onerror) + with self.assertWarns(DeprecationWarning): + shutil.rmtree(filename, onerror=onerror) self.assertEqual(len(errors), 2) self.assertIs(errors[0][0], os.scandir) self.assertEqual(errors[0][1], filename) @@ -309,6 +353,37 @@ def onerror(*args): self.assertIsInstance(errors[1][2][1], NotADirectoryError) self.assertEqual(errors[1][2][1].filename, filename) + def test_rmtree_errors_onexc(self): + # filename is guaranteed not to exist + filename = tempfile.mktemp(dir=self.mkdtemp()) + self.assertRaises(FileNotFoundError, shutil.rmtree, filename) + # test that ignore_errors option is honored + shutil.rmtree(filename, ignore_errors=True) + + # existing file + tmpdir = self.mkdtemp() + write_file((tmpdir, "tstfile"), "") + filename = os.path.join(tmpdir, "tstfile") + with self.assertRaises(NotADirectoryError) as cm: + shutil.rmtree(filename) + self.assertEqual(cm.exception.filename, filename) + self.assertTrue(os.path.exists(filename)) + # test that ignore_errors option is honored + shutil.rmtree(filename, ignore_errors=True) + self.assertTrue(os.path.exists(filename)) + errors = [] + def onexc(*args): + errors.append(args) + shutil.rmtree(filename, onexc=onexc) + self.assertEqual(len(errors), 2) + self.assertIs(errors[0][0], os.scandir) + self.assertEqual(errors[0][1], filename) + self.assertIsInstance(errors[0][2], NotADirectoryError) + self.assertEqual(errors[0][2].filename, filename) + self.assertIs(errors[1][0], os.rmdir) + self.assertEqual(errors[1][1], filename) + self.assertIsInstance(errors[1][2], NotADirectoryError) + self.assertEqual(errors[1][2].filename, filename) @unittest.skipIf(sys.platform[:6] == 'cygwin', "This test can't be run on Cygwin (issue #1071513).") @@ -336,7 +411,8 @@ def 
test_on_error(self): self.addCleanup(os.chmod, self.child_file_path, old_child_file_mode) self.addCleanup(os.chmod, self.child_dir_path, old_child_dir_mode) - shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror) + with self.assertWarns(DeprecationWarning): + shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror) # Test whether onerror has actually been called. self.assertEqual(self.errorState, 3, "Expected call to onerror function did not happen.") @@ -368,6 +444,105 @@ def check_args_to_onerror(self, func, arg, exc): self.assertTrue(issubclass(exc[0], OSError)) self.errorState = 3 + @unittest.skipIf(sys.platform[:6] == 'cygwin', + "This test can't be run on Cygwin (issue #1071513).") + @os_helper.skip_if_dac_override + @os_helper.skip_unless_working_chmod + def test_on_exc(self): + self.errorState = 0 + os.mkdir(TESTFN) + self.addCleanup(shutil.rmtree, TESTFN) + + self.child_file_path = os.path.join(TESTFN, 'a') + self.child_dir_path = os.path.join(TESTFN, 'b') + os_helper.create_empty_file(self.child_file_path) + os.mkdir(self.child_dir_path) + old_dir_mode = os.stat(TESTFN).st_mode + old_child_file_mode = os.stat(self.child_file_path).st_mode + old_child_dir_mode = os.stat(self.child_dir_path).st_mode + # Make unwritable. + new_mode = stat.S_IREAD|stat.S_IEXEC + os.chmod(self.child_file_path, new_mode) + os.chmod(self.child_dir_path, new_mode) + os.chmod(TESTFN, new_mode) + + self.addCleanup(os.chmod, TESTFN, old_dir_mode) + self.addCleanup(os.chmod, self.child_file_path, old_child_file_mode) + self.addCleanup(os.chmod, self.child_dir_path, old_child_dir_mode) + + shutil.rmtree(TESTFN, onexc=self.check_args_to_onexc) + # Test whether onexc has actually been called. + self.assertEqual(self.errorState, 3, + "Expected call to onexc function did not happen.") + + def check_args_to_onexc(self, func, arg, exc): + # test_rmtree_errors deliberately runs rmtree + # on a directory that is chmod 500, which will fail. + # This function is run when shutil.rmtree fails. + # 99.9% of the time it initially fails to remove + # a file in the directory, so the first time through + # func is os.remove. + # However, some Linux machines running ZFS on + # FUSE experienced a failure earlier in the process + # at os.listdir. The first failure may legally + # be either. 
+ if self.errorState < 2: + if func is os.unlink: + self.assertEqual(arg, self.child_file_path) + elif func is os.rmdir: + self.assertEqual(arg, self.child_dir_path) + else: + self.assertIs(func, os.listdir) + self.assertIn(arg, [TESTFN, self.child_dir_path]) + self.assertTrue(isinstance(exc, OSError)) + self.errorState += 1 + else: + self.assertEqual(func, os.rmdir) + self.assertEqual(arg, TESTFN) + self.assertTrue(isinstance(exc, OSError)) + self.errorState = 3 + + @unittest.skipIf(sys.platform[:6] == 'cygwin', + "This test can't be run on Cygwin (issue #1071513).") + @os_helper.skip_if_dac_override + @os_helper.skip_unless_working_chmod + def test_both_onerror_and_onexc(self): + onerror_called = False + onexc_called = False + + def onerror(*args): + nonlocal onerror_called + onerror_called = True + + def onexc(*args): + nonlocal onexc_called + onexc_called = True + + os.mkdir(TESTFN) + self.addCleanup(shutil.rmtree, TESTFN) + + self.child_file_path = os.path.join(TESTFN, 'a') + self.child_dir_path = os.path.join(TESTFN, 'b') + os_helper.create_empty_file(self.child_file_path) + os.mkdir(self.child_dir_path) + old_dir_mode = os.stat(TESTFN).st_mode + old_child_file_mode = os.stat(self.child_file_path).st_mode + old_child_dir_mode = os.stat(self.child_dir_path).st_mode + # Make unwritable. + new_mode = stat.S_IREAD|stat.S_IEXEC + os.chmod(self.child_file_path, new_mode) + os.chmod(self.child_dir_path, new_mode) + os.chmod(TESTFN, new_mode) + + self.addCleanup(os.chmod, TESTFN, old_dir_mode) + self.addCleanup(os.chmod, self.child_file_path, old_child_file_mode) + self.addCleanup(os.chmod, self.child_dir_path, old_child_dir_mode) + + with self.assertWarns(DeprecationWarning): + shutil.rmtree(TESTFN, onerror=onerror, onexc=onexc) + self.assertTrue(onexc_called) + self.assertFalse(onerror_called) + def test_rmtree_does_not_choke_on_failing_lstat(self): try: orig_lstat = os.lstat @@ -1667,12 +1842,14 @@ def test_register_archive_format(self): ### shutil.unpack_archive - def check_unpack_archive(self, format): - self.check_unpack_archive_with_converter(format, lambda path: path) - self.check_unpack_archive_with_converter(format, pathlib.Path) - self.check_unpack_archive_with_converter(format, FakePath) + def check_unpack_archive(self, format, **kwargs): + self.check_unpack_archive_with_converter( + format, lambda path: path, **kwargs) + self.check_unpack_archive_with_converter( + format, pathlib.Path, **kwargs) + self.check_unpack_archive_with_converter(format, FakePath, **kwargs) - def check_unpack_archive_with_converter(self, format, converter): + def check_unpack_archive_with_converter(self, format, converter, **kwargs): root_dir, base_dir = self._create_files() expected = rlistdir(root_dir) expected.remove('outer') @@ -1682,36 +1859,48 @@ def check_unpack_archive_with_converter(self, format, converter): # let's try to unpack it now tmpdir2 = self.mkdtemp() - unpack_archive(converter(filename), converter(tmpdir2)) + unpack_archive(converter(filename), converter(tmpdir2), **kwargs) self.assertEqual(rlistdir(tmpdir2), expected) # and again, this time with the format specified tmpdir3 = self.mkdtemp() - unpack_archive(converter(filename), converter(tmpdir3), format=format) + unpack_archive(converter(filename), converter(tmpdir3), format=format, + **kwargs) self.assertEqual(rlistdir(tmpdir3), expected) - self.assertRaises(shutil.ReadError, unpack_archive, converter(TESTFN)) - self.assertRaises(ValueError, unpack_archive, converter(TESTFN), format='xxx') + with 
self.assertRaises(shutil.ReadError): + unpack_archive(converter(TESTFN), **kwargs) + with self.assertRaises(ValueError): + unpack_archive(converter(TESTFN), format='xxx', **kwargs) + + def check_unpack_tarball(self, format): + self.check_unpack_archive(format, filter='fully_trusted') + self.check_unpack_archive(format, filter='data') + with warnings_helper.check_warnings( + ('Python 3.14', DeprecationWarning)): + self.check_unpack_archive(format) def test_unpack_archive_tar(self): - self.check_unpack_archive('tar') + self.check_unpack_tarball('tar') @support.requires_zlib() def test_unpack_archive_gztar(self): - self.check_unpack_archive('gztar') + self.check_unpack_tarball('gztar') @support.requires_bz2() def test_unpack_archive_bztar(self): - self.check_unpack_archive('bztar') + self.check_unpack_tarball('bztar') @support.requires_lzma() @unittest.skipIf(AIX and not _maxdataOK(), "AIX MAXDATA must be 0x20000000 or larger") def test_unpack_archive_xztar(self): - self.check_unpack_archive('xztar') + self.check_unpack_tarball('xztar') @support.requires_zlib() def test_unpack_archive_zip(self): self.check_unpack_archive('zip') + with self.assertRaises(TypeError): + self.check_unpack_archive('zip', filter='data') def test_unpack_registry(self): @@ -1860,18 +2049,68 @@ def test_relative_cmd(self): rv = shutil.which(relpath, path=base_dir) self.assertIsNone(rv) - def test_cwd(self): + @unittest.skipUnless(sys.platform != "win32", + "test is for non win32") + def test_cwd_non_win32(self): # Issue #16957 base_dir = os.path.dirname(self.dir) with os_helper.change_cwd(path=self.dir): rv = shutil.which(self.file, path=base_dir) - if sys.platform == "win32": - # Windows: current directory implicitly on PATH + # non-win32: shouldn't match in the current directory. + self.assertIsNone(rv) + + @unittest.skipUnless(sys.platform == "win32", + "test is for win32") + def test_cwd_win32(self): + base_dir = os.path.dirname(self.dir) + with os_helper.change_cwd(path=self.dir): + with unittest.mock.patch('shutil._win_path_needs_curdir', return_value=True): + rv = shutil.which(self.file, path=base_dir) + # Current directory implicitly on PATH self.assertEqual(rv, os.path.join(self.curdir, self.file)) - else: - # Other platforms: shouldn't match in the current directory. 
+ with unittest.mock.patch('shutil._win_path_needs_curdir', return_value=False): + rv = shutil.which(self.file, path=base_dir) + # Current directory not on PATH self.assertIsNone(rv) + @unittest.skipUnless(sys.platform == "win32", + "test is for win32") + def test_cwd_win32_added_before_all_other_path(self): + base_dir = pathlib.Path(os.fsdecode(self.dir)) + + elsewhere_in_path_dir = base_dir / 'dir1' + elsewhere_in_path_dir.mkdir() + match_elsewhere_in_path = elsewhere_in_path_dir / 'hello.exe' + match_elsewhere_in_path.touch() + + exe_in_cwd = base_dir / 'hello.exe' + exe_in_cwd.touch() + + with os_helper.change_cwd(path=base_dir): + with unittest.mock.patch('shutil._win_path_needs_curdir', return_value=True): + rv = shutil.which('hello.exe', path=elsewhere_in_path_dir) + + self.assertEqual(os.path.abspath(rv), os.path.abspath(exe_in_cwd)) + + @unittest.skipUnless(sys.platform == "win32", + "test is for win32") + def test_pathext_match_before_path_full_match(self): + base_dir = pathlib.Path(os.fsdecode(self.dir)) + dir1 = base_dir / 'dir1' + dir2 = base_dir / 'dir2' + dir1.mkdir() + dir2.mkdir() + + pathext_match = dir1 / 'hello.com.exe' + path_match = dir2 / 'hello.com' + pathext_match.touch() + path_match.touch() + + test_path = os.pathsep.join([str(dir1), str(dir2)]) + assert os.path.basename(shutil.which( + 'hello.com', path=test_path, mode = os.F_OK + )).lower() == 'hello.com.exe' + @os_helper.skip_if_dac_override def test_non_matching_mode(self): # Set the file read-only and ask for writeable files. @@ -2005,6 +2244,32 @@ def test_pathext_with_empty_str(self): rv = shutil.which(program, path=self.temp_dir) self.assertEqual(rv, temp_filexyz.name) + # See GH-75586 + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_pathext_applied_on_files_in_path(self): + with os_helper.EnvironmentVarGuard() as env: + env["PATH"] = self.temp_dir + env["PATHEXT"] = ".test" + + test_path = pathlib.Path(self.temp_dir) / "test_program.test" + test_path.touch(mode=0o755) + + self.assertEqual(shutil.which("test_program"), str(test_path)) + + # See GH-75586 + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_win_path_needs_curdir(self): + with unittest.mock.patch('_winapi.NeedCurrentDirectoryForExePath', return_value=True) as need_curdir_mock: + self.assertTrue(shutil._win_path_needs_curdir('dontcare', os.X_OK)) + need_curdir_mock.assert_called_once_with('dontcare') + need_curdir_mock.reset_mock() + self.assertTrue(shutil._win_path_needs_curdir('dontcare', 0)) + need_curdir_mock.assert_not_called() + + with unittest.mock.patch('_winapi.NeedCurrentDirectoryForExePath', return_value=False) as need_curdir_mock: + self.assertFalse(shutil._win_path_needs_curdir('dontcare', os.X_OK)) + need_curdir_mock.assert_called_once_with('dontcare') + class TestWhichBytes(TestWhich): def setUp(self): diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index a313da29b4a4fd..bb7bf436d2d721 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -8,6 +8,7 @@ import array import contextlib import errno +import gc import io import itertools import math @@ -836,6 +837,12 @@ def requireSocket(*args): class GeneralModuleTests(unittest.TestCase): + @unittest.skipUnless(_socket is not None, 'need _socket module') + def test_socket_type(self): + self.assertTrue(gc.is_tracked(_socket.socket)) + with self.assertRaisesRegex(TypeError, "immutable"): + _socket.socket.foo = 1 + def test_SocketType_is_socketobject(self): import _socket 
self.assertTrue(socket.SocketType is _socket.socket) @@ -2562,8 +2569,7 @@ def testHyperVConstants(self): socket.HV_GUID_LOOPBACK def testCreateHyperVSocketWithUnknownProtoFailure(self): - expected = "A protocol was specified in the socket function call " \ - "that does not support the semantics of the socket type requested" + expected = r"\[WinError 10041\]" with self.assertRaisesRegex(OSError, expected): socket.socket(socket.AF_HYPERV, socket.SOCK_STREAM) @@ -5492,10 +5498,10 @@ def alarm_handler(signal, frame): self.fail("caught timeout instead of Alarm") except Alarm: pass - except: + except BaseException as e: self.fail("caught other exception instead of Alarm:" " %s(%s):\n%s" % - (sys.exc_info()[:2] + (traceback.format_exc(),))) + (type(e), e, traceback.format_exc())) else: self.fail("nothing caught") finally: diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py index 2fa5069423327a..c81d559cde315d 100644 --- a/Lib/test/test_socketserver.py +++ b/Lib/test/test_socketserver.py @@ -47,16 +47,8 @@ def receive(sock, n, timeout=test.support.SHORT_TIMEOUT): else: raise RuntimeError("timed out on %r" % (sock,)) -if HAVE_UNIX_SOCKETS and HAVE_FORKING: - class ForkingUnixStreamServer(socketserver.ForkingMixIn, - socketserver.UnixStreamServer): - pass - - class ForkingUnixDatagramServer(socketserver.ForkingMixIn, - socketserver.UnixDatagramServer): - pass - +@test.support.requires_fork() @contextlib.contextmanager def simple_subprocess(testcase): """Tests that a custom child process is not waited on (Issue 1540386)""" @@ -211,7 +203,7 @@ def test_ThreadingUnixStreamServer(self): @requires_forking def test_ForkingUnixStreamServer(self): with simple_subprocess(self): - self.run_server(ForkingUnixStreamServer, + self.run_server(socketserver.ForkingUnixStreamServer, socketserver.StreamRequestHandler, self.stream_examine) @@ -247,7 +239,7 @@ def test_ThreadingUnixDatagramServer(self): @requires_unix_sockets @requires_forking def test_ForkingUnixDatagramServer(self): - self.run_server(ForkingUnixDatagramServer, + self.run_server(socketserver.ForkingUnixDatagramServer, socketserver.DatagramRequestHandler, self.dgram_examine) diff --git a/Lib/test/test_sqlite3/test_dbapi.py b/Lib/test/test_sqlite3/test_dbapi.py index 695e213cdc7b75..1bb0e13e356e78 100644 --- a/Lib/test/test_sqlite3/test_dbapi.py +++ b/Lib/test/test_sqlite3/test_dbapi.py @@ -577,6 +577,30 @@ def test_connection_bad_reinit(self): cx.executemany, "insert into t values(?)", ((v,) for v in range(3))) + def test_connection_config(self): + op = sqlite.SQLITE_DBCONFIG_ENABLE_FKEY + with memory_database() as cx: + with self.assertRaisesRegex(ValueError, "unknown"): + cx.getconfig(-1) + + # Toggle and verify. + old = cx.getconfig(op) + new = not old + cx.setconfig(op, new) + self.assertEqual(cx.getconfig(op), new) + + cx.setconfig(op) # defaults to True + self.assertTrue(cx.getconfig(op)) + + # Check that foreign key support was actually enabled. + with cx: + cx.executescript(""" + create table t(t integer primary key); + create table u(u, foreign key(u) references t(t)); + """) + with self.assertRaisesRegex(sqlite.IntegrityError, "constraint"): + cx.execute("insert into u values(0)") + class UninitialisedConnectionTests(unittest.TestCase): def setUp(self): @@ -606,7 +630,6 @@ def test_serialize_deserialize(self): with cx: cx.execute("create table t(t)") data = cx.serialize() - self.assertEqual(len(data), 8192) # Remove test table, verify that it was removed. 
with cx: diff --git a/Lib/test/test_sqlite3/test_regression.py b/Lib/test/test_sqlite3/test_regression.py index ad83a97c8c40d6..7e8221e7227e6e 100644 --- a/Lib/test/test_sqlite3/test_regression.py +++ b/Lib/test/test_sqlite3/test_regression.py @@ -491,21 +491,21 @@ def tearDown(self): def test_recursive_cursor_init(self): conv = lambda x: self.cur.__init__(self.con) with patch.dict(sqlite.converters, {"INIT": conv}): - self.cur.execute(f'select x as "x [INIT]", x from test') + self.cur.execute('select x as "x [INIT]", x from test') self.assertRaisesRegex(sqlite.ProgrammingError, self.msg, self.cur.fetchall) def test_recursive_cursor_close(self): conv = lambda x: self.cur.close() with patch.dict(sqlite.converters, {"CLOSE": conv}): - self.cur.execute(f'select x as "x [CLOSE]", x from test') + self.cur.execute('select x as "x [CLOSE]", x from test') self.assertRaisesRegex(sqlite.ProgrammingError, self.msg, self.cur.fetchall) def test_recursive_cursor_iter(self): conv = lambda x, l=[]: self.cur.fetchone() if l else l.append(None) with patch.dict(sqlite.converters, {"ITER": conv}): - self.cur.execute(f'select x as "x [ITER]", x from test') + self.cur.execute('select x as "x [ITER]", x from test') self.assertRaisesRegex(sqlite.ProgrammingError, self.msg, self.cur.fetchall) diff --git a/Lib/test/test_sqlite3/test_types.py b/Lib/test/test_sqlite3/test_types.py index 5e0ff353cbbd6b..fde5f888e64009 100644 --- a/Lib/test/test_sqlite3/test_types.py +++ b/Lib/test/test_sqlite3/test_types.py @@ -517,7 +517,7 @@ def test_sqlite_timestamp(self): self.assertEqual(ts, ts2) def test_sql_timestamp(self): - now = datetime.datetime.utcnow() + now = datetime.datetime.now(tz=datetime.UTC) self.cur.execute("insert into test(ts) values (current_timestamp)") self.cur.execute("select ts from test") with self.assertWarnsRegex(DeprecationWarning, "converter"): diff --git a/Lib/test/test_sqlite3/test_userfunctions.py b/Lib/test/test_sqlite3/test_userfunctions.py index 0970b0378ad615..632d657d416fd4 100644 --- a/Lib/test/test_sqlite3/test_userfunctions.py +++ b/Lib/test/test_sqlite3/test_userfunctions.py @@ -562,7 +562,7 @@ def test_win_exception_in_finalize(self): # callback errors to sqlite3_step(); this implies that OperationalError # is _not_ raised. 
with patch.object(WindowSumInt, "finalize", side_effect=BadWindow): - name = f"exception_in_finalize" + name = "exception_in_finalize" self.con.create_window_function(name, 1, WindowSumInt) self.cur.execute(self.query % name) self.cur.fetchall() diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index d4eb2d2e81fe0f..d46ce5e60e2141 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -151,7 +151,6 @@ def data_file(*name): OP_SINGLE_ECDH_USE = getattr(ssl, "OP_SINGLE_ECDH_USE", 0) OP_CIPHER_SERVER_PREFERENCE = getattr(ssl, "OP_CIPHER_SERVER_PREFERENCE", 0) OP_ENABLE_MIDDLEBOX_COMPAT = getattr(ssl, "OP_ENABLE_MIDDLEBOX_COMPAT", 0) -OP_IGNORE_UNEXPECTED_EOF = getattr(ssl, "OP_IGNORE_UNEXPECTED_EOF", 0) # Ubuntu has patched OpenSSL and changed behavior of security level 2 # see https://bugs.python.org/issue41561#msg389003 @@ -254,7 +253,7 @@ def wrapper(*args, **kw): def handle_error(prefix): - exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) + exc_format = ' '.join(traceback.format_exception(sys.exception())) if support.verbose: sys.stdout.write(prefix + exc_format) @@ -958,8 +957,7 @@ def test_options(self): # SSLContext also enables these by default default |= (OP_NO_COMPRESSION | OP_CIPHER_SERVER_PREFERENCE | OP_SINGLE_DH_USE | OP_SINGLE_ECDH_USE | - OP_ENABLE_MIDDLEBOX_COMPAT | - OP_IGNORE_UNEXPECTED_EOF) + OP_ENABLE_MIDDLEBOX_COMPAT) self.assertEqual(default, ctx.options) with warnings_helper.check_warnings(): ctx.options |= ssl.OP_NO_TLSv1 @@ -1291,6 +1289,8 @@ def test_load_verify_cadata(self): "not enough data: cadata does not contain a certificate" ): ctx.load_verify_locations(cadata=b"broken") + with self.assertRaises(ssl.SSLError): + ctx.load_verify_locations(cadata=cacert_der + b"A") @unittest.skipIf(Py_DEBUG_WIN32, "Avoid mixing debug/release CRT on Windows") def test_load_dh_params(self): @@ -2084,13 +2084,13 @@ def test_bio_handshake(self): self.assertIs(sslobj._sslobj.owner, sslobj) self.assertIsNone(sslobj.cipher()) self.assertIsNone(sslobj.version()) - self.assertIsNotNone(sslobj.shared_ciphers()) + self.assertIsNone(sslobj.shared_ciphers()) self.assertRaises(ValueError, sslobj.getpeercert) if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES: self.assertIsNone(sslobj.get_channel_binding('tls-unique')) self.ssl_io_loop(sock, incoming, outgoing, sslobj.do_handshake) self.assertTrue(sslobj.cipher()) - self.assertIsNotNone(sslobj.shared_ciphers()) + self.assertIsNone(sslobj.shared_ciphers()) self.assertIsNotNone(sslobj.version()) self.assertTrue(sslobj.getpeercert()) if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES: @@ -2120,6 +2120,20 @@ def test_bio_read_write_data(self): self.assertEqual(buf, b'foo\n') self.ssl_io_loop(sock, incoming, outgoing, sslobj.unwrap) + def test_transport_eof(self): + client_context, server_context, hostname = testing_context() + with socket.socket(socket.AF_INET) as sock: + sock.connect(self.server_addr) + incoming = ssl.MemoryBIO() + outgoing = ssl.MemoryBIO() + sslobj = client_context.wrap_bio(incoming, outgoing, + server_hostname=hostname) + self.ssl_io_loop(sock, incoming, outgoing, sslobj.do_handshake) + + # Simulate EOF from the transport. 
+ incoming.write_eof() + self.assertRaises(ssl.SSLEOFError, sslobj.read) + @support.requires_resource('network') class NetworkedTests(unittest.TestCase): @@ -4039,7 +4053,7 @@ def cb_wrong_return_type(ssl_sock, server_name, initial_context): def test_shared_ciphers(self): client_context, server_context, hostname = testing_context() client_context.set_ciphers("AES128:AES256") - server_context.set_ciphers("AES256") + server_context.set_ciphers("AES256:eNULL") expected_algs = [ "AES256", "AES-256", # TLS 1.3 ciphers are always enabled diff --git a/Lib/test/test_strptime.py b/Lib/test/test_strptime.py index e3fcabef946116..810c5a36e02f41 100644 --- a/Lib/test/test_strptime.py +++ b/Lib/test/test_strptime.py @@ -242,6 +242,16 @@ def test_ValueError(self): # 5. Julian/ordinal day (%j) is specified with %G, but not %Y with self.assertRaises(ValueError): _strptime._strptime("1999 256", "%G %j") + # 6. Invalid ISO weeks + invalid_iso_weeks = [ + "2019-00-1", + "2019-54-1", + "2021-53-1", + ] + for invalid_iso_dtstr in invalid_iso_weeks: + with self.subTest(invalid_iso_dtstr): + with self.assertRaises(ValueError): + _strptime._strptime(invalid_iso_dtstr, "%G-%V-%u") def test_strptime_exception_context(self): diff --git a/Lib/test/test_subclassinit.py b/Lib/test/test_subclassinit.py index 0ad7d17fbd4ddd..310473a4a2fe58 100644 --- a/Lib/test/test_subclassinit.py +++ b/Lib/test/test_subclassinit.py @@ -134,30 +134,28 @@ class Descriptor: def __set_name__(self, owner, name): 1/0 - with self.assertRaises(RuntimeError) as cm: + with self.assertRaises(ZeroDivisionError) as cm: class NotGoingToWork: attr = Descriptor() - exc = cm.exception - self.assertRegex(str(exc), r'\bNotGoingToWork\b') - self.assertRegex(str(exc), r'\battr\b') - self.assertRegex(str(exc), r'\bDescriptor\b') - self.assertIsInstance(exc.__cause__, ZeroDivisionError) + notes = cm.exception.__notes__ + self.assertRegex(str(notes), r'\bNotGoingToWork\b') + self.assertRegex(str(notes), r'\battr\b') + self.assertRegex(str(notes), r'\bDescriptor\b') def test_set_name_wrong(self): class Descriptor: def __set_name__(self): pass - with self.assertRaises(RuntimeError) as cm: + with self.assertRaises(TypeError) as cm: class NotGoingToWork: attr = Descriptor() - exc = cm.exception - self.assertRegex(str(exc), r'\bNotGoingToWork\b') - self.assertRegex(str(exc), r'\battr\b') - self.assertRegex(str(exc), r'\bDescriptor\b') - self.assertIsInstance(exc.__cause__, TypeError) + notes = cm.exception.__notes__ + self.assertRegex(str(notes), r'\bNotGoingToWork\b') + self.assertRegex(str(notes), r'\battr\b') + self.assertRegex(str(notes), r'\bDescriptor\b') def test_set_name_lookup(self): resolved = [] diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index a68b38cf79d537..698ab48f48eaa1 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -1,6 +1,8 @@ """Unit tests for zero-argument super() & related machinery.""" import unittest +from unittest.mock import patch +from test import shadowed_super class A: @@ -283,17 +285,28 @@ def f(self): def test_obscure_super_errors(self): def f(): super() - self.assertRaises(RuntimeError, f) + with self.assertRaisesRegex(RuntimeError, r"no arguments"): + f() + + class C: + def f(): + super() + with self.assertRaisesRegex(RuntimeError, r"no arguments"): + C.f() + def f(x): del x super() - self.assertRaises(RuntimeError, f, None) + with self.assertRaisesRegex(RuntimeError, r"arg\[0\] deleted"): + f(None) + class X: def f(x): nonlocal __class__ del __class__ super() - self.assertRaises(RuntimeError, 
X().f) + with self.assertRaisesRegex(RuntimeError, r"empty __class__ cell"): + X().f() def test_cell_as_self(self): class X: @@ -325,6 +338,78 @@ def test_super_argtype(self): with self.assertRaisesRegex(TypeError, "argument 1 must be a type"): super(1, int) + def test_shadowed_global(self): + self.assertEqual(shadowed_super.C().method(), "truly super") + + def test_shadowed_local(self): + class super: + msg = "quite super" + + class C: + def method(self): + return super().msg + + self.assertEqual(C().method(), "quite super") + + def test_shadowed_dynamic(self): + class MySuper: + msg = "super super" + + class C: + def method(self): + return super().msg + + with patch(f"{__name__}.super", MySuper) as m: + self.assertEqual(C().method(), "super super") + + def test_shadowed_dynamic_two_arg(self): + call_args = [] + class MySuper: + def __init__(self, *args): + call_args.append(args) + msg = "super super" + + class C: + def method(self): + return super(1, 2).msg + + with patch(f"{__name__}.super", MySuper) as m: + self.assertEqual(C().method(), "super super") + self.assertEqual(call_args, [(1, 2)]) + + def test_attribute_error(self): + class C: + def method(self): + return super().msg + + with self.assertRaisesRegex(AttributeError, "'super' object has no attribute 'msg'"): + C().method() + + def test_bad_first_arg(self): + class C: + def method(self): + return super(1, self).method() + + with self.assertRaisesRegex(TypeError, "argument 1 must be a type"): + C().method() + + def test_super___class__(self): + class C: + def method(self): + return super().__class__ + + self.assertEqual(C().method(), super) + + def test_super_subclass___class__(self): + class mysuper(super): + pass + + class C: + def method(self): + return mysuper(C, self).__class__ + + self.assertEqual(C().method(), mysuper) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index f23653558a9119..f959bbb4400702 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -1853,6 +1853,30 @@ def f(x: *b) Traceback (most recent call last): ... SyntaxError: invalid syntax + +Invalid bytes literals: + + >>> b"Ā" + Traceback (most recent call last): + ... + b"Ā" + ^^^ + SyntaxError: bytes can only contain ASCII literal characters + + >>> b"абвгде" + Traceback (most recent call last): + ... + b"абвгде" + ^^^^^^^^ + SyntaxError: bytes can only contain ASCII literal characters + + >>> b"abc ъющый" # first 3 letters are ascii + Traceback (most recent call last): + ... + b"abc ъющый" + ^^^^^^^^^^^ + SyntaxError: bytes can only contain ASCII literal characters + """ import re diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index b839985def9a12..611cd27ecf1240 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -385,7 +385,8 @@ def test_refcount(self): self.assertRaises(TypeError, sys.getrefcount) c = sys.getrefcount(None) n = None - self.assertEqual(sys.getrefcount(None), c+1) + # Singleton refcnts don't change + self.assertEqual(sys.getrefcount(None), c) del n self.assertEqual(sys.getrefcount(None), c) if hasattr(sys, "gettotalrefcount"): @@ -532,13 +533,13 @@ def g456(): main_id = threading.get_ident() self.assertIn(main_id, d) self.assertIn(thread_id, d) - self.assertEqual((None, None, None), d.pop(main_id)) + self.assertEqual(None, d.pop(main_id)) # Verify that the captured thread frame is blocked in g456, called # from f123. This is a little tricky, since various bits of # threading.py are also in the thread's call stack. 
- exc_type, exc_value, exc_tb = d.pop(thread_id) - stack = traceback.extract_stack(exc_tb.tb_frame) + exc_value = d.pop(thread_id) + stack = traceback.extract_stack(exc_value.__traceback__.tb_frame) for i, (filename, lineno, funcname, sourceline) in enumerate(stack): if funcname == "f123": break @@ -1445,7 +1446,7 @@ class C(object): pass def func(): return sys._getframe() x = func() - check(x, size('3Pi3c7P2ic??2P')) + check(x, size('3Pii3c7P2ic??2P')) # function def func(): pass check(func, size('14Pi')) @@ -1556,7 +1557,7 @@ def delx(self): del self.__x '10P' # PySequenceMethods '2P' # PyBufferProcs '6P' - '1P' # Specializer cache + '1PI' # Specializer cache ) class newstyleclass(object): pass # Separate block for PyDictKeysObject with 8 keys and 5 entries @@ -1649,8 +1650,8 @@ def test_pythontypes(self): check(_ast.AST(), size('P')) try: raise TypeError - except TypeError: - tb = sys.exc_info()[2] + except TypeError as e: + tb = e.__traceback__ # traceback if tb is not None: check(tb, size('2P2i')) diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py index 4907c930e143d5..980321e169b9e5 100644 --- a/Lib/test/test_sys_settrace.py +++ b/Lib/test/test_sys_settrace.py @@ -2808,5 +2808,65 @@ def foo(*args): sys.settrace(sys.gettrace()) +class TestLinesAfterTraceStarted(TraceTestCase): + + def test_events(self): + tracer = Tracer() + sys._getframe().f_trace = tracer.trace + sys.settrace(tracer.trace) + line = 4 + line = 5 + sys.settrace(None) + self.compare_events( + TestLinesAfterTraceStarted.test_events.__code__.co_firstlineno, + tracer.events, [ + (4, 'line'), + (5, 'line'), + (6, 'line')]) + + +class TestSetLocalTrace(TraceTestCase): + + def test_with_branches(self): + + def tracefunc(frame, event, arg): + if frame.f_code.co_name == "func": + frame.f_trace = tracefunc + line = frame.f_lineno - frame.f_code.co_firstlineno + events.append((line, event)) + return tracefunc + + def func(arg = 1): + N = 1 + if arg >= 2: + not_reached = 3 + else: + reached = 5 + if arg >= 3: + not_reached = 7 + else: + reached = 9 + the_end = 10 + + EXPECTED_EVENTS = [ + (0, 'call'), + (1, 'line'), + (2, 'line'), + (5, 'line'), + (6, 'line'), + (9, 'line'), + (10, 'line'), + (10, 'return'), + ] + + events = [] + sys.settrace(tracefunc) + sys._getframe().f_trace = tracefunc + func() + self.assertEqual(events, EXPECTED_EVENTS) + sys.settrace(None) + + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index 75b60e9a50e78a..e8d322d20a5a8e 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -2,9 +2,13 @@ import os import io from hashlib import sha256 -from contextlib import contextmanager +from contextlib import contextmanager, ExitStack from random import Random import pathlib +import shutil +import re +import warnings +import stat import unittest import unittest.mock @@ -13,6 +17,7 @@ from test import support from test.support import os_helper from test.support import script_helper +from test.support import warnings_helper # Check for our compression modules. 
try: @@ -108,7 +113,7 @@ def test_fileobj_regular_file(self): "regular file extraction failed") def test_fileobj_readlines(self): - self.tar.extract("ustar/regtype", TEMPDIR) + self.tar.extract("ustar/regtype", TEMPDIR, filter='data') tarinfo = self.tar.getmember("ustar/regtype") with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1: lines1 = fobj1.readlines() @@ -126,7 +131,7 @@ def test_fileobj_readlines(self): "fileobj.readlines() failed") def test_fileobj_iter(self): - self.tar.extract("ustar/regtype", TEMPDIR) + self.tar.extract("ustar/regtype", TEMPDIR, filter='data') tarinfo = self.tar.getmember("ustar/regtype") with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1: lines1 = fobj1.readlines() @@ -136,7 +141,8 @@ def test_fileobj_iter(self): "fileobj.__iter__() failed") def test_fileobj_seek(self): - self.tar.extract("ustar/regtype", TEMPDIR) + self.tar.extract("ustar/regtype", TEMPDIR, + filter='data') with open(os.path.join(TEMPDIR, "ustar/regtype"), "rb") as fobj: data = fobj.read() @@ -467,7 +473,7 @@ def test_premature_end_of_archive(self): t = tar.next() with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"): - tar.extract(t, TEMPDIR) + tar.extract(t, TEMPDIR, filter='data') with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"): tar.extractfile(t).read() @@ -479,6 +485,13 @@ def test_length_zero_header(self): with tarfile.open(support.findfile('recursion.tar')) as tar: pass + def test_extractfile_name(self): + # gh-74468: TarFile.name must name a file, not a parent archive. + file = self.tar.getmember('ustar/regtype') + with self.tar.extractfile(file) as fobj: + self.assertEqual(fobj.name, 'ustar/regtype') + + class MiscReadTestBase(CommonReadTest): def requires_name_attribute(self): pass @@ -622,16 +635,16 @@ def test_find_members(self): def test_extract_hardlink(self): # Test hardlink extraction (e.g. bug #857297). with tarfile.open(tarname, errorlevel=1, encoding="iso8859-1") as tar: - tar.extract("ustar/regtype", TEMPDIR) + tar.extract("ustar/regtype", TEMPDIR, filter='data') self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/regtype")) - tar.extract("ustar/lnktype", TEMPDIR) + tar.extract("ustar/lnktype", TEMPDIR, filter='data') self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/lnktype")) with open(os.path.join(TEMPDIR, "ustar/lnktype"), "rb") as f: data = f.read() self.assertEqual(sha256sum(data), sha256_regtype) - tar.extract("ustar/symtype", TEMPDIR) + tar.extract("ustar/symtype", TEMPDIR, filter='data') self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/symtype")) with open(os.path.join(TEMPDIR, "ustar/symtype"), "rb") as f: data = f.read() @@ -646,13 +659,14 @@ def test_extractall(self): os.mkdir(DIR) try: directories = [t for t in tar if t.isdir()] - tar.extractall(DIR, directories) + tar.extractall(DIR, directories, filter='fully_trusted') for tarinfo in directories: path = os.path.join(DIR, tarinfo.name) if sys.platform != "win32": # Win32 has no support for fine grained permissions. 
self.assertEqual(tarinfo.mode & 0o777, - os.stat(path).st_mode & 0o777) + os.stat(path).st_mode & 0o777, + tarinfo.name) def format_mtime(mtime): if isinstance(mtime, float): return "{} ({})".format(mtime, mtime.hex()) @@ -676,7 +690,7 @@ def test_extract_directory(self): try: with tarfile.open(tarname, encoding="iso8859-1") as tar: tarinfo = tar.getmember(dirtype) - tar.extract(tarinfo, path=DIR) + tar.extract(tarinfo, path=DIR, filter='fully_trusted') extracted = os.path.join(DIR, dirtype) self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime) if sys.platform != "win32": @@ -689,7 +703,7 @@ def test_extractall_pathlike_name(self): with os_helper.temp_dir(DIR), \ tarfile.open(tarname, encoding="iso8859-1") as tar: directories = [t for t in tar if t.isdir()] - tar.extractall(DIR, directories) + tar.extractall(DIR, directories, filter='fully_trusted') for tarinfo in directories: path = DIR / tarinfo.name self.assertEqual(os.path.getmtime(path), tarinfo.mtime) @@ -700,7 +714,7 @@ def test_extract_pathlike_name(self): with os_helper.temp_dir(DIR), \ tarfile.open(tarname, encoding="iso8859-1") as tar: tarinfo = tar.getmember(dirtype) - tar.extract(tarinfo, path=DIR) + tar.extract(tarinfo, path=DIR, filter='fully_trusted') extracted = DIR / dirtype self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime) @@ -1068,7 +1082,7 @@ class GNUReadTest(LongnameTest, ReadTest, unittest.TestCase): # an all platforms, and after that a test that will work only on # platforms/filesystems that prove to support sparse files. def _test_sparse_file(self, name): - self.tar.extract(name, TEMPDIR) + self.tar.extract(name, TEMPDIR, filter='data') filename = os.path.join(TEMPDIR, name) with open(filename, "rb") as fobj: data = fobj.read() @@ -1435,7 +1449,8 @@ def test_extractall_symlinks(self): with tarfile.open(temparchive, errorlevel=2) as tar: # this should not raise OSError: [Errno 17] File exists try: - tar.extractall(path=tempdir) + tar.extractall(path=tempdir, + filter='fully_trusted') except OSError: self.fail("extractall failed with symlinked files") finally: @@ -2540,6 +2555,15 @@ def make_simple_tarfile(self, tar_name): for tardata in files: tf.add(tardata, arcname=os.path.basename(tardata)) + def make_evil_tarfile(self, tar_name): + files = [support.findfile('tokenize_tests.txt')] + self.addCleanup(os_helper.unlink, tar_name) + with tarfile.open(tar_name, 'w') as tf: + benign = tarfile.TarInfo('benign') + tf.addfile(benign, fileobj=io.BytesIO(b'')) + evil = tarfile.TarInfo('../evil') + tf.addfile(evil, fileobj=io.BytesIO(b'')) + def test_bad_use(self): rc, out, err = self.tarfilecmd_failure() self.assertEqual(out, b'') @@ -2696,6 +2720,25 @@ def test_extract_command_verbose(self): finally: os_helper.rmtree(tarextdir) + def test_extract_command_filter(self): + self.make_evil_tarfile(tmpname) + # Make an inner directory, so the member named '../evil' + # is still extracted into `tarextdir` + destdir = os.path.join(tarextdir, 'dest') + os.mkdir(tarextdir) + try: + with os_helper.temp_cwd(destdir): + self.tarfilecmd_failure('-e', tmpname, + '-v', + '--filter', 'data') + out = self.tarfilecmd('-e', tmpname, + '-v', + '--filter', 'fully_trusted', + PYTHONIOENCODING='utf-8') + self.assertIn(b' file is extracted.', out) + finally: + os_helper.rmtree(tarextdir) + def test_extract_command_different_directory(self): self.make_simple_tarfile(tmpname) try: @@ -2779,7 +2822,7 @@ class LinkEmulationTest(ReadTest, unittest.TestCase): # symbolic or hard links tarfile tries to extract these types of members # as 
the regular files they point to. def _test_link_extraction(self, name): - self.tar.extract(name, TEMPDIR) + self.tar.extract(name, TEMPDIR, filter='fully_trusted') with open(os.path.join(TEMPDIR, name), "rb") as f: data = f.read() self.assertEqual(sha256sum(data), sha256_regtype) @@ -2911,8 +2954,10 @@ def test_extract_with_numeric_owner(self, mock_geteuid, mock_chmod, mock_chown): with self._setup_test(mock_geteuid) as (tarfl, filename_1, _, filename_2): - tarfl.extract(filename_1, TEMPDIR, numeric_owner=True) - tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True) + tarfl.extract(filename_1, TEMPDIR, numeric_owner=True, + filter='fully_trusted') + tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True, + filter='fully_trusted') # convert to filesystem paths f_filename_1 = os.path.join(TEMPDIR, filename_1) @@ -2930,7 +2975,8 @@ def test_extractall_with_numeric_owner(self, mock_geteuid, mock_chmod, mock_chown): with self._setup_test(mock_geteuid) as (tarfl, filename_1, dirname_1, filename_2): - tarfl.extractall(TEMPDIR, numeric_owner=True) + tarfl.extractall(TEMPDIR, numeric_owner=True, + filter='fully_trusted') # convert to filesystem paths f_filename_1 = os.path.join(TEMPDIR, filename_1) @@ -2955,7 +3001,8 @@ def test_extractall_with_numeric_owner(self, mock_geteuid, mock_chmod, def test_extract_without_numeric_owner(self, mock_geteuid, mock_chmod, mock_chown): with self._setup_test(mock_geteuid) as (tarfl, filename_1, _, _): - tarfl.extract(filename_1, TEMPDIR, numeric_owner=False) + tarfl.extract(filename_1, TEMPDIR, numeric_owner=False, + filter='fully_trusted') # convert to filesystem paths f_filename_1 = os.path.join(TEMPDIR, filename_1) @@ -2969,6 +3016,910 @@ def test_keyword_only(self, mock_geteuid): tarfl.extract, filename_1, TEMPDIR, False, True) +class ReplaceTests(ReadTest, unittest.TestCase): + def test_replace_name(self): + member = self.tar.getmember('ustar/regtype') + replaced = member.replace(name='misc/other') + self.assertEqual(replaced.name, 'misc/other') + self.assertEqual(member.name, 'ustar/regtype') + self.assertEqual(self.tar.getmember('ustar/regtype').name, + 'ustar/regtype') + + def test_replace_deep(self): + member = self.tar.getmember('pax/regtype1') + replaced = member.replace() + replaced.pax_headers['gname'] = 'not-bar' + self.assertEqual(member.pax_headers['gname'], 'bar') + self.assertEqual( + self.tar.getmember('pax/regtype1').pax_headers['gname'], 'bar') + + def test_replace_shallow(self): + member = self.tar.getmember('pax/regtype1') + replaced = member.replace(deep=False) + replaced.pax_headers['gname'] = 'not-bar' + self.assertEqual(member.pax_headers['gname'], 'not-bar') + self.assertEqual( + self.tar.getmember('pax/regtype1').pax_headers['gname'], 'not-bar') + + def test_replace_all(self): + member = self.tar.getmember('ustar/regtype') + for attr_name in ('name', 'mtime', 'mode', 'linkname', + 'uid', 'gid', 'uname', 'gname'): + with self.subTest(attr_name=attr_name): + replaced = member.replace(**{attr_name: None}) + self.assertEqual(getattr(replaced, attr_name), None) + self.assertNotEqual(getattr(member, attr_name), None) + + def test_replace_internal(self): + member = self.tar.getmember('ustar/regtype') + with self.assertRaises(TypeError): + member.replace(offset=123456789) + + +class NoneInfoExtractTests(ReadTest): + # These mainly check that all kinds of members are extracted successfully + # if some metadata is None. + # Some of the methods do additional spot checks. + + # We also test that the default filters can deal with None. 
+ + extraction_filter = None + + @classmethod + def setUpClass(cls): + tar = tarfile.open(tarname, mode='r', encoding="iso8859-1") + cls.control_dir = pathlib.Path(TEMPDIR) / "extractall_ctrl" + tar.errorlevel = 0 + with ExitStack() as cm: + if cls.extraction_filter is None: + cm.enter_context(warnings.catch_warnings( + action="ignore", category=DeprecationWarning)) + tar.extractall(cls.control_dir, filter=cls.extraction_filter) + tar.close() + cls.control_paths = set( + p.relative_to(cls.control_dir) + for p in pathlib.Path(cls.control_dir).glob('**/*')) + + @classmethod + def tearDownClass(cls): + shutil.rmtree(cls.control_dir) + + def check_files_present(self, directory): + got_paths = set( + p.relative_to(directory) + for p in pathlib.Path(directory).glob('**/*')) + self.assertEqual(self.control_paths, got_paths) + + @contextmanager + def extract_with_none(self, *attr_names): + DIR = pathlib.Path(TEMPDIR) / "extractall_none" + self.tar.errorlevel = 0 + for member in self.tar.getmembers(): + for attr_name in attr_names: + setattr(member, attr_name, None) + with os_helper.temp_dir(DIR): + self.tar.extractall(DIR, filter='fully_trusted') + self.check_files_present(DIR) + yield DIR + + def test_extractall_none_mtime(self): + # mtimes of extracted files should be later than 'now' -- the mtime + # of a previously created directory. + now = pathlib.Path(TEMPDIR).stat().st_mtime + with self.extract_with_none('mtime') as DIR: + for path in pathlib.Path(DIR).glob('**/*'): + with self.subTest(path=path): + try: + mtime = path.stat().st_mtime + except OSError: + # Some systems can't stat symlinks, ignore those + if not path.is_symlink(): + raise + else: + self.assertGreaterEqual(path.stat().st_mtime, now) + + def test_extractall_none_mode(self): + # modes of directories and regular files should match the mode + # of a "normally" created directory or regular file + dir_mode = pathlib.Path(TEMPDIR).stat().st_mode + regular_file = pathlib.Path(TEMPDIR) / 'regular_file' + regular_file.write_text('') + regular_file_mode = regular_file.stat().st_mode + with self.extract_with_none('mode') as DIR: + for path in pathlib.Path(DIR).glob('**/*'): + with self.subTest(path=path): + if path.is_dir(): + self.assertEqual(path.stat().st_mode, dir_mode) + elif path.is_file(): + self.assertEqual(path.stat().st_mode, + regular_file_mode) + + def test_extractall_none_uid(self): + with self.extract_with_none('uid'): + pass + + def test_extractall_none_gid(self): + with self.extract_with_none('gid'): + pass + + def test_extractall_none_uname(self): + with self.extract_with_none('uname'): + pass + + def test_extractall_none_gname(self): + with self.extract_with_none('gname'): + pass + + def test_extractall_none_ownership(self): + with self.extract_with_none('uid', 'gid', 'uname', 'gname'): + pass + +class NoneInfoExtractTests_Data(NoneInfoExtractTests, unittest.TestCase): + extraction_filter = 'data' + +class NoneInfoExtractTests_FullyTrusted(NoneInfoExtractTests, + unittest.TestCase): + extraction_filter = 'fully_trusted' + +class NoneInfoExtractTests_Tar(NoneInfoExtractTests, unittest.TestCase): + extraction_filter = 'tar' + +class NoneInfoExtractTests_Default(NoneInfoExtractTests, + unittest.TestCase): + extraction_filter = None + +class NoneInfoTests_Misc(unittest.TestCase): + def test_add(self): + # When addfile() encounters None metadata, it raises a ValueError + bio = io.BytesIO() + for tarformat in (tarfile.USTAR_FORMAT, tarfile.GNU_FORMAT, + tarfile.PAX_FORMAT): + with self.subTest(tarformat=tarformat): + tar = 
tarfile.open(fileobj=bio, mode='w', format=tarformat) + tarinfo = tar.gettarinfo(tarname) + try: + tar.addfile(tarinfo) + except Exception: + if tarformat == tarfile.USTAR_FORMAT: + # In the old, limited format, adding might fail for + # reasons like the UID being too large + pass + else: + raise + else: + for attr_name in ('mtime', 'mode', 'uid', 'gid', + 'uname', 'gname'): + with self.subTest(attr_name=attr_name): + replaced = tarinfo.replace(**{attr_name: None}) + with self.assertRaisesRegex(ValueError, + f"{attr_name}"): + tar.addfile(replaced) + + def test_list(self): + # Change some metadata to None, then compare list() output + # word-for-word. We want list() to not raise, and to only change + # printout for the affected piece of metadata. + # (n.b.: some contents of the test archive are hardcoded.) + for attr_names in ({'mtime'}, {'mode'}, {'uid'}, {'gid'}, + {'uname'}, {'gname'}, + {'uid', 'uname'}, {'gid', 'gname'}): + with (self.subTest(attr_names=attr_names), + tarfile.open(tarname, encoding="iso8859-1") as tar): + tio_prev = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n') + with support.swap_attr(sys, 'stdout', tio_prev): + tar.list() + for member in tar.getmembers(): + for attr_name in attr_names: + setattr(member, attr_name, None) + tio_new = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n') + with support.swap_attr(sys, 'stdout', tio_new): + tar.list() + for expected, got in zip(tio_prev.detach().getvalue().split(), + tio_new.detach().getvalue().split()): + if attr_names == {'mtime'} and re.match(rb'2003-01-\d\d', expected): + self.assertEqual(got, b'????-??-??') + elif attr_names == {'mtime'} and re.match(rb'\d\d:\d\d:\d\d', expected): + self.assertEqual(got, b'??:??:??') + elif attr_names == {'mode'} and re.match( + rb'.([r-][w-][x-]){3}', expected): + self.assertEqual(got, b'??????????') + elif attr_names == {'uname'} and expected.startswith( + (b'tarfile/', b'lars/', b'foo/')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_group, exp_group) + self.assertRegex(got_user, b'[0-9]+') + elif attr_names == {'gname'} and expected.endswith( + (b'/tarfile', b'/users', b'/bar')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_user, exp_user) + self.assertRegex(got_group, b'[0-9]+') + elif attr_names == {'uid'} and expected.startswith( + (b'1000/')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_group, exp_group) + self.assertEqual(got_user, b'None') + elif attr_names == {'gid'} and expected.endswith((b'/100')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_user, exp_user) + self.assertEqual(got_group, b'None') + elif attr_names == {'uid', 'uname'} and expected.startswith( + (b'tarfile/', b'lars/', b'foo/', b'1000/')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_group, exp_group) + self.assertEqual(got_user, b'None') + elif attr_names == {'gname', 'gid'} and expected.endswith( + (b'/tarfile', b'/users', b'/bar', b'/100')): + exp_user, exp_group = expected.split(b'/') + got_user, got_group = got.split(b'/') + self.assertEqual(got_user, exp_user) + self.assertEqual(got_group, b'None') + else: + # In other cases the output should be the same + self.assertEqual(expected, got) + +def _filemode_to_int(mode): + """Inverse of `stat.filemode` (for permission bits) + + Using 
mode strings rather than numbers makes the later tests more readable. + """ + str_mode = mode[1:] + result = ( + {'r': stat.S_IRUSR, '-': 0}[str_mode[0]] + | {'w': stat.S_IWUSR, '-': 0}[str_mode[1]] + | {'x': stat.S_IXUSR, '-': 0, + 's': stat.S_IXUSR | stat.S_ISUID, + 'S': stat.S_ISUID}[str_mode[2]] + | {'r': stat.S_IRGRP, '-': 0}[str_mode[3]] + | {'w': stat.S_IWGRP, '-': 0}[str_mode[4]] + | {'x': stat.S_IXGRP, '-': 0, + 's': stat.S_IXGRP | stat.S_ISGID, + 'S': stat.S_ISGID}[str_mode[5]] + | {'r': stat.S_IROTH, '-': 0}[str_mode[6]] + | {'w': stat.S_IWOTH, '-': 0}[str_mode[7]] + | {'x': stat.S_IXOTH, '-': 0, + 't': stat.S_IXOTH | stat.S_ISVTX, + 'T': stat.S_ISVTX}[str_mode[8]] + ) + # check we did this right + assert stat.filemode(result)[1:] == mode[1:] + + return result + +class ArchiveMaker: + """Helper to create a tar file with specific contents + + Usage: + + with ArchiveMaker() as t: + t.add('filename', ...) + + with t.open() as tar: + ... # `tar` is now a TarFile with 'filename' in it! + """ + def __init__(self): + self.bio = io.BytesIO() + + def __enter__(self): + self.tar_w = tarfile.TarFile(mode='w', fileobj=self.bio) + return self + + def __exit__(self, *exc): + self.tar_w.close() + self.contents = self.bio.getvalue() + self.bio = None + + def add(self, name, *, type=None, symlink_to=None, hardlink_to=None, + mode=None, **kwargs): + """Add a member to the test archive. Call within `with`.""" + name = str(name) + tarinfo = tarfile.TarInfo(name).replace(**kwargs) + if mode: + tarinfo.mode = _filemode_to_int(mode) + if symlink_to is not None: + type = tarfile.SYMTYPE + tarinfo.linkname = str(symlink_to) + if hardlink_to is not None: + type = tarfile.LNKTYPE + tarinfo.linkname = str(hardlink_to) + if name.endswith('/') and type is None: + type = tarfile.DIRTYPE + if type is not None: + tarinfo.type = type + if tarinfo.isreg(): + fileobj = io.BytesIO(bytes(tarinfo.size)) + else: + fileobj = None + self.tar_w.addfile(tarinfo, fileobj) + + def open(self, **kwargs): + """Open the resulting archive as TarFile. Call after `with`.""" + bio = io.BytesIO(self.contents) + return tarfile.open(fileobj=bio, **kwargs) + +# Under WASI, `os_helper.can_symlink` is False to make +# `skip_unless_symlink` skip symlink tests. " +# But in the following tests we use can_symlink to *determine* which +# behavior is expected. +# Like other symlink tests, skip these on WASI for now. +if support.is_wasi: + def symlink_test(f): + return unittest.skip("WASI: Skip symlink test for now")(f) +else: + def symlink_test(f): + return f + + +class TestExtractionFilters(unittest.TestCase): + + # A temporary directory for the extraction results. + # All files that "escape" the destination path should still end + # up in this directory. + outerdir = pathlib.Path(TEMPDIR) / 'outerdir' + + # The destination for the extraction, within `outerdir` + destdir = outerdir / 'dest' + + @contextmanager + def check_context(self, tar, filter): + """Extracts `tar` to `self.destdir` and allows checking the result + + If an error occurs, it must be checked using `expect_exception` + + Otherwise, all resulting files must be checked using `expect_file`, + except the destination directory itself and parent directories of + other files. + When checking directories, do so before their contents. 
+ """ + with os_helper.temp_dir(self.outerdir): + try: + tar.extractall(self.destdir, filter=filter) + except Exception as exc: + self.raised_exception = exc + self.expected_paths = set() + else: + self.raised_exception = None + self.expected_paths = set(self.outerdir.glob('**/*')) + self.expected_paths.discard(self.destdir) + try: + yield + finally: + tar.close() + if self.raised_exception: + raise self.raised_exception + self.assertEqual(self.expected_paths, set()) + + def expect_file(self, name, type=None, symlink_to=None, mode=None): + """Check a single file. See check_context.""" + if self.raised_exception: + raise self.raised_exception + # use normpath() rather than resolve() so we don't follow symlinks + path = pathlib.Path(os.path.normpath(self.destdir / name)) + self.assertIn(path, self.expected_paths) + self.expected_paths.remove(path) + if mode is not None and os_helper.can_chmod(): + got = stat.filemode(stat.S_IMODE(path.stat().st_mode)) + self.assertEqual(got, mode) + if type is None and isinstance(name, str) and name.endswith('/'): + type = tarfile.DIRTYPE + if symlink_to is not None: + got = (self.destdir / name).readlink() + expected = pathlib.Path(symlink_to) + # The symlink might be the same (textually) as what we expect, + # but some systems change the link to an equivalent path, so + # we fall back to samefile(). + if expected != got: + self.assertTrue(got.samefile(expected)) + elif type == tarfile.REGTYPE or type is None: + self.assertTrue(path.is_file()) + elif type == tarfile.DIRTYPE: + self.assertTrue(path.is_dir()) + elif type == tarfile.FIFOTYPE: + self.assertTrue(path.is_fifo()) + else: + raise NotImplementedError(type) + for parent in path.parents: + self.expected_paths.discard(parent) + + def expect_exception(self, exc_type, message_re='.'): + with self.assertRaisesRegex(exc_type, message_re): + if self.raised_exception is not None: + raise self.raised_exception + self.raised_exception = None + + def test_benign_file(self): + with ArchiveMaker() as arc: + arc.add('benign.txt') + for filter in 'fully_trusted', 'tar', 'data': + with self.check_context(arc.open(), filter): + self.expect_file('benign.txt') + + def test_absolute(self): + # Test handling a member with an absolute path + # Inspired by 'absolute1' in https://github.com/jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add(self.outerdir / 'escaped.evil') + + with self.check_context(arc.open(), 'fully_trusted'): + self.expect_file('../escaped.evil') + + for filter in 'tar', 'data': + with self.check_context(arc.open(), filter): + if str(self.outerdir).startswith('/'): + # We strip leading slashes, as e.g. GNU tar does + # (without --absolute-filenames). + outerdir_stripped = str(self.outerdir).lstrip('/') + self.expect_file(f'{outerdir_stripped}/escaped.evil') + else: + # On this system, absolute paths don't have leading + # slashes. + # So, there's nothing to strip. We refuse to unpack + # to an absolute path, nonetheless. 
+ self.expect_exception( + tarfile.AbsolutePathError, + """['"].*escaped.evil['"] has an absolute path""") + + @symlink_test + def test_parent_symlink(self): + # Test interplaying symlinks + # Inspired by 'dirsymlink2a' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('current', symlink_to='.') + arc.add('parent', symlink_to='current/..') + arc.add('parent/evil') + + if os_helper.can_symlink(): + with self.check_context(arc.open(), 'fully_trusted'): + if self.raised_exception is not None: + # Windows will refuse to create a file that's a symlink to itself + # (and tarfile doesn't swallow that exception) + self.expect_exception(FileExistsError) + # The other cases will fail with this error too. + # Skip the rest of this test. + return + else: + self.expect_file('current', symlink_to='.') + self.expect_file('parent', symlink_to='current/..') + self.expect_file('../evil') + + with self.check_context(arc.open(), 'tar'): + self.expect_exception( + tarfile.OutsideDestinationError, + """'parent/evil' would be extracted to ['"].*evil['"], """ + + "which is outside the destination") + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + tarfile.LinkOutsideDestinationError, + """'parent' would link to ['"].*outerdir['"], """ + + "which is outside the destination") + + else: + # No symlink support. The symlinks are ignored. + with self.check_context(arc.open(), 'fully_trusted'): + self.expect_file('parent/evil') + with self.check_context(arc.open(), 'tar'): + self.expect_file('parent/evil') + with self.check_context(arc.open(), 'data'): + self.expect_file('parent/evil') + + @symlink_test + def test_parent_symlink2(self): + # Test interplaying symlinks + # Inspired by 'dirsymlink2b' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('current', symlink_to='.') + arc.add('current/parent', symlink_to='..') + arc.add('parent/evil') + + with self.check_context(arc.open(), 'fully_trusted'): + if os_helper.can_symlink(): + self.expect_file('current', symlink_to='.') + self.expect_file('parent', symlink_to='..') + self.expect_file('../evil') + else: + self.expect_file('current/') + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'tar'): + if os_helper.can_symlink(): + self.expect_exception( + tarfile.OutsideDestinationError, + "'parent/evil' would be extracted to " + + """['"].*evil['"], which is outside """ + + "the destination") + else: + self.expect_file('current/') + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + tarfile.LinkOutsideDestinationError, + """'current/parent' would link to ['"].*['"], """ + + "which is outside the destination") + + @symlink_test + def test_absolute_symlink(self): + # Test symlink to an absolute path + # Inspired by 'dirsymlink' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('parent', symlink_to=self.outerdir) + arc.add('parent/evil') + + with self.check_context(arc.open(), 'fully_trusted'): + if os_helper.can_symlink(): + self.expect_file('parent', symlink_to=self.outerdir) + self.expect_file('../evil') + else: + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'tar'): + if os_helper.can_symlink(): + self.expect_exception( + tarfile.OutsideDestinationError, + "'parent/evil' would be extracted to " + + """['"].*evil['"], which is outside """ + + "the destination") + else: + self.expect_file('parent/evil') + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + 
tarfile.AbsoluteLinkError, + "'parent' is a symlink to an absolute path") + + @symlink_test + def test_sly_relative0(self): + # Inspired by 'relative0' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('../moo', symlink_to='..//tmp/moo') + + try: + with self.check_context(arc.open(), filter='fully_trusted'): + if os_helper.can_symlink(): + if isinstance(self.raised_exception, FileExistsError): + # XXX TarFile happens to fail creating a parent + # directory. + # This might be a bug, but fixing it would hurt + # security. + # Note that e.g. GNU `tar` rejects '..' components, + # so you could argue this is an invalid archive and we + # just raise an bad type of exception. + self.expect_exception(FileExistsError) + else: + self.expect_file('../moo', symlink_to='..//tmp/moo') + else: + # The symlink can't be extracted and is ignored + pass + except FileExistsError: + pass + + for filter in 'tar', 'data': + with self.check_context(arc.open(), filter): + self.expect_exception( + tarfile.OutsideDestinationError, + "'../moo' would be extracted to " + + "'.*moo', which is outside " + + "the destination") + + @symlink_test + def test_sly_relative2(self): + # Inspired by 'relative2' in jwilk/traversal-archives + with ArchiveMaker() as arc: + arc.add('tmp/') + arc.add('tmp/../../moo', symlink_to='tmp/../..//tmp/moo') + + with self.check_context(arc.open(), 'fully_trusted'): + self.expect_file('tmp', type=tarfile.DIRTYPE) + if os_helper.can_symlink(): + self.expect_file('../moo', symlink_to='tmp/../../tmp/moo') + + for filter in 'tar', 'data': + with self.check_context(arc.open(), filter): + self.expect_exception( + tarfile.OutsideDestinationError, + "'tmp/../../moo' would be extracted to " + + """['"].*moo['"], which is outside the """ + + "destination") + + def test_modes(self): + # Test how file modes are extracted + # (Note that the modes are ignored on platforms without working chmod) + with ArchiveMaker() as arc: + arc.add('all_bits', mode='?rwsrwsrwt') + arc.add('perm_bits', mode='?rwxrwxrwx') + arc.add('exec_group_other', mode='?rw-rwxrwx') + arc.add('read_group_only', mode='?---r-----') + arc.add('no_bits', mode='?---------') + arc.add('dir/', mode='?---rwsrwt') + + # On some systems, setting the sticky bit is a no-op. + # Check if that's the case. 
+ tmp_filename = os.path.join(TEMPDIR, "tmp.file") + with open(tmp_filename, 'w'): + pass + os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX) + have_sticky_files = (os.stat(tmp_filename).st_mode & stat.S_ISVTX) + os.unlink(tmp_filename) + + os.mkdir(tmp_filename) + os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX) + have_sticky_dirs = (os.stat(tmp_filename).st_mode & stat.S_ISVTX) + os.rmdir(tmp_filename) + + with self.check_context(arc.open(), 'fully_trusted'): + if have_sticky_files: + self.expect_file('all_bits', mode='?rwsrwsrwt') + else: + self.expect_file('all_bits', mode='?rwsrwsrwx') + self.expect_file('perm_bits', mode='?rwxrwxrwx') + self.expect_file('exec_group_other', mode='?rw-rwxrwx') + self.expect_file('read_group_only', mode='?---r-----') + self.expect_file('no_bits', mode='?---------') + if have_sticky_dirs: + self.expect_file('dir/', mode='?---rwsrwt') + else: + self.expect_file('dir/', mode='?---rwsrwx') + + with self.check_context(arc.open(), 'tar'): + self.expect_file('all_bits', mode='?rwxr-xr-x') + self.expect_file('perm_bits', mode='?rwxr-xr-x') + self.expect_file('exec_group_other', mode='?rw-r-xr-x') + self.expect_file('read_group_only', mode='?---r-----') + self.expect_file('no_bits', mode='?---------') + self.expect_file('dir/', mode='?---r-xr-x') + + with self.check_context(arc.open(), 'data'): + normal_dir_mode = stat.filemode(stat.S_IMODE( + self.outerdir.stat().st_mode)) + self.expect_file('all_bits', mode='?rwxr-xr-x') + self.expect_file('perm_bits', mode='?rwxr-xr-x') + self.expect_file('exec_group_other', mode='?rw-r--r--') + self.expect_file('read_group_only', mode='?rw-r-----') + self.expect_file('no_bits', mode='?rw-------') + self.expect_file('dir/', mode=normal_dir_mode) + + def test_pipe(self): + # Test handling of a special file + with ArchiveMaker() as arc: + arc.add('foo', type=tarfile.FIFOTYPE) + + for filter in 'fully_trusted', 'tar': + with self.check_context(arc.open(), filter): + if hasattr(os, 'mkfifo'): + self.expect_file('foo', type=tarfile.FIFOTYPE) + else: + # The pipe can't be extracted and is skipped. + pass + + with self.check_context(arc.open(), 'data'): + self.expect_exception( + tarfile.SpecialFileError, + "'foo' is a special file") + + def test_special_files(self): + # Creating device files is tricky. Instead of attempting that let's + # only check the filter result. + for special_type in tarfile.FIFOTYPE, tarfile.CHRTYPE, tarfile.BLKTYPE: + tarinfo = tarfile.TarInfo('foo') + tarinfo.type = special_type + trusted = tarfile.fully_trusted_filter(tarinfo, '') + self.assertIs(trusted, tarinfo) + tar = tarfile.tar_filter(tarinfo, '') + self.assertEqual(tar.type, special_type) + with self.assertRaises(tarfile.SpecialFileError) as cm: + tarfile.data_filter(tarinfo, '') + self.assertIsInstance(cm.exception.tarinfo, tarfile.TarInfo) + self.assertEqual(cm.exception.tarinfo.name, 'foo') + + def test_fully_trusted_filter(self): + # The 'fully_trusted' filter returns the original TarInfo objects. + with tarfile.TarFile.open(tarname) as tar: + for tarinfo in tar.getmembers(): + filtered = tarfile.fully_trusted_filter(tarinfo, '') + self.assertIs(filtered, tarinfo) + + def test_tar_filter(self): + # The 'tar' filter returns TarInfo objects with the same name/type. + # (It can also fail for particularly "evil" input, but we don't have + # that in the test archive.) 
+ with tarfile.TarFile.open(tarname) as tar: + for tarinfo in tar.getmembers(): + filtered = tarfile.tar_filter(tarinfo, '') + self.assertIs(filtered.name, tarinfo.name) + self.assertIs(filtered.type, tarinfo.type) + + def test_data_filter(self): + # The 'data' filter either raises, or returns TarInfo with the same + # name/type. + with tarfile.TarFile.open(tarname) as tar: + for tarinfo in tar.getmembers(): + try: + filtered = tarfile.data_filter(tarinfo, '') + except tarfile.FilterError: + continue + self.assertIs(filtered.name, tarinfo.name) + self.assertIs(filtered.type, tarinfo.type) + + def test_default_filter_warns(self): + """Ensure the default filter warns""" + with ArchiveMaker() as arc: + arc.add('foo') + with warnings_helper.check_warnings( + ('Python 3.14', DeprecationWarning)): + with self.check_context(arc.open(), None): + self.expect_file('foo') + + def test_change_default_filter_on_instance(self): + tar = tarfile.TarFile(tarname, 'r') + def strict_filter(tarinfo, path): + if tarinfo.name == 'ustar/regtype': + return tarinfo + else: + return None + tar.extraction_filter = strict_filter + with self.check_context(tar, None): + self.expect_file('ustar/regtype') + + def test_change_default_filter_on_class(self): + def strict_filter(tarinfo, path): + if tarinfo.name == 'ustar/regtype': + return tarinfo + else: + return None + tar = tarfile.TarFile(tarname, 'r') + with support.swap_attr(tarfile.TarFile, 'extraction_filter', + staticmethod(strict_filter)): + with self.check_context(tar, None): + self.expect_file('ustar/regtype') + + def test_change_default_filter_on_subclass(self): + class TarSubclass(tarfile.TarFile): + def extraction_filter(self, tarinfo, path): + if tarinfo.name == 'ustar/regtype': + return tarinfo + else: + return None + + tar = TarSubclass(tarname, 'r') + with self.check_context(tar, None): + self.expect_file('ustar/regtype') + + def test_change_default_filter_to_string(self): + tar = tarfile.TarFile(tarname, 'r') + tar.extraction_filter = 'data' + with self.check_context(tar, None): + self.expect_exception(TypeError) + + def test_custom_filter(self): + def custom_filter(tarinfo, path): + self.assertIs(path, self.destdir) + if tarinfo.name == 'move_this': + return tarinfo.replace(name='moved') + if tarinfo.name == 'ignore_this': + return None + return tarinfo + + with ArchiveMaker() as arc: + arc.add('move_this') + arc.add('ignore_this') + arc.add('keep') + with self.check_context(arc.open(), custom_filter): + self.expect_file('moved') + self.expect_file('keep') + + def test_bad_filter_name(self): + with ArchiveMaker() as arc: + arc.add('foo') + with self.check_context(arc.open(), 'bad filter name'): + self.expect_exception(ValueError) + + def test_stateful_filter(self): + # Stateful filters should be possible. + # (This doesn't really test tarfile. Rather, it demonstrates + # that third parties can implement a stateful filter.) 
+ class StatefulFilter: + def __enter__(self): + self.num_files_processed = 0 + return self + + def __call__(self, tarinfo, path): + try: + tarinfo = tarfile.data_filter(tarinfo, path) + except tarfile.FilterError: + return None + self.num_files_processed += 1 + return tarinfo + + def __exit__(self, *exc_info): + self.done = True + + with ArchiveMaker() as arc: + arc.add('good') + arc.add('bad', symlink_to='/') + arc.add('good') + with StatefulFilter() as custom_filter: + with self.check_context(arc.open(), custom_filter): + self.expect_file('good') + self.assertEqual(custom_filter.num_files_processed, 2) + self.assertEqual(custom_filter.done, True) + + def test_errorlevel(self): + def extracterror_filter(tarinfo, path): + raise tarfile.ExtractError('failed with ExtractError') + def filtererror_filter(tarinfo, path): + raise tarfile.FilterError('failed with FilterError') + def oserror_filter(tarinfo, path): + raise OSError('failed with OSError') + def tarerror_filter(tarinfo, path): + raise tarfile.TarError('failed with base TarError') + def valueerror_filter(tarinfo, path): + raise ValueError('failed with ValueError') + + with ArchiveMaker() as arc: + arc.add('file') + + # If errorlevel is 0, errors affected by errorlevel are ignored + + with self.check_context(arc.open(errorlevel=0), extracterror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=0), filtererror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=0), oserror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=0), tarerror_filter): + self.expect_exception(tarfile.TarError) + + with self.check_context(arc.open(errorlevel=0), valueerror_filter): + self.expect_exception(ValueError) + + # If 1, all fatal errors are raised + + with self.check_context(arc.open(errorlevel=1), extracterror_filter): + self.expect_file('file') + + with self.check_context(arc.open(errorlevel=1), filtererror_filter): + self.expect_exception(tarfile.FilterError) + + with self.check_context(arc.open(errorlevel=1), oserror_filter): + self.expect_exception(OSError) + + with self.check_context(arc.open(errorlevel=1), tarerror_filter): + self.expect_exception(tarfile.TarError) + + with self.check_context(arc.open(errorlevel=1), valueerror_filter): + self.expect_exception(ValueError) + + # If 2, all non-fatal errors are raised as well. + + with self.check_context(arc.open(errorlevel=2), extracterror_filter): + self.expect_exception(tarfile.ExtractError) + + with self.check_context(arc.open(errorlevel=2), filtererror_filter): + self.expect_exception(tarfile.FilterError) + + with self.check_context(arc.open(errorlevel=2), oserror_filter): + self.expect_exception(OSError) + + with self.check_context(arc.open(errorlevel=2), tarerror_filter): + self.expect_exception(tarfile.TarError) + + with self.check_context(arc.open(errorlevel=2), valueerror_filter): + self.expect_exception(ValueError) + + # We only handle ExtractionError, FilterError & OSError specially. 
+ + with self.check_context(arc.open(errorlevel='boo!'), filtererror_filter): + self.expect_exception(TypeError) # errorlevel is not int + + def setUpModule(): os_helper.unlink(TEMPDIR) os.makedirs(TEMPDIR) diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py index 7c2c8de7a2e6fc..db08fb1c7f2a42 100644 --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -12,6 +12,7 @@ import types import weakref import gc +import shutil from unittest import mock import unittest @@ -849,6 +850,15 @@ def test_for_tempdir_is_bytes_issue40701_api_warts(self): finally: tempfile.tempdir = orig_tempdir + def test_path_is_absolute(self): + # Test that the path returned by mkdtemp with a relative `dir` + # argument is absolute + try: + path = tempfile.mkdtemp(dir=".") + self.assertTrue(os.path.isabs(path)) + finally: + os.rmdir(path) + class TestMktemp(BaseTestCase): """Test mktemp().""" @@ -1015,7 +1025,7 @@ def use_closed(): self.assertRaises(ValueError, use_closed) def test_context_man_not_del_on_close_if_delete_on_close_false(self): - # Issue gh-58451: tempfile.NamedTemporaryFile is not particulary useful + # Issue gh-58451: tempfile.NamedTemporaryFile is not particularly useful # on Windows # A NamedTemporaryFile is NOT deleted when closed if # delete_on_close=False, but is deleted on context manager exit @@ -1607,7 +1617,7 @@ def test_explicit_cleanup(self): finally: os.rmdir(dir) - def test_explict_cleanup_ignore_errors(self): + def test_explicit_cleanup_ignore_errors(self): """Test that cleanup doesn't return an error when ignoring them.""" with tempfile.TemporaryDirectory() as working_dir: temp_dir = self.do_create( @@ -1837,6 +1847,11 @@ def test_flags(self): d.cleanup() self.assertFalse(os.path.exists(d.name)) + def test_delete_false(self): + with tempfile.TemporaryDirectory(delete=False) as working_dir: + pass + self.assertTrue(os.path.exists(working_dir)) + shutil.rmtree(working_dir) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index a39a267b403d83..fdd74c37e26235 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -1343,6 +1343,7 @@ def func(): import test.support test.support.run_in_subinterp_with_config( {subinterp_code!r}, + use_main_obmalloc=True, allow_fork=True, allow_exec=True, allow_threads={allowed}, diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py index 64c9472706549b..ba4ef49078c5a7 100644 --- a/Lib/test/test_tkinter/test_widgets.py +++ b/Lib/test/test_tkinter/test_widgets.py @@ -1377,6 +1377,11 @@ class MenuTest(AbstractWidgetTest, unittest.TestCase): def create(self, **kwargs): return tkinter.Menu(self.root, **kwargs) + def test_indexcommand_none(self): + widget = self.create() + i = widget.index('none') + self.assertIsNone(i) + def test_configure_postcommand(self): widget = self.create() self.checkCommandParam(widget, 'postcommand') diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 63c2501cfe2338..911b53e5816588 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -11,7 +11,7 @@ from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) from test.support import os_helper -from test.support.script_helper import run_test_script, make_script +from test.support.script_helper import run_test_script, make_script, run_python_until_end import os import token @@ -1470,6 +1470,19 @@ def test_comment_at_the_end_of_the_source_without_newline(self): 
self.assertEqual(tok_name[tokens[i + 1].exact_type], tok_name[expected_tokens[i]]) self.assertEqual(tok_name[tokens[-1].exact_type], tok_name[token.ENDMARKER]) + def test_invalid_character_in_fstring_middle(self): + # See gh-103824 + script = b'''F""" + \xe5"""''' + + with os_helper.temp_dir() as temp_dir: + filename = os.path.join(temp_dir, "script.py") + with open(filename, 'wb') as file: + file.write(script) + rs, _ = run_python_until_end(filename) + self.assertIn(b"SyntaxError", rs.err) + + class UntokenizeTest(TestCase): def test_bad_input_order(self): @@ -1625,6 +1638,10 @@ def test_random_files(self): # 7 more testfiles fail. Remove them also until the failure is diagnosed. testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py")) + + # TODO: Remove this once we can unparse PEP 701 syntax + testfiles.remove(os.path.join(tempdir, "test_fstring.py")) + for f in ('buffer', 'builtin', 'fileio', 'inspect', 'os', 'platform', 'sys'): testfiles.remove(os.path.join(tempdir, "test_%s.py") % f) @@ -1937,25 +1954,39 @@ def test_string(self): """) self.check_tokenize('f"abc"', """\ - STRING 'f"abc"' (1, 0) (1, 6) + FSTRING_START 'f"' (1, 0) (1, 2) + FSTRING_MIDDLE 'abc' (1, 2) (1, 5) + FSTRING_END '"' (1, 5) (1, 6) """) self.check_tokenize('fR"a{b}c"', """\ - STRING 'fR"a{b}c"' (1, 0) (1, 9) + FSTRING_START 'fR"' (1, 0) (1, 3) + FSTRING_MIDDLE 'a' (1, 3) (1, 4) + LBRACE '{' (1, 4) (1, 5) + NAME 'b' (1, 5) (1, 6) + RBRACE '}' (1, 6) (1, 7) + FSTRING_MIDDLE 'c' (1, 7) (1, 8) + FSTRING_END '"' (1, 8) (1, 9) """) self.check_tokenize('f"""abc"""', """\ - STRING 'f\"\"\"abc\"\"\"' (1, 0) (1, 10) + FSTRING_START 'f\"""' (1, 0) (1, 4) + FSTRING_MIDDLE 'abc' (1, 4) (1, 7) + FSTRING_END '\"""' (1, 7) (1, 10) """) self.check_tokenize(r'f"abc\ def"', """\ - STRING 'f"abc\\\\\\ndef"' (1, 0) (2, 4) + FSTRING_START \'f"\' (1, 0) (1, 2) + FSTRING_MIDDLE 'abc\\\\\\ndef' (1, 2) (2, 3) + FSTRING_END '"' (2, 3) (2, 4) """) self.check_tokenize(r'Rf"abc\ def"', """\ - STRING 'Rf"abc\\\\\\ndef"' (1, 0) (2, 4) + FSTRING_START 'Rf"' (1, 0) (1, 3) + FSTRING_MIDDLE 'abc\\\\\\ndef' (1, 3) (2, 3) + FSTRING_END '"' (2, 3) (2, 4) """) def test_function(self): diff --git a/Lib/test/test_tools/test_sundry.py b/Lib/test/test_tools/test_sundry.py index 6a3dc12781b2b6..3177fafb84a65b 100644 --- a/Lib/test/test_tools/test_sundry.py +++ b/Lib/test/test_tools/test_sundry.py @@ -1,4 +1,4 @@ -"""Tests for scripts in the Tools directory. +"""Tests for scripts in the Tools/scripts directory. This file contains extremely basic regression tests for the scripts found in the Tools directory of a Python checkout or tarball which don't have separate @@ -17,14 +17,7 @@ class TestSundryScripts(unittest.TestCase): # At least make sure the rest don't have syntax errors. When tests are # added for a script it should be added to the allowlist below. - # scripts that have independent tests. - allowlist = ['reindent'] - # scripts that can't be imported without running - denylist = ['make_ctype'] - # denylisted for other reasons - other = ['2to3'] - - skiplist = denylist + allowlist + other + skiplist = ['2to3'] # import logging registers "atfork" functions which keep indirectly the # logging module dictionary alive. 
Mock the function to be able to unload diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 7ef93b3f0ac332..19a2be88d2c1bc 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -296,15 +296,15 @@ class PrintExceptionAtExit(object): def __init__(self): try: x = 1 / 0 - except Exception: - self.exc_info = sys.exc_info() - # self.exc_info[1] (traceback) contains frames: + except Exception as e: + self.exc = e + # self.exc.__traceback__ contains frames: # explicitly clear the reference to self in the current # frame to break a reference cycle self = None def __del__(self): - traceback.print_exception(*self.exc_info) + traceback.print_exception(self.exc) # Keep a reference in the module namespace to call the destructor # when the module is unloaded @@ -394,6 +394,8 @@ def get_exception(self, callable, slice_start=0, slice_end=-1): class CAPIExceptionFormattingMixin: + LEGACY = 0 + def get_exception(self, callable, slice_start=0, slice_end=-1): from _testcapi import exception_print try: @@ -401,11 +403,13 @@ def get_exception(self, callable, slice_start=0, slice_end=-1): self.fail("No exception thrown.") except Exception as e: with captured_output("stderr") as tbstderr: - exception_print(e) + exception_print(e, self.LEGACY) return tbstderr.getvalue().splitlines()[slice_start:slice_end] callable_line = get_exception.__code__.co_firstlineno + 3 +class CAPIExceptionFormattingLegacyMixin(CAPIExceptionFormattingMixin): + LEGACY = 1 @requires_debug_ranges() class TracebackErrorLocationCaretTestBase: @@ -798,12 +802,12 @@ def f(): )() actual = self.get_exception(f) expected = [ - f"Traceback (most recent call last):", + "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", - f" callable()", + " callable()", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", - f" .method", - f" ^^^^^^", + " .method", + " ^^^^^^", ] self.assertEqual(actual, expected) @@ -814,11 +818,11 @@ def f(): )() actual = self.get_exception(f) expected = [ - f"Traceback (most recent call last):", + "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", - f" callable()", + " callable()", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", - f" method", + " method", ] self.assertEqual(actual, expected) @@ -829,12 +833,12 @@ def f(): )() actual = self.get_exception(f) expected = [ - f"Traceback (most recent call last):", + "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", - f" callable()", + " callable()", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", - f" . method", - f" ^^^^^^", + " . 
method", + " ^^^^^^", ] self.assertEqual(actual, expected) @@ -844,11 +848,11 @@ def f(): actual = self.get_exception(f) expected = [ - f"Traceback (most recent call last):", + "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", - f" callable()", + " callable()", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 1}, in f", - f" width", + " width", ] self.assertEqual(actual, expected) @@ -860,11 +864,11 @@ def f(): actual = self.get_exception(f) expected = [ - f"Traceback (most recent call last):", + "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", - f" callable()", + " callable()", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", - f" raise ValueError(width)", + " raise ValueError(width)", ] self.assertEqual(actual, expected) @@ -878,12 +882,12 @@ def f(): actual = self.get_exception(f) expected = [ - f"Traceback (most recent call last):", + "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", - f" callable()", + " callable()", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 4}, in f", - f" print(1, www(", - f" ^^^^", + " print(1, www(", + " ^^^^", ] self.assertEqual(actual, expected) @@ -912,6 +916,16 @@ class CPythonTracebackErrorCaretTests( Same set of tests as above but with Python's internal traceback printing. """ +@cpython_only +@requires_debug_ranges() +class CPythonTracebackErrorCaretTests( + CAPIExceptionFormattingLegacyMixin, + TracebackErrorLocationCaretTestBase, + unittest.TestCase, +): + """ + Same set of tests as above but with Python's legacy internal traceback printing. + """ class TracebackFormatTests(unittest.TestCase): @@ -923,8 +937,8 @@ def check_traceback_format(self, cleanup_func=None): from _testcapi import traceback_print try: self.some_exception() - except KeyError: - type_, value, tb = sys.exc_info() + except KeyError as e: + tb = e.__traceback__ if cleanup_func is not None: # Clear the inner frames, not this one cleanup_func(tb.tb_next) @@ -1197,8 +1211,7 @@ def test_recursive_traceback_python(self): def test_recursive_traceback_cpython_internal(self): from _testcapi import exception_print def render_exc(): - exc_type, exc_value, exc_tb = sys.exc_info() - exception_print(exc_value) + exception_print(sys.exception()) self._check_recursive_traceback_display(render_exc) def test_format_stack(self): @@ -1228,8 +1241,8 @@ def __eq__(self, other): except UnhashableException: try: raise ex1 - except UnhashableException: - exc_type, exc_val, exc_tb = sys.exc_info() + except UnhashableException as e: + exc_val = e with captured_output("stderr") as stderr_f: exception_print(exc_val) @@ -1526,11 +1539,11 @@ def __repr__(self): e.__notes__ = BadThing() notes_repr = 'bad repr' - self.assertEqual(self.get_report(e), vanilla + notes_repr) + self.assertEqual(self.get_report(e), vanilla + notes_repr + '\n') e.__notes__ = Unprintable() err_msg = '<__notes__ repr() failed>' - self.assertEqual(self.get_report(e), vanilla + err_msg) + self.assertEqual(self.get_report(e), vanilla + err_msg + '\n') # non-string item in the __notes__ sequence e.__notes__ = [BadThing(), 'Final Note'] @@ -1542,6 +1555,14 @@ def __repr__(self): err_msg = '<note str() failed>' self.assertEqual(self.get_report(e), vanilla + err_msg + '\nFinal Note\n') + e.__notes__ = "please do not explode me" + err_msg = "'please do not explode me'" + self.assertEqual(self.get_report(e), vanilla + err_msg + '\n') + + 
e.__notes__ = b"please do not show me as numbers" + err_msg = "b'please do not show me as numbers'" + self.assertEqual(self.get_report(e), vanilla + err_msg + '\n') + def test_exception_with_note_with_multiple_notes(self): e = ValueError(42) vanilla = self.get_report(e) @@ -2133,8 +2154,8 @@ def assertEqualExcept(actual, expected, ignore): def test_extract_tb(self): try: self.last_raises5() - except Exception: - exc_type, exc_value, tb = sys.exc_info() + except Exception as e: + tb = e.__traceback__ def extract(**kwargs): return traceback.extract_tb(tb, **kwargs) @@ -2160,12 +2181,12 @@ def extract(**kwargs): def test_format_exception(self): try: self.last_raises5() - except Exception: - exc_type, exc_value, tb = sys.exc_info() + except Exception as e: + exc = e # [1:-1] to exclude "Traceback (...)" header and # exception type and value def extract(**kwargs): - return traceback.format_exception(exc_type, exc_value, tb, **kwargs)[1:-1] + return traceback.format_exception(exc, **kwargs)[1:-1] with support.swap_attr(sys, 'tracebacklimit', 1000): nolim = extract() @@ -2203,8 +2224,8 @@ def inner(): try: outer() - except: - type_, value, tb = sys.exc_info() + except BaseException as e: + tb = e.__traceback__ # Initial assertion: there's one local in the inner frame. inner_frame = tb.tb_next.tb_next.tb_next.tb_frame @@ -2282,8 +2303,8 @@ def deeper(): def test_walk_tb(self): try: 1/0 - except Exception: - _, _, tb = sys.exc_info() + except Exception as e: + tb = e.__traceback__ s = list(traceback.walk_tb(tb)) self.assertEqual(len(s), 1) @@ -2386,10 +2407,10 @@ def f(): def g(): try: f() - except: - return sys.exc_info() + except Exception as e: + return e.__traceback__ - exc_info = g() + tb = g() class Skip_G(traceback.StackSummary): def format_frame_summary(self, frame_summary): @@ -2398,7 +2419,7 @@ def format_frame_summary(self, frame_summary): return super().format_frame_summary(frame_summary) stack = Skip_G.extract( - traceback.walk_tb(exc_info[2])).format() + traceback.walk_tb(tb)).format() self.assertEqual(len(stack), 1) lno = f.__code__.co_firstlineno + 1 @@ -2416,17 +2437,17 @@ class TestTracebackException(unittest.TestCase): def test_smoke(self): try: 1/0 - except Exception: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info) + except Exception as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(e) expected_stack = traceback.StackSummary.extract( - traceback.walk_tb(exc_info[2])) + traceback.walk_tb(e.__traceback__)) self.assertEqual(None, exc.__cause__) self.assertEqual(None, exc.__context__) self.assertEqual(False, exc.__suppress_context__) self.assertEqual(expected_stack, exc.stack) - self.assertEqual(exc_info[0], exc.exc_type) - self.assertEqual(str(exc_info[1]), str(exc)) + self.assertEqual(type(exc_obj), exc.exc_type) + self.assertEqual(str(exc_obj), str(exc)) def test_from_exception(self): # Check all the parameters are accepted. 
@@ -2435,9 +2456,10 @@ def foo(): try: foo() except Exception as e: - exc_info = sys.exc_info() + exc_obj = e + tb = e.__traceback__ self.expected_stack = traceback.StackSummary.extract( - traceback.walk_tb(exc_info[2]), limit=1, lookup_lines=False, + traceback.walk_tb(tb), limit=1, lookup_lines=False, capture_locals=True) self.exc = traceback.TracebackException.from_exception( e, limit=1, lookup_lines=False, capture_locals=True) @@ -2447,50 +2469,50 @@ def foo(): self.assertEqual(None, exc.__context__) self.assertEqual(False, exc.__suppress_context__) self.assertEqual(expected_stack, exc.stack) - self.assertEqual(exc_info[0], exc.exc_type) - self.assertEqual(str(exc_info[1]), str(exc)) + self.assertEqual(type(exc_obj), exc.exc_type) + self.assertEqual(str(exc_obj), str(exc)) def test_cause(self): try: try: 1/0 finally: - exc_info_context = sys.exc_info() - exc_context = traceback.TracebackException(*exc_info_context) + exc = sys.exception() + exc_context = traceback.TracebackException.from_exception(exc) cause = Exception("cause") raise Exception("uh oh") from cause - except Exception: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info) + except Exception as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(e) expected_stack = traceback.StackSummary.extract( - traceback.walk_tb(exc_info[2])) + traceback.walk_tb(e.__traceback__)) exc_cause = traceback.TracebackException(Exception, cause, None) self.assertEqual(exc_cause, exc.__cause__) self.assertEqual(exc_context, exc.__context__) self.assertEqual(True, exc.__suppress_context__) self.assertEqual(expected_stack, exc.stack) - self.assertEqual(exc_info[0], exc.exc_type) - self.assertEqual(str(exc_info[1]), str(exc)) + self.assertEqual(type(exc_obj), exc.exc_type) + self.assertEqual(str(exc_obj), str(exc)) def test_context(self): try: try: 1/0 finally: - exc_info_context = sys.exc_info() - exc_context = traceback.TracebackException(*exc_info_context) + exc = sys.exception() + exc_context = traceback.TracebackException.from_exception(exc) raise Exception("uh oh") - except Exception: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info) + except Exception as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(e) expected_stack = traceback.StackSummary.extract( - traceback.walk_tb(exc_info[2])) + traceback.walk_tb(e.__traceback__)) self.assertEqual(None, exc.__cause__) self.assertEqual(exc_context, exc.__context__) self.assertEqual(False, exc.__suppress_context__) self.assertEqual(expected_stack, exc.stack) - self.assertEqual(exc_info[0], exc.exc_type) - self.assertEqual(str(exc_info[1]), str(exc)) + self.assertEqual(type(exc_obj), exc.exc_type) + self.assertEqual(str(exc_obj), str(exc)) def test_long_context_chain(self): def f(): @@ -2501,12 +2523,12 @@ def f(): try: f() - except RecursionError: - exc_info = sys.exc_info() + except RecursionError as e: + exc_obj = e else: self.fail("Exception not raised") - te = traceback.TracebackException(*exc_info) + te = traceback.TracebackException.from_exception(exc_obj) res = list(te.format()) # many ZeroDiv errors followed by the RecursionError @@ -2524,58 +2546,58 @@ def test_compact_with_cause(self): finally: cause = Exception("cause") raise Exception("uh oh") from cause - except Exception: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info, compact=True) + except Exception as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(exc_obj, compact=True) expected_stack = 
traceback.StackSummary.extract( - traceback.walk_tb(exc_info[2])) + traceback.walk_tb(exc_obj.__traceback__)) exc_cause = traceback.TracebackException(Exception, cause, None) self.assertEqual(exc_cause, exc.__cause__) self.assertEqual(None, exc.__context__) self.assertEqual(True, exc.__suppress_context__) self.assertEqual(expected_stack, exc.stack) - self.assertEqual(exc_info[0], exc.exc_type) - self.assertEqual(str(exc_info[1]), str(exc)) + self.assertEqual(type(exc_obj), exc.exc_type) + self.assertEqual(str(exc_obj), str(exc)) def test_compact_no_cause(self): try: try: 1/0 finally: - exc_info_context = sys.exc_info() - exc_context = traceback.TracebackException(*exc_info_context) + exc = sys.exception() + exc_context = traceback.TracebackException.from_exception(exc) raise Exception("uh oh") - except Exception: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info, compact=True) + except Exception as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(e, compact=True) expected_stack = traceback.StackSummary.extract( - traceback.walk_tb(exc_info[2])) + traceback.walk_tb(exc_obj.__traceback__)) self.assertEqual(None, exc.__cause__) self.assertEqual(exc_context, exc.__context__) self.assertEqual(False, exc.__suppress_context__) self.assertEqual(expected_stack, exc.stack) - self.assertEqual(exc_info[0], exc.exc_type) - self.assertEqual(str(exc_info[1]), str(exc)) + self.assertEqual(type(exc_obj), exc.exc_type) + self.assertEqual(str(exc_obj), str(exc)) def test_no_refs_to_exception_and_traceback_objects(self): try: 1/0 - except Exception: - exc_info = sys.exc_info() + except Exception as e: + exc_obj = e - refcnt1 = sys.getrefcount(exc_info[1]) - refcnt2 = sys.getrefcount(exc_info[2]) - exc = traceback.TracebackException(*exc_info) - self.assertEqual(sys.getrefcount(exc_info[1]), refcnt1) - self.assertEqual(sys.getrefcount(exc_info[2]), refcnt2) + refcnt1 = sys.getrefcount(exc_obj) + refcnt2 = sys.getrefcount(exc_obj.__traceback__) + exc = traceback.TracebackException.from_exception(exc_obj) + self.assertEqual(sys.getrefcount(exc_obj), refcnt1) + self.assertEqual(sys.getrefcount(exc_obj.__traceback__), refcnt2) def test_comparison_basic(self): try: 1/0 - except Exception: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info) - exc2 = traceback.TracebackException(*exc_info) + except Exception as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(exc_obj) + exc2 = traceback.TracebackException.from_exception(exc_obj) self.assertIsNot(exc, exc2) self.assertEqual(exc, exc2) self.assertNotEqual(exc, object()) @@ -2594,28 +2616,28 @@ def raise_with_locals(): try: raise_with_locals() - except Exception: - exc_info = sys.exc_info() + except Exception as e: + exc_obj = e - exc = traceback.TracebackException(*exc_info) - exc1 = traceback.TracebackException(*exc_info, limit=10) - exc2 = traceback.TracebackException(*exc_info, limit=2) + exc = traceback.TracebackException.from_exception(exc_obj) + exc1 = traceback.TracebackException.from_exception(exc_obj, limit=10) + exc2 = traceback.TracebackException.from_exception(exc_obj, limit=2) self.assertEqual(exc, exc1) # limit=10 gets all frames self.assertNotEqual(exc, exc2) # limit=2 truncates the output # locals change the output - exc3 = traceback.TracebackException(*exc_info, capture_locals=True) + exc3 = traceback.TracebackException.from_exception(exc_obj, capture_locals=True) self.assertNotEqual(exc, exc3) # there are no locals in the innermost frame - exc4 = 
traceback.TracebackException(*exc_info, limit=-1) - exc5 = traceback.TracebackException(*exc_info, limit=-1, capture_locals=True) + exc4 = traceback.TracebackException.from_exception(exc_obj, limit=-1) + exc5 = traceback.TracebackException.from_exception(exc_obj, limit=-1, capture_locals=True) self.assertEqual(exc4, exc5) # there are locals in the next-to-innermost frame - exc6 = traceback.TracebackException(*exc_info, limit=-2) - exc7 = traceback.TracebackException(*exc_info, limit=-2, capture_locals=True) + exc6 = traceback.TracebackException.from_exception(exc_obj, limit=-2) + exc7 = traceback.TracebackException.from_exception(exc_obj, limit=-2, capture_locals=True) self.assertNotEqual(exc6, exc7) def test_comparison_equivalent_exceptions_are_equal(self): @@ -2623,8 +2645,8 @@ def test_comparison_equivalent_exceptions_are_equal(self): for _ in range(2): try: 1/0 - except: - excs.append(traceback.TracebackException(*sys.exc_info())) + except Exception as e: + excs.append(traceback.TracebackException.from_exception(e)) self.assertEqual(excs[0], excs[1]) self.assertEqual(list(excs[0].format()), list(excs[1].format())) @@ -2640,9 +2662,9 @@ def __eq__(self, other): except UnhashableException: try: raise ex1 - except UnhashableException: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info) + except UnhashableException as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(exc_obj) formatted = list(exc.format()) self.assertIn('UnhashableException: ex2\n', formatted[2]) self.assertIn('UnhashableException: ex1\n', formatted[6]) @@ -2655,11 +2677,10 @@ def recurse(n): 1/0 try: recurse(10) - except Exception: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info, limit=5) + except Exception as e: + exc = traceback.TracebackException.from_exception(e, limit=5) expected_stack = traceback.StackSummary.extract( - traceback.walk_tb(exc_info[2]), limit=5) + traceback.walk_tb(e.__traceback__), limit=5) self.assertEqual(expected_stack, exc.stack) def test_lookup_lines(self): @@ -2706,9 +2727,9 @@ def f(): x = 12 try: x/0 - except Exception: - return sys.exc_info() - exc = traceback.TracebackException(*f(), capture_locals=True) + except Exception as e: + return e + exc = traceback.TracebackException.from_exception(f(), capture_locals=True) output = StringIO() exc.print(file=output) self.assertEqual( @@ -2723,7 +2744,7 @@ def f(): class TestTracebackException_ExceptionGroups(unittest.TestCase): def setUp(self): super().setUp() - self.eg_info = self._get_exception_group() + self.eg = self._get_exception_group() def _get_exception_group(self): def f(): @@ -2753,26 +2774,26 @@ def g(v): except Exception as e: exc4 = e raise ExceptionGroup("eg2", [exc3, exc4]) - except ExceptionGroup: - return sys.exc_info() + except ExceptionGroup as eg: + return eg self.fail('Exception Not Raised') def test_exception_group_construction(self): - eg_info = self.eg_info - teg1 = traceback.TracebackException(*eg_info) - teg2 = traceback.TracebackException.from_exception(eg_info[1]) + eg = self.eg + teg1 = traceback.TracebackException(type(eg), eg, eg.__traceback__) + teg2 = traceback.TracebackException.from_exception(eg) self.assertIsNot(teg1, teg2) self.assertEqual(teg1, teg2) def test_exception_group_format_exception_only(self): - teg = traceback.TracebackException(*self.eg_info) + teg = traceback.TracebackException.from_exception(self.eg) formatted = ''.join(teg.format_exception_only()).split('\n') expected = "ExceptionGroup: eg2 (2 sub-exceptions)\n".split('\n') 
self.assertEqual(formatted, expected) def test_exception_group_format(self): - teg = traceback.TracebackException(*self.eg_info) + teg = traceback.TracebackException.from_exception(self.eg) formatted = ''.join(teg.format()).split('\n') lno_f = self.lno_f @@ -2831,26 +2852,26 @@ def test_max_group_width(self): formatted = ''.join(teg.format()).split('\n') expected = [ - f' | ExceptionGroup: eg (2 sub-exceptions)', - f' +-+---------------- 1 ----------------', - f' | ExceptionGroup: eg1 (3 sub-exceptions)', - f' +-+---------------- 1 ----------------', - f' | ValueError: 0', - f' +---------------- 2 ----------------', - f' | ValueError: 1', - f' +---------------- ... ----------------', - f' | and 1 more exception', - f' +------------------------------------', - f' +---------------- 2 ----------------', - f' | ExceptionGroup: eg2 (10 sub-exceptions)', - f' +-+---------------- 1 ----------------', - f' | TypeError: 0', - f' +---------------- 2 ----------------', - f' | TypeError: 1', - f' +---------------- ... ----------------', - f' | and 8 more exceptions', - f' +------------------------------------', - f''] + ' | ExceptionGroup: eg (2 sub-exceptions)', + ' +-+---------------- 1 ----------------', + ' | ExceptionGroup: eg1 (3 sub-exceptions)', + ' +-+---------------- 1 ----------------', + ' | ValueError: 0', + ' +---------------- 2 ----------------', + ' | ValueError: 1', + ' +---------------- ... ----------------', + ' | and 1 more exception', + ' +------------------------------------', + ' +---------------- 2 ----------------', + ' | ExceptionGroup: eg2 (10 sub-exceptions)', + ' +-+---------------- 1 ----------------', + ' | TypeError: 0', + ' +---------------- 2 ----------------', + ' | TypeError: 1', + ' +---------------- ... ----------------', + ' | and 8 more exceptions', + ' +------------------------------------', + ''] self.assertEqual(formatted, expected) @@ -2863,39 +2884,39 @@ def test_max_group_depth(self): formatted = ''.join(teg.format()).split('\n') expected = [ - f' | ExceptionGroup: exc (3 sub-exceptions)', - f' +-+---------------- 1 ----------------', - f' | ValueError: -2', - f' +---------------- 2 ----------------', - f' | ExceptionGroup: exc (3 sub-exceptions)', - f' +-+---------------- 1 ----------------', - f' | ValueError: -1', - f' +---------------- 2 ----------------', - f' | ... (max_group_depth is 2)', - f' +---------------- 3 ----------------', - f' | ValueError: 1', - f' +------------------------------------', - f' +---------------- 3 ----------------', - f' | ValueError: 2', - f' +------------------------------------', - f''] + ' | ExceptionGroup: exc (3 sub-exceptions)', + ' +-+---------------- 1 ----------------', + ' | ValueError: -2', + ' +---------------- 2 ----------------', + ' | ExceptionGroup: exc (3 sub-exceptions)', + ' +-+---------------- 1 ----------------', + ' | ValueError: -1', + ' +---------------- 2 ----------------', + ' | ... 
(max_group_depth is 2)', + ' +---------------- 3 ----------------', + ' | ValueError: 1', + ' +------------------------------------', + ' +---------------- 3 ----------------', + ' | ValueError: 2', + ' +------------------------------------', + ''] self.assertEqual(formatted, expected) def test_comparison(self): try: - raise self.eg_info[1] - except ExceptionGroup: - exc_info = sys.exc_info() + raise self.eg + except ExceptionGroup as e: + exc = e for _ in range(5): try: - raise exc_info[1] - except: - exc_info = sys.exc_info() - exc = traceback.TracebackException(*exc_info) - exc2 = traceback.TracebackException(*exc_info) - exc3 = traceback.TracebackException(*exc_info, limit=300) - ne = traceback.TracebackException(*exc_info, limit=3) + raise exc + except Exception as e: + exc_obj = e + exc = traceback.TracebackException.from_exception(exc_obj) + exc2 = traceback.TracebackException.from_exception(exc_obj) + exc3 = traceback.TracebackException.from_exception(exc_obj, limit=300) + ne = traceback.TracebackException.from_exception(exc_obj, limit=3) self.assertIsNot(exc, exc2) self.assertEqual(exc, exc2) self.assertEqual(exc, exc3) diff --git a/Lib/test/test_ttk/test_extensions.py b/Lib/test/test_ttk/test_extensions.py index 6135c49701f08e..d5e069716971fe 100644 --- a/Lib/test/test_ttk/test_extensions.py +++ b/Lib/test/test_ttk/test_extensions.py @@ -45,7 +45,9 @@ def test_widget_destroy(self): # value which causes the tracing callback to be called and then # it tries calling instance attributes not yet defined. ttk.LabeledScale(self.root, variable=myvar) - if hasattr(sys, 'last_type'): + if hasattr(sys, 'last_exc'): + self.assertNotEqual(type(sys.last_exc), tkinter.TclError) + elif hasattr(sys, 'last_type'): self.assertNotEqual(sys.last_type, tkinter.TclError) def test_initialization(self): diff --git a/Lib/test/test_turtle.py b/Lib/test/test_turtle.py index 95af84e3779824..3f9f129a3dd200 100644 --- a/Lib/test/test_turtle.py +++ b/Lib/test/test_turtle.py @@ -267,6 +267,14 @@ def test_goto(self): self.assertAlmostEqual(self.nav.xcor(), 100) self.assertAlmostEqual(self.nav.ycor(), -100) + def test_teleport(self): + self.nav.teleport(20, -30, fill_gap=True) + self.assertAlmostEqual(self.nav.xcor(), 20) + self.assertAlmostEqual(self.nav.ycor(), -30) + self.nav.teleport(-20, 30, fill_gap=False) + self.assertAlmostEqual(self.nav.xcor(), -20) + self.assertAlmostEqual(self.nav.ycor(), 30) + def test_pos(self): self.assertEqual(self.nav.pos(), self.nav._position) self.nav.goto(100, -100) @@ -440,6 +448,18 @@ def test_showturtle_hideturtle_and_isvisible(self): tpen.showturtle() self.assertTrue(tpen.isvisible()) + def test_teleport(self): + + tpen = turtle.TPen() + + for fill_gap_value in [True, False]: + tpen.penup() + tpen.teleport(100, 100, fill_gap=fill_gap_value) + self.assertFalse(tpen.isdown()) + tpen.pendown() + tpen.teleport(-100, -100, fill_gap=fill_gap_value) + self.assertTrue(tpen.isdown()) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py index 8502f6b0584b00..24f83cd3e172c7 100644 --- a/Lib/test/test_type_cache.py +++ b/Lib/test/test_type_cache.py @@ -9,6 +9,7 @@ # Skip this test if the _testcapi module isn't available. 
type_get_version = import_helper.import_module('_testcapi').type_get_version +type_assign_version = import_helper.import_module('_testcapi').type_assign_version @support.cpython_only @@ -42,6 +43,19 @@ def test_tp_version_tag_unique(self): self.assertEqual(len(set(all_version_tags)), 30, msg=f"{all_version_tags} contains non-unique versions") + def test_type_assign_version(self): + class C: + x = 5 + + self.assertEqual(type_assign_version(C), 1) + c_ver = type_get_version(C) + + C.x = 6 + self.assertEqual(type_get_version(C), 0) + self.assertEqual(type_assign_version(C), 1) + self.assertNotEqual(type_get_version(C), 0) + self.assertNotEqual(type_get_version(C), c_ver) + if __name__ == "__main__": support.run_unittest(TypeCacheTests) diff --git a/Lib/test/test_type_comments.py b/Lib/test/test_type_comments.py index 8db7394d1512aa..aba4a44be9da96 100644 --- a/Lib/test/test_type_comments.py +++ b/Lib/test/test_type_comments.py @@ -272,7 +272,7 @@ def test_matmul(self): pass def test_fstring(self): - for tree in self.parse_all(fstring, minver=6): + for tree in self.parse_all(fstring): pass def test_underscorednumber(self): diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index af095632a36fcb..89548100da62d7 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -925,6 +925,35 @@ def test_or_type_operator_with_SpecialForm(self): assert typing.Optional[int] | str == typing.Union[int, str, None] assert typing.Union[int, bool] | str == typing.Union[int, bool, str] + def test_or_type_operator_with_Literal(self): + Literal = typing.Literal + self.assertEqual((Literal[1] | Literal[2]).__args__, + (Literal[1], Literal[2])) + + self.assertEqual((Literal[0] | Literal[False]).__args__, + (Literal[0], Literal[False])) + self.assertEqual((Literal[1] | Literal[True]).__args__, + (Literal[1], Literal[True])) + + self.assertEqual(Literal[1] | Literal[1], Literal[1]) + self.assertEqual(Literal['a'] | Literal['a'], Literal['a']) + + import enum + class Ints(enum.IntEnum): + A = 0 + B = 1 + + self.assertEqual(Literal[Ints.A] | Literal[Ints.A], Literal[Ints.A]) + self.assertEqual(Literal[Ints.B] | Literal[Ints.B], Literal[Ints.B]) + + self.assertEqual((Literal[Ints.B] | Literal[Ints.A]).__args__, + (Literal[Ints.B], Literal[Ints.A])) + + self.assertEqual((Literal[0] | Literal[Ints.A]).__args__, + (Literal[0], Literal[Ints.A])) + self.assertEqual((Literal[1] | Literal[Ints.B]).__args__, + (Literal[1], Literal[Ints.B])) + def test_or_type_repr(self): assert repr(int | str) == "int | str" assert repr((int | str) | list) == "int | str | list" @@ -1360,6 +1389,67 @@ class C: pass D = types.new_class('D', (A(), C, B()), {}) self.assertEqual(D.__bases__, (A1, A2, A3, C, B1, B2)) + def test_get_original_bases(self): + T = typing.TypeVar('T') + class A: pass + class B(typing.Generic[T]): pass + class C(B[int]): pass + class D(B[str], float): pass + self.assertEqual(types.get_original_bases(A), (object,)) + self.assertEqual(types.get_original_bases(B), (typing.Generic[T],)) + self.assertEqual(types.get_original_bases(C), (B[int],)) + self.assertEqual(types.get_original_bases(int), (object,)) + self.assertEqual(types.get_original_bases(D), (B[str], float)) + + class E(list[T]): pass + class F(list[int]): pass + + self.assertEqual(types.get_original_bases(E), (list[T],)) + self.assertEqual(types.get_original_bases(F), (list[int],)) + + class ClassBasedNamedTuple(typing.NamedTuple): + x: int + + class GenericNamedTuple(typing.NamedTuple, typing.Generic[T]): + x: T + + CallBasedNamedTuple = 
typing.NamedTuple("CallBasedNamedTuple", [("x", int)]) + + self.assertIs( + types.get_original_bases(ClassBasedNamedTuple)[0], typing.NamedTuple + ) + self.assertEqual( + types.get_original_bases(GenericNamedTuple), + (typing.NamedTuple, typing.Generic[T]) + ) + self.assertIs( + types.get_original_bases(CallBasedNamedTuple)[0], typing.NamedTuple + ) + + class ClassBasedTypedDict(typing.TypedDict): + x: int + + class GenericTypedDict(typing.TypedDict, typing.Generic[T]): + x: T + + CallBasedTypedDict = typing.TypedDict("CallBasedTypedDict", {"x": int}) + + self.assertIs( + types.get_original_bases(ClassBasedTypedDict)[0], + typing.TypedDict + ) + self.assertEqual( + types.get_original_bases(GenericTypedDict), + (typing.TypedDict, typing.Generic[T]) + ) + self.assertIs( + types.get_original_bases(CallBasedTypedDict)[0], + typing.TypedDict + ) + + with self.assertRaisesRegex(TypeError, "Expected an instance of type"): + types.get_original_bases(object()) + # Many of the following tests are derived from test_descr.py def test_prepare_class(self): # Basic test of metaclass derivation diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index c9f55de95c548f..f162e587810ac0 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -117,7 +117,7 @@ def test_repr(self): class Sub(Any): pass self.assertEqual( repr(Sub), - "<class 'test.test_typing.AnyTests.test_repr.<locals>.Sub'>", + f"<class '{__name__}.AnyTests.test_repr.<locals>.Sub'>", ) def test_errors(self): @@ -855,6 +855,14 @@ def test_accepts_single_type(self): (*tuple[int],) Unpack[Tuple[int]] + def test_dir(self): + dir_items = set(dir(Unpack[Tuple[int]])) + for required_item in [ + '__args__', '__parameters__', '__origin__', + ]: + with self.subTest(required_item=required_item): + self.assertIn(required_item, dir_items) + def test_rejects_multiple_types(self): with self.assertRaises(TypeError): Unpack[Tuple[int], Tuple[str]] @@ -872,6 +880,11 @@ def test_cannot_be_called(self): with self.assertRaises(TypeError): Unpack() + def test_usage_with_kwargs(self): + Movie = TypedDict('Movie', {'name': str, 'year': int}) + def foo(**kwargs: Unpack[Movie]): ... 
+ self.assertEqual(repr(foo.__annotations__['kwargs']), + f"typing.Unpack[{__name__}.Movie]") class TypeVarTupleTests(BaseTestCase): @@ -1042,14 +1055,14 @@ class G2(Generic[Unpack[Ts]]): pass self.assertEqual(repr(Ts), 'Ts') - self.assertEqual(repr((*Ts,)[0]), '*Ts') - self.assertEqual(repr(Unpack[Ts]), '*Ts') + self.assertEqual(repr((*Ts,)[0]), 'typing.Unpack[Ts]') + self.assertEqual(repr(Unpack[Ts]), 'typing.Unpack[Ts]') - self.assertEqual(repr(tuple[*Ts]), 'tuple[*Ts]') - self.assertEqual(repr(Tuple[Unpack[Ts]]), 'typing.Tuple[*Ts]') + self.assertEqual(repr(tuple[*Ts]), 'tuple[typing.Unpack[Ts]]') + self.assertEqual(repr(Tuple[Unpack[Ts]]), 'typing.Tuple[typing.Unpack[Ts]]') - self.assertEqual(repr(*tuple[*Ts]), '*tuple[*Ts]') - self.assertEqual(repr(Unpack[Tuple[Unpack[Ts]]]), '*typing.Tuple[*Ts]') + self.assertEqual(repr(*tuple[*Ts]), '*tuple[typing.Unpack[Ts]]') + self.assertEqual(repr(Unpack[Tuple[Unpack[Ts]]]), 'typing.Unpack[typing.Tuple[typing.Unpack[Ts]]]') def test_variadic_class_repr_is_correct(self): Ts = TypeVarTuple('Ts') @@ -1066,86 +1079,86 @@ class B(Generic[Unpack[Ts]]): pass self.assertEndsWith(repr(A[*tuple[int, ...]]), 'A[*tuple[int, ...]]') self.assertEndsWith(repr(B[Unpack[Tuple[int, ...]]]), - 'B[*typing.Tuple[int, ...]]') + 'B[typing.Unpack[typing.Tuple[int, ...]]]') self.assertEndsWith(repr(A[float, *tuple[int, ...]]), 'A[float, *tuple[int, ...]]') self.assertEndsWith(repr(A[float, Unpack[Tuple[int, ...]]]), - 'A[float, *typing.Tuple[int, ...]]') + 'A[float, typing.Unpack[typing.Tuple[int, ...]]]') self.assertEndsWith(repr(A[*tuple[int, ...], str]), 'A[*tuple[int, ...], str]') self.assertEndsWith(repr(B[Unpack[Tuple[int, ...]], str]), - 'B[*typing.Tuple[int, ...], str]') + 'B[typing.Unpack[typing.Tuple[int, ...]], str]') self.assertEndsWith(repr(A[float, *tuple[int, ...], str]), 'A[float, *tuple[int, ...], str]') self.assertEndsWith(repr(B[float, Unpack[Tuple[int, ...]], str]), - 'B[float, *typing.Tuple[int, ...], str]') + 'B[float, typing.Unpack[typing.Tuple[int, ...]], str]') def test_variadic_class_alias_repr_is_correct(self): Ts = TypeVarTuple('Ts') class A(Generic[Unpack[Ts]]): pass B = A[*Ts] - self.assertEndsWith(repr(B), 'A[*Ts]') + self.assertEndsWith(repr(B), 'A[typing.Unpack[Ts]]') self.assertEndsWith(repr(B[()]), 'A[()]') self.assertEndsWith(repr(B[float]), 'A[float]') self.assertEndsWith(repr(B[float, str]), 'A[float, str]') C = A[Unpack[Ts]] - self.assertEndsWith(repr(C), 'A[*Ts]') + self.assertEndsWith(repr(C), 'A[typing.Unpack[Ts]]') self.assertEndsWith(repr(C[()]), 'A[()]') self.assertEndsWith(repr(C[float]), 'A[float]') self.assertEndsWith(repr(C[float, str]), 'A[float, str]') D = A[*Ts, int] - self.assertEndsWith(repr(D), 'A[*Ts, int]') + self.assertEndsWith(repr(D), 'A[typing.Unpack[Ts], int]') self.assertEndsWith(repr(D[()]), 'A[int]') self.assertEndsWith(repr(D[float]), 'A[float, int]') self.assertEndsWith(repr(D[float, str]), 'A[float, str, int]') E = A[Unpack[Ts], int] - self.assertEndsWith(repr(E), 'A[*Ts, int]') + self.assertEndsWith(repr(E), 'A[typing.Unpack[Ts], int]') self.assertEndsWith(repr(E[()]), 'A[int]') self.assertEndsWith(repr(E[float]), 'A[float, int]') self.assertEndsWith(repr(E[float, str]), 'A[float, str, int]') F = A[int, *Ts] - self.assertEndsWith(repr(F), 'A[int, *Ts]') + self.assertEndsWith(repr(F), 'A[int, typing.Unpack[Ts]]') self.assertEndsWith(repr(F[()]), 'A[int]') self.assertEndsWith(repr(F[float]), 'A[int, float]') self.assertEndsWith(repr(F[float, str]), 'A[int, float, str]') G = A[int, Unpack[Ts]] - 
self.assertEndsWith(repr(G), 'A[int, *Ts]') + self.assertEndsWith(repr(G), 'A[int, typing.Unpack[Ts]]') self.assertEndsWith(repr(G[()]), 'A[int]') self.assertEndsWith(repr(G[float]), 'A[int, float]') self.assertEndsWith(repr(G[float, str]), 'A[int, float, str]') H = A[int, *Ts, str] - self.assertEndsWith(repr(H), 'A[int, *Ts, str]') + self.assertEndsWith(repr(H), 'A[int, typing.Unpack[Ts], str]') self.assertEndsWith(repr(H[()]), 'A[int, str]') self.assertEndsWith(repr(H[float]), 'A[int, float, str]') self.assertEndsWith(repr(H[float, str]), 'A[int, float, str, str]') I = A[int, Unpack[Ts], str] - self.assertEndsWith(repr(I), 'A[int, *Ts, str]') + self.assertEndsWith(repr(I), 'A[int, typing.Unpack[Ts], str]') self.assertEndsWith(repr(I[()]), 'A[int, str]') self.assertEndsWith(repr(I[float]), 'A[int, float, str]') self.assertEndsWith(repr(I[float, str]), 'A[int, float, str, str]') J = A[*Ts, *tuple[str, ...]] - self.assertEndsWith(repr(J), 'A[*Ts, *tuple[str, ...]]') + self.assertEndsWith(repr(J), 'A[typing.Unpack[Ts], *tuple[str, ...]]') self.assertEndsWith(repr(J[()]), 'A[*tuple[str, ...]]') self.assertEndsWith(repr(J[float]), 'A[float, *tuple[str, ...]]') self.assertEndsWith(repr(J[float, str]), 'A[float, str, *tuple[str, ...]]') K = A[Unpack[Ts], Unpack[Tuple[str, ...]]] - self.assertEndsWith(repr(K), 'A[*Ts, *typing.Tuple[str, ...]]') - self.assertEndsWith(repr(K[()]), 'A[*typing.Tuple[str, ...]]') - self.assertEndsWith(repr(K[float]), 'A[float, *typing.Tuple[str, ...]]') - self.assertEndsWith(repr(K[float, str]), 'A[float, str, *typing.Tuple[str, ...]]') + self.assertEndsWith(repr(K), 'A[typing.Unpack[Ts], typing.Unpack[typing.Tuple[str, ...]]]') + self.assertEndsWith(repr(K[()]), 'A[typing.Unpack[typing.Tuple[str, ...]]]') + self.assertEndsWith(repr(K[float]), 'A[float, typing.Unpack[typing.Tuple[str, ...]]]') + self.assertEndsWith(repr(K[float, str]), 'A[float, str, typing.Unpack[typing.Tuple[str, ...]]]') def test_cannot_subclass(self): with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE): @@ -1163,9 +1176,9 @@ class C(type(Unpack[Ts])): pass with self.assertRaisesRegex(TypeError, r'Cannot subclass typing\.Unpack'): class C(Unpack): pass - with self.assertRaisesRegex(TypeError, r'Cannot subclass \*Ts'): + with self.assertRaisesRegex(TypeError, r'Cannot subclass typing.Unpack\[Ts\]'): class C(*Ts): pass - with self.assertRaisesRegex(TypeError, r'Cannot subclass \*Ts'): + with self.assertRaisesRegex(TypeError, r'Cannot subclass typing.Unpack\[Ts\]'): class C(Unpack[Ts]): pass def test_variadic_class_args_are_correct(self): @@ -1699,6 +1712,14 @@ def test_repr(self): u = Optional[str] self.assertEqual(repr(u), 'typing.Optional[str]') + def test_dir(self): + dir_items = set(dir(Union[str, int])) + for required_item in [ + '__args__', '__parameters__', '__origin__', + ]: + with self.subTest(required_item=required_item): + self.assertIn(required_item, dir_items) + def test_cannot_subclass(self): with self.assertRaisesRegex(TypeError, r'Cannot subclass typing\.Union'): @@ -1773,6 +1794,35 @@ def Elem(*args): Union[Elem, str] # Nor should this + def test_union_of_literals(self): + self.assertEqual(Union[Literal[1], Literal[2]].__args__, + (Literal[1], Literal[2])) + self.assertEqual(Union[Literal[1], Literal[1]], + Literal[1]) + + self.assertEqual(Union[Literal[False], Literal[0]].__args__, + (Literal[False], Literal[0])) + self.assertEqual(Union[Literal[True], Literal[1]].__args__, + (Literal[True], Literal[1])) + + import enum + class Ints(enum.IntEnum): + A = 0 + B = 1 + + 
self.assertEqual(Union[Literal[Ints.A], Literal[Ints.A]], + Literal[Ints.A]) + self.assertEqual(Union[Literal[Ints.B], Literal[Ints.B]], + Literal[Ints.B]) + + self.assertEqual(Union[Literal[Ints.A], Literal[Ints.B]].__args__, + (Literal[Ints.A], Literal[Ints.B])) + + self.assertEqual(Union[Literal[0], Literal[Ints.A], Literal[False]].__args__, + (Literal[0], Literal[Ints.A], Literal[False])) + self.assertEqual(Union[Literal[1], Literal[Ints.B], Literal[True]].__args__, + (Literal[1], Literal[Ints.B], Literal[True])) + class TupleTests(BaseTestCase): @@ -1839,6 +1889,15 @@ def test_eq_hash(self): self.assertNotEqual(C, Callable[..., int]) self.assertNotEqual(C, Callable) + def test_dir(self): + Callable = self.Callable + dir_items = set(dir(Callable[..., int])) + for required_item in [ + '__args__', '__parameters__', '__origin__', + ]: + with self.subTest(required_item=required_item): + self.assertIn(required_item, dir_items) + def test_cannot_instantiate(self): Callable = self.Callable with self.assertRaises(TypeError): @@ -2049,6 +2108,48 @@ def test_concatenate(self): Callable[Concatenate[int, str, P2], int]) self.assertEqual(C[...], Callable[Concatenate[int, ...], int]) + def test_nested_paramspec(self): + # Since Callable has some special treatment, we want to be sure + # that substituion works correctly, see gh-103054 + Callable = self.Callable + P = ParamSpec('P') + P2 = ParamSpec('P2') + T = TypeVar('T') + T2 = TypeVar('T2') + Ts = TypeVarTuple('Ts') + class My(Generic[P, T]): + pass + + self.assertEqual(My.__parameters__, (P, T)) + + C1 = My[[int, T2], Callable[P2, T2]] + self.assertEqual(C1.__args__, ((int, T2), Callable[P2, T2])) + self.assertEqual(C1.__parameters__, (T2, P2)) + self.assertEqual(C1[str, [list[int], bytes]], + My[[int, str], Callable[[list[int], bytes], str]]) + + C2 = My[[Callable[[T2], int], list[T2]], str] + self.assertEqual(C2.__args__, ((Callable[[T2], int], list[T2]), str)) + self.assertEqual(C2.__parameters__, (T2,)) + self.assertEqual(C2[list[str]], + My[[Callable[[list[str]], int], list[list[str]]], str]) + + C3 = My[[Callable[P2, T2], T2], T2] + self.assertEqual(C3.__args__, ((Callable[P2, T2], T2), T2)) + self.assertEqual(C3.__parameters__, (P2, T2)) + self.assertEqual(C3[[], int], + My[[Callable[[], int], int], int]) + self.assertEqual(C3[[str, bool], int], + My[[Callable[[str, bool], int], int], int]) + self.assertEqual(C3[[str, bool], T][int], + My[[Callable[[str, bool], int], int], int]) + + C4 = My[[Callable[[int, *Ts, str], T2], T2], T2] + self.assertEqual(C4.__args__, ((Callable[[int, *Ts, str], T2], T2), T2)) + self.assertEqual(C4.__parameters__, (Ts, T2)) + self.assertEqual(C4[bool, bytes, float], + My[[Callable[[int, bool, bytes, str], float], float], float]) + def test_errors(self): Callable = self.Callable alias = Callable[[int, str], float] @@ -2089,6 +2190,13 @@ def test_basics(self): Literal[Literal[1, 2], Literal[4, 5]] Literal[b"foo", u"bar"] + def test_enum(self): + import enum + class My(enum.Enum): + A = 'A' + + self.assertEqual(Literal[My.A].__args__, (My.A,)) + def test_illegal_parameters_do_not_raise_runtime_errors(self): # Type checkers should reject these types, but we do not # raise errors at runtime to maintain maximum flexibility. 
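The Literal hunks above pin down how unions deduplicate literal types: identical literals collapse to one, while values that merely compare equal across types (0/False, 1/True, plain ints vs. IntEnum members) are kept as separate members. A small sketch of the behaviour under test, assuming a typing module that includes these changes:

    from typing import Literal, Union

    # Identical literals collapse to a single Literal.
    assert Union[Literal[1], Literal[1]] == Literal[1]

    # 0 == False and 1 == True, but the types differ, so both members survive.
    assert Union[Literal[0], Literal[False]].__args__ == (Literal[0], Literal[False])
    assert Union[Literal[1], Literal[True]].__args__ == (Literal[1], Literal[True])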
@@ -2109,6 +2217,14 @@ def test_repr(self): self.assertEqual(repr(Literal[None]), "typing.Literal[None]") self.assertEqual(repr(Literal[1, 2, 3, 3]), "typing.Literal[1, 2, 3]") + def test_dir(self): + dir_items = set(dir(Literal[1, 2, 3])) + for required_item in [ + '__args__', '__parameters__', '__origin__', + ]: + with self.subTest(required_item=required_item): + self.assertIn(required_item, dir_items) + def test_cannot_init(self): with self.assertRaises(TypeError): Literal() @@ -2170,6 +2286,20 @@ def test_flatten(self): self.assertEqual(l, Literal[1, 2, 3]) self.assertEqual(l.__args__, (1, 2, 3)) + def test_does_not_flatten_enum(self): + import enum + class Ints(enum.IntEnum): + A = 1 + B = 2 + + l = Literal[ + Literal[Ints.A], + Literal[Ints.B], + Literal[1], + Literal[2], + ] + self.assertEqual(l.__args__, (Ints.A, Ints.B, 1, 2)) + XK = TypeVar('XK', str, bytes) XV = TypeVar('XV') @@ -2535,6 +2665,22 @@ def meth(x): ... class PG(Protocol[T]): def meth(x): ... + @runtime_checkable + class WeirdProto(Protocol): + meth = str.maketrans + + @runtime_checkable + class WeirdProto2(Protocol): + meth = lambda *args, **kwargs: None + + class CustomCallable: + def __call__(self, *args, **kwargs): + pass + + @runtime_checkable + class WeirderProto(Protocol): + meth = CustomCallable() + class BadP(Protocol): def meth(x): ... @@ -2544,8 +2690,15 @@ def meth(x): ... class C: def meth(x): ... - self.assertIsInstance(C(), P) - self.assertIsInstance(C(), PG) + class C2: + def __init__(self): + self.meth = lambda: None + + for klass in C, C2: + for proto in P, PG, WeirdProto, WeirdProto2, WeirderProto: + with self.subTest(klass=klass.__name__, proto=proto.__name__): + self.assertIsInstance(klass(), proto) + with self.assertRaises(TypeError): isinstance(C(), PG[T]) with self.assertRaises(TypeError): @@ -2595,7 +2748,15 @@ def attr(self): ... class PG1(Protocol[T]): attr: T - for protocol_class in P, P1, PG, PG1: + @runtime_checkable + class MethodP(Protocol): + def attr(self): ... + + @runtime_checkable + class MethodPG(Protocol[T]): + def attr(self) -> T: ... + + for protocol_class in P, P1, PG, PG1, MethodP, MethodPG: for klass in C, D, E, F: with self.subTest( klass=klass.__name__, @@ -2620,7 +2781,12 @@ def attr(self): ... class BadPG1(Protocol[T]): attr: T - for obj in PG[T], PG[C], PG1[T], PG1[C], BadP, BadP1, BadPG, BadPG1: + cases = ( + PG[T], PG[C], PG1[T], PG1[C], MethodPG[T], + MethodPG[C], BadP, BadP1, BadPG, BadPG1 + ) + + for obj in cases: for klass in C, D, E, F, Empty: with self.subTest(klass=klass.__name__, obj=obj): with self.assertRaises(TypeError): @@ -2643,6 +2809,82 @@ def __dir__(self): self.assertIsInstance(CustomDirWithX(), HasX) self.assertNotIsInstance(CustomDirWithoutX(), HasX) + def test_protocols_isinstance_attribute_access_with_side_effects(self): + class C: + @property + def attr(self): + raise AttributeError('no') + + class CustomDescriptor: + def __get__(self, obj, objtype=None): + raise RuntimeError("NO") + + class D: + attr = CustomDescriptor() + + # Check that properties set on superclasses + # are still found by the isinstance() logic + class E(C): ... + class F(D): ... + + class WhyWouldYouDoThis: + def __getattr__(self, name): + raise RuntimeError("wut") + + T = TypeVar('T') + + @runtime_checkable + class P(Protocol): + @property + def attr(self): ... + + @runtime_checkable + class P1(Protocol): + attr: int + + @runtime_checkable + class PG(Protocol[T]): + @property + def attr(self): ... 
+ + @runtime_checkable + class PG1(Protocol[T]): + attr: T + + @runtime_checkable + class MethodP(Protocol): + def attr(self): ... + + @runtime_checkable + class MethodPG(Protocol[T]): + def attr(self) -> T: ... + + for protocol_class in P, P1, PG, PG1, MethodP, MethodPG: + for klass in C, D, E, F: + with self.subTest( + klass=klass.__name__, + protocol_class=protocol_class.__name__ + ): + self.assertIsInstance(klass(), protocol_class) + + with self.subTest( + klass="WhyWouldYouDoThis", + protocol_class=protocol_class.__name__ + ): + self.assertNotIsInstance(WhyWouldYouDoThis(), protocol_class) + + def test_protocols_isinstance___slots__(self): + # As per the consensus in https://github.com/python/typing/issues/1367, + # this is desirable behaviour + @runtime_checkable + class HasX(Protocol): + x: int + + class HasNothingButSlots: + __slots__ = ("x",) + + self.assertIsInstance(HasNothingButSlots(), HasX) + def test_protocols_isinstance_py36(self): class APoint: def __init__(self, x, y, label): @@ -2698,6 +2940,20 @@ def __init__(self, x): self.assertIsInstance(C(1), P) self.assertIsInstance(C(1), PG) + def test_protocols_isinstance_monkeypatching(self): + @runtime_checkable + class HasX(Protocol): + x: int + + class Foo: ... + + f = Foo() + self.assertNotIsInstance(f, HasX) + f.x = 42 + self.assertIsInstance(f, HasX) + del f.x + self.assertNotIsInstance(f, HasX) + def test_protocol_checks_after_subscript(self): class P(Protocol[T]): pass class C(P[T]): pass @@ -3857,13 +4113,13 @@ class TsP(Generic[*Ts, P]): MyCallable[[int], bool]: "MyCallable[[int], bool]", MyCallable[[int, str], bool]: "MyCallable[[int, str], bool]", MyCallable[[int, list[int]], bool]: "MyCallable[[int, list[int]], bool]", - MyCallable[Concatenate[*Ts, P], T]: "MyCallable[typing.Concatenate[*Ts, ~P], ~T]", + MyCallable[Concatenate[*Ts, P], T]: "MyCallable[typing.Concatenate[typing.Unpack[Ts], ~P], ~T]", DoubleSpec[P2, P, T]: "DoubleSpec[~P2, ~P, ~T]", DoubleSpec[[int], [str], bool]: "DoubleSpec[[int], [str], bool]", DoubleSpec[[int, int], [str, str], bool]: "DoubleSpec[[int, int], [str, str], bool]", - TsP[*Ts, P]: "TsP[*Ts, ~P]", + TsP[*Ts, P]: "TsP[typing.Unpack[Ts], ~P]", TsP[int, str, list[int], []]: "TsP[int, str, list[int], []]", TsP[int, [str, list[int]]]: "TsP[int, [str, list[int]]]", @@ -6494,6 +6750,22 @@ def test_copy_and_pickle(self): self.assertEqual(jane2, jane) self.assertIsInstance(jane2, cls) + def test_orig_bases(self): + T = TypeVar('T') + + class SimpleNamedTuple(NamedTuple): + pass + + class GenericNamedTuple(NamedTuple, Generic[T]): + pass + + self.assertEqual(SimpleNamedTuple.__orig_bases__, (NamedTuple,)) + self.assertEqual(GenericNamedTuple.__orig_bases__, (NamedTuple, Generic[T])) + + CallNamedTuple = NamedTuple('CallNamedTuple', []) + + self.assertEqual(CallNamedTuple.__orig_bases__, (NamedTuple,)) + class TypedDictTests(BaseTestCase): def test_basics_functional_syntax(self): @@ -6925,6 +7197,49 @@ class TD(TypedDict): self.assertIs(type(a), dict) self.assertEqual(a, {'a': 1}) + def test_orig_bases(self): + T = TypeVar('T') + + class Parent(TypedDict): + pass + + class Child(Parent): + pass + + class OtherChild(Parent): + pass + + class MixedChild(Child, OtherChild, Parent): + pass + + class GenericParent(TypedDict, Generic[T]): + pass + + class GenericChild(GenericParent[int]): + pass + + class OtherGenericChild(GenericParent[str]): + pass + + class MixedGenericChild(GenericChild, OtherGenericChild, GenericParent[float]): + pass + + class MultipleGenericBases(GenericParent[int], 
GenericParent[float]): + pass + + CallTypedDict = TypedDict('CallTypedDict', {}) + + self.assertEqual(Parent.__orig_bases__, (TypedDict,)) + self.assertEqual(Child.__orig_bases__, (Parent,)) + self.assertEqual(OtherChild.__orig_bases__, (Parent,)) + self.assertEqual(MixedChild.__orig_bases__, (Child, OtherChild, Parent,)) + self.assertEqual(GenericParent.__orig_bases__, (TypedDict, Generic[T])) + self.assertEqual(GenericChild.__orig_bases__, (GenericParent[int],)) + self.assertEqual(OtherGenericChild.__orig_bases__, (GenericParent[str],)) + self.assertEqual(MixedGenericChild.__orig_bases__, (GenericChild, OtherGenericChild, GenericParent[float])) + self.assertEqual(MultipleGenericBases.__orig_bases__, (GenericParent[int], GenericParent[float])) + self.assertEqual(CallTypedDict.__orig_bases__, (TypedDict,)) + class RequiredTests(BaseTestCase): @@ -7147,6 +7462,15 @@ def test_repr(self): "typing.Annotated[typing.List[int], 4, 5]" ) + def test_dir(self): + dir_items = set(dir(Annotated[int, 4])) + for required_item in [ + '__args__', '__parameters__', '__origin__', + '__metadata__', + ]: + with self.subTest(required_item=required_item): + self.assertIn(required_item, dir_items) + def test_flatten(self): A = Annotated[Annotated[int, 4], 5] self.assertEqual(A, Annotated[int, 4, 5]) @@ -7679,6 +8003,127 @@ def test_bad_var_substitution(self): with self.assertRaises(TypeError): collections.abc.Callable[P, T][arg, str] + def test_type_var_subst_for_other_type_vars(self): + T = TypeVar('T') + T2 = TypeVar('T2') + P = ParamSpec('P') + P2 = ParamSpec('P2') + Ts = TypeVarTuple('Ts') + + class Base(Generic[P]): + pass + + A1 = Base[T] + self.assertEqual(A1.__parameters__, (T,)) + self.assertEqual(A1.__args__, ((T,),)) + self.assertEqual(A1[int], Base[int]) + + A2 = Base[[T]] + self.assertEqual(A2.__parameters__, (T,)) + self.assertEqual(A2.__args__, ((T,),)) + self.assertEqual(A2[int], Base[int]) + + A3 = Base[[int, T]] + self.assertEqual(A3.__parameters__, (T,)) + self.assertEqual(A3.__args__, ((int, T),)) + self.assertEqual(A3[str], Base[[int, str]]) + + A4 = Base[[T, int, T2]] + self.assertEqual(A4.__parameters__, (T, T2)) + self.assertEqual(A4.__args__, ((T, int, T2),)) + self.assertEqual(A4[str, bool], Base[[str, int, bool]]) + + A5 = Base[[*Ts, int]] + self.assertEqual(A5.__parameters__, (Ts,)) + self.assertEqual(A5.__args__, ((*Ts, int),)) + self.assertEqual(A5[str, bool], Base[[str, bool, int]]) + + A5_2 = Base[[int, *Ts]] + self.assertEqual(A5_2.__parameters__, (Ts,)) + self.assertEqual(A5_2.__args__, ((int, *Ts),)) + self.assertEqual(A5_2[str, bool], Base[[int, str, bool]]) + + A6 = Base[[T, *Ts]] + self.assertEqual(A6.__parameters__, (T, Ts)) + self.assertEqual(A6.__args__, ((T, *Ts),)) + self.assertEqual(A6[int, str, bool], Base[[int, str, bool]]) + + A7 = Base[[T, T]] + self.assertEqual(A7.__parameters__, (T,)) + self.assertEqual(A7.__args__, ((T, T),)) + self.assertEqual(A7[int], Base[[int, int]]) + + A8 = Base[[T, list[T]]] + self.assertEqual(A8.__parameters__, (T,)) + self.assertEqual(A8.__args__, ((T, list[T]),)) + self.assertEqual(A8[int], Base[[int, list[int]]]) + + A9 = Base[[Tuple[*Ts], *Ts]] + self.assertEqual(A9.__parameters__, (Ts,)) + self.assertEqual(A9.__args__, ((Tuple[*Ts], *Ts),)) + self.assertEqual(A9[int, str], Base[Tuple[int, str], int, str]) + + A10 = Base[P2] + self.assertEqual(A10.__parameters__, (P2,)) + self.assertEqual(A10.__args__, (P2,)) + self.assertEqual(A10[[int, str]], Base[[int, str]]) + + class DoubleP(Generic[P, P2]): + pass + + B1 = DoubleP[P, P2] + 
self.assertEqual(B1.__parameters__, (P, P2)) + self.assertEqual(B1.__args__, (P, P2)) + self.assertEqual(B1[[int, str], [bool]], DoubleP[[int, str], [bool]]) + self.assertEqual(B1[[], []], DoubleP[[], []]) + + B2 = DoubleP[[int, str], P2] + self.assertEqual(B2.__parameters__, (P2,)) + self.assertEqual(B2.__args__, ((int, str), P2)) + self.assertEqual(B2[[bool, bool]], DoubleP[[int, str], [bool, bool]]) + self.assertEqual(B2[[]], DoubleP[[int, str], []]) + + B3 = DoubleP[P, [bool, bool]] + self.assertEqual(B3.__parameters__, (P,)) + self.assertEqual(B3.__args__, (P, (bool, bool))) + self.assertEqual(B3[[int, str]], DoubleP[[int, str], [bool, bool]]) + self.assertEqual(B3[[]], DoubleP[[], [bool, bool]]) + + B4 = DoubleP[[T, int], [bool, T2]] + self.assertEqual(B4.__parameters__, (T, T2)) + self.assertEqual(B4.__args__, ((T, int), (bool, T2))) + self.assertEqual(B4[str, float], DoubleP[[str, int], [bool, float]]) + + B5 = DoubleP[[*Ts, int], [bool, T2]] + self.assertEqual(B5.__parameters__, (Ts, T2)) + self.assertEqual(B5.__args__, ((*Ts, int), (bool, T2))) + self.assertEqual(B5[str, bytes, float], + DoubleP[[str, bytes, int], [bool, float]]) + + B6 = DoubleP[[T, int], [bool, *Ts]] + self.assertEqual(B6.__parameters__, (T, Ts)) + self.assertEqual(B6.__args__, ((T, int), (bool, *Ts))) + self.assertEqual(B6[str, bytes, float], + DoubleP[[str, int], [bool, bytes, float]]) + + class PandT(Generic[P, T]): + pass + + C1 = PandT[P, T] + self.assertEqual(C1.__parameters__, (P, T)) + self.assertEqual(C1.__args__, (P, T)) + self.assertEqual(C1[[int, str], bool], PandT[[int, str], bool]) + + C2 = PandT[[int, T], T] + self.assertEqual(C2.__parameters__, (T,)) + self.assertEqual(C2.__args__, ((int, T), T)) + self.assertEqual(C2[str], PandT[[int, str], str]) + + C3 = PandT[[int, *Ts], T] + self.assertEqual(C3.__parameters__, (Ts, T)) + self.assertEqual(C3.__args__, ((int, *Ts), T)) + self.assertEqual(C3[str, bool, bytes], PandT[[int, str, bool], bytes]) + def test_paramspec_in_nested_generics(self): # Although ParamSpec should not be found in __parameters__ of most # generics, they probably should be found when nested in @@ -7744,6 +8189,15 @@ class MyClass: ... c = Concatenate[MyClass, P] self.assertNotEqual(c, Concatenate) + def test_dir(self): + P = ParamSpec('P') + dir_items = set(dir(Concatenate[int, P])) + for required_item in [ + '__args__', '__parameters__', '__origin__', + ]: + with self.subTest(required_item=required_item): + self.assertIn(required_item, dir_items) + def test_valid_uses(self): P = ParamSpec('P') T = TypeVar('T') @@ -8021,10 +8475,18 @@ class Foo(Generic[T]): def bar(self): pass baz = 3 + __magic__ = 4 + # The class attributes of the original class should be visible even # in dir() of the GenericAlias. See bpo-45755. 
- self.assertIn('bar', dir(Foo[int])) - self.assertIn('baz', dir(Foo[int])) + dir_items = set(dir(Foo[int])) + for required_item in [ + 'bar', 'baz', + '__args__', '__parameters__', '__origin__', + ]: + with self.subTest(required_item=required_item): + self.assertIn(required_item, dir_items) + self.assertNotIn('__magic__', dir_items) class RevealTypeTests(BaseTestCase): diff --git a/Lib/test/test_unittest/support.py b/Lib/test/test_unittest/support.py index 529265304f2882..8c97bf5c7297fe 100644 --- a/Lib/test/test_unittest/support.py +++ b/Lib/test/test_unittest/support.py @@ -136,3 +136,19 @@ def addSuccess(self, test): def wasSuccessful(self): return True + + +class BufferedWriter: + def __init__(self): + self.result = '' + self.buffer = '' + + def write(self, arg): + self.buffer += arg + + def flush(self): + self.result += self.buffer + self.buffer = '' + + def getvalue(self): + return self.result diff --git a/Lib/test/test_unittest/test_break.py b/Lib/test/test_unittest/test_break.py index 33cbdd2661c17e..1da98af3e74d49 100644 --- a/Lib/test/test_unittest/test_break.py +++ b/Lib/test/test_unittest/test_break.py @@ -236,6 +236,7 @@ def __init__(self, catchbreak): self.testRunner = FakeRunner self.test = test self.result = None + self.durations = None p = Program(False) p.runTests() @@ -244,7 +245,8 @@ def __init__(self, catchbreak): 'verbosity': verbosity, 'failfast': failfast, 'tb_locals': False, - 'warnings': None})]) + 'warnings': None, + 'durations': None})]) self.assertEqual(FakeRunner.runArgs, [test]) self.assertEqual(p.result, result) @@ -259,7 +261,8 @@ def __init__(self, catchbreak): 'verbosity': verbosity, 'failfast': failfast, 'tb_locals': False, - 'warnings': None})]) + 'warnings': None, + 'durations': None})]) self.assertEqual(FakeRunner.runArgs, [test]) self.assertEqual(p.result, result) diff --git a/Lib/test/test_unittest/test_case.py b/Lib/test/test_unittest/test_case.py index 05d60a8ad3cf94..dd5ff6d553e61d 100644 --- a/Lib/test/test_unittest/test_case.py +++ b/Lib/test/test_unittest/test_case.py @@ -304,7 +304,8 @@ def defaultTestResult(self): def test(self): pass - Foo('test').run() + with self.assertWarns(RuntimeWarning): + Foo('test').run() def test_deprecation_of_return_val_from_test(self): # Issue 41322 - deprecate return of value that is not None from a test diff --git a/Lib/test/test_unittest/test_program.py b/Lib/test/test_unittest/test_program.py index 3645bcf4b43562..f6d52f93e4a25f 100644 --- a/Lib/test/test_unittest/test_program.py +++ b/Lib/test/test_unittest/test_program.py @@ -71,15 +71,22 @@ def testExpectedFailure(self): def testUnexpectedSuccess(self): pass - class FooBarLoader(unittest.TestLoader): - """Test loader that returns a suite containing FooBar.""" + class Empty(unittest.TestCase): + pass + + class TestLoader(unittest.TestLoader): + """Test loader that returns a suite containing the supplied testcase.""" + + def __init__(self, testcase): + self.testcase = testcase + def loadTestsFromModule(self, module): return self.suiteClass( - [self.loadTestsFromTestCase(Test_TestProgram.FooBar)]) + [self.loadTestsFromTestCase(self.testcase)]) def loadTestsFromNames(self, names, module): return self.suiteClass( - [self.loadTestsFromTestCase(Test_TestProgram.FooBar)]) + [self.loadTestsFromTestCase(self.testcase)]) def test_defaultTest_with_string(self): class FakeRunner(object): @@ -92,7 +99,7 @@ def run(self, test): runner = FakeRunner() program = unittest.TestProgram(testRunner=runner, exit=False, defaultTest='test.test_unittest', - 
testLoader=self.FooBarLoader()) + testLoader=self.TestLoader(self.FooBar)) sys.argv = old_argv self.assertEqual(('test.test_unittest',), program.testNames) @@ -108,7 +115,7 @@ def run(self, test): program = unittest.TestProgram( testRunner=runner, exit=False, defaultTest=['test.test_unittest', 'test.test_unittest2'], - testLoader=self.FooBarLoader()) + testLoader=self.TestLoader(self.FooBar)) sys.argv = old_argv self.assertEqual(['test.test_unittest', 'test.test_unittest2'], program.testNames) @@ -118,7 +125,7 @@ def test_NonExit(self): program = unittest.main(exit=False, argv=["foobar"], testRunner=unittest.TextTestRunner(stream=stream), - testLoader=self.FooBarLoader()) + testLoader=self.TestLoader(self.FooBar)) self.assertTrue(hasattr(program, 'result')) out = stream.getvalue() self.assertIn('\nFAIL: testFail ', out) @@ -130,13 +137,13 @@ def test_NonExit(self): def test_Exit(self): stream = BufferedWriter() - self.assertRaises( - SystemExit, - unittest.main, - argv=["foobar"], - testRunner=unittest.TextTestRunner(stream=stream), - exit=True, - testLoader=self.FooBarLoader()) + with self.assertRaises(SystemExit) as cm: + unittest.main( + argv=["foobar"], + testRunner=unittest.TextTestRunner(stream=stream), + exit=True, + testLoader=self.TestLoader(self.FooBar)) + self.assertEqual(cm.exception.code, 1) out = stream.getvalue() self.assertIn('\nFAIL: testFail ', out) self.assertIn('\nERROR: testError ', out) @@ -147,12 +154,11 @@ def test_Exit(self): def test_ExitAsDefault(self): stream = BufferedWriter() - self.assertRaises( - SystemExit, - unittest.main, - argv=["foobar"], - testRunner=unittest.TextTestRunner(stream=stream), - testLoader=self.FooBarLoader()) + with self.assertRaises(SystemExit): + unittest.main( + argv=["foobar"], + testRunner=unittest.TextTestRunner(stream=stream), + testLoader=self.TestLoader(self.FooBar)) out = stream.getvalue() self.assertIn('\nFAIL: testFail ', out) self.assertIn('\nERROR: testError ', out) @@ -161,6 +167,17 @@ def test_ExitAsDefault(self): 'expected failures=1, unexpected successes=1)\n') self.assertTrue(out.endswith(expected)) + def test_ExitEmptySuite(self): + stream = BufferedWriter() + with self.assertRaises(SystemExit) as cm: + unittest.main( + argv=["empty"], + testRunner=unittest.TextTestRunner(stream=stream), + testLoader=self.TestLoader(self.Empty)) + self.assertEqual(cm.exception.code, 5) + out = stream.getvalue() + self.assertIn('\nNO TESTS RAN\n', out) + class InitialisableProgram(unittest.TestProgram): exit = False @@ -284,6 +301,7 @@ def testRunTestsRunnerClass(self): program.failfast = 'failfast' program.buffer = 'buffer' program.warnings = 'warnings' + program.durations = '5' program.runTests() @@ -291,7 +309,8 @@ def testRunTestsRunnerClass(self): 'failfast': 'failfast', 'buffer': 'buffer', 'tb_locals': False, - 'warnings': 'warnings'}) + 'warnings': 'warnings', + 'durations': '5'}) self.assertEqual(FakeRunner.test, 'test') self.assertIs(program.result, RESULT) @@ -320,7 +339,8 @@ def test_locals(self): 'failfast': False, 'tb_locals': True, 'verbosity': 1, - 'warnings': None}) + 'warnings': None, + 'durations': None}) def testRunTestsOldRunnerClass(self): program = self.program @@ -333,6 +353,7 @@ def testRunTestsOldRunnerClass(self): program.failfast = 'failfast' program.buffer = 'buffer' program.test = 'test' + program.durations = '0' program.runTests() @@ -356,6 +377,7 @@ def fakeInstallHandler(): program = self.program program.catchbreak = True + program.durations = None program.testRunner = FakeRunner diff --git 
a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py index efd9c902350506..db551b7890ca3e 100644 --- a/Lib/test/test_unittest/test_result.py +++ b/Lib/test/test_unittest/test_result.py @@ -6,7 +6,9 @@ import traceback import unittest +from unittest import mock from unittest.util import strclass +from test.test_unittest.support import BufferedWriter class MockTraceback(object): @@ -33,22 +35,6 @@ def bad_cleanup2(): raise ValueError('bad cleanup2') -class BufferedWriter: - def __init__(self): - self.result = '' - self.buffer = '' - - def write(self, arg): - self.buffer += arg - - def flush(self): - self.result += self.buffer - self.buffer = '' - - def getvalue(self): - return self.result - - class Test_TestResult(unittest.TestCase): # Note: there are not separate tests for TestResult.wasSuccessful(), # TestResult.errors, TestResult.failures, TestResult.testsRun or @@ -465,6 +451,7 @@ def testFailFastSetByRunner(self): stream = BufferedWriter() runner = unittest.TextTestRunner(stream=stream, failfast=True) def test(result): + result.testsRun += 1 self.assertTrue(result.failfast) result = runner.run(test) stream.flush() diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py index df584b7620d092..f3b2c0cffd4513 100644 --- a/Lib/test/test_unittest/test_runner.py +++ b/Lib/test/test_unittest/test_runner.py @@ -8,8 +8,11 @@ import unittest from unittest.case import _Outcome -from test.test_unittest.support import (LoggingResult, - ResultWithNoStartTestRunStopTestRun) +from test.test_unittest.support import ( + BufferedWriter, + LoggingResult, + ResultWithNoStartTestRunStopTestRun, +) def resultFactory(*_): @@ -574,6 +577,16 @@ def test(self): 'inner setup', 'inner test', 'inner cleanup', 'end outer test', 'outer cleanup']) + def test_run_empty_suite_error_message(self): + class EmptyTest(unittest.TestCase): + pass + + suite = unittest.defaultTestLoader.loadTestsFromTestCase(EmptyTest) + runner = getRunner() + runner.run(suite) + + self.assertIn("\nNO TESTS RAN\n", runner.stream.getvalue()) + class TestModuleCleanUp(unittest.TestCase): def test_add_and_do_ModuleCleanup(self): @@ -1176,6 +1189,7 @@ def test_init(self): self.assertTrue(runner.descriptions) self.assertEqual(runner.resultclass, unittest.TextTestResult) self.assertFalse(runner.tb_locals) + self.assertIsNone(runner.durations) def test_multiple_inheritance(self): class AResult(unittest.TestResult): @@ -1362,6 +1376,65 @@ def testSpecifiedStreamUsed(self): runner = unittest.TextTestRunner(f) self.assertTrue(runner.stream.stream is f) + def test_durations(self): + def run(test, *, expect_durations=True): + stream = BufferedWriter() + runner = unittest.TextTestRunner(stream=stream, durations=5, verbosity=2) + result = runner.run(test) + self.assertEqual(result.durations, 5) + stream.flush() + text = stream.getvalue() + regex = r"\n\d+.\d\d\ds" + if expect_durations: + self.assertEqual(len(result.collectedDurations), 1) + self.assertIn('Slowest test durations', text) + self.assertRegex(text, regex) + else: + self.assertEqual(len(result.collectedDurations), 0) + self.assertNotIn('Slowest test durations', text) + self.assertNotRegex(text, regex) + + # success + class Foo(unittest.TestCase): + def test_1(self): + pass + + run(Foo('test_1'), expect_durations=True) + + # failure + class Foo(unittest.TestCase): + def test_1(self): + self.assertEqual(0, 1) + + run(Foo('test_1'), expect_durations=True) + + # error + class Foo(unittest.TestCase): + def test_1(self): + 1 / 0 + + 
run(Foo('test_1'), expect_durations=True) + + + # error in setUp and tearDown + class Foo(unittest.TestCase): + def setUp(self): + 1 / 0 + tearDown = setUp + def test_1(self): + pass + + run(Foo('test_1'), expect_durations=True) + + # skip (expect no durations) + class Foo(unittest.TestCase): + @unittest.skip("reason") + def test_1(self): + pass + + run(Foo('test_1'), expect_durations=False) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_unittest/testmock/testhelpers.py b/Lib/test/test_unittest/testmock/testhelpers.py index 9e7ec5d62d5da2..74785a83757a92 100644 --- a/Lib/test/test_unittest/testmock/testhelpers.py +++ b/Lib/test/test_unittest/testmock/testhelpers.py @@ -952,6 +952,24 @@ def __getattr__(self, attribute): self.assertFalse(hasattr(autospec, '__name__')) + def test_autospec_signature_staticmethod(self): + class Foo: + @staticmethod + def static_method(a, b=10, *, c): pass + + mock = create_autospec(Foo.__dict__['static_method']) + self.assertEqual(inspect.signature(Foo.static_method), inspect.signature(mock)) + + + def test_autospec_signature_classmethod(self): + class Foo: + @classmethod + def class_method(cls, a, b=10, *, c): pass + + mock = create_autospec(Foo.__dict__['class_method']) + self.assertEqual(inspect.signature(Foo.class_method), inspect.signature(mock)) + + def test_spec_inspect_signature(self): def myfunc(x, y): pass @@ -1077,7 +1095,7 @@ def test_propertymock(self): p.stop() - def test_propertymock_returnvalue(self): + def test_propertymock_bare(self): m = MagicMock() p = PropertyMock() type(m).foo = p @@ -1088,6 +1106,27 @@ def test_propertymock_returnvalue(self): self.assertNotIsInstance(returned, PropertyMock) + def test_propertymock_returnvalue(self): + m = MagicMock() + p = PropertyMock(return_value=42) + type(m).foo = p + + returned = m.foo + p.assert_called_once_with() + self.assertEqual(returned, 42) + self.assertNotIsInstance(returned, PropertyMock) + + + def test_propertymock_side_effect(self): + m = MagicMock() + p = PropertyMock(side_effect=ValueError) + type(m).foo = p + + with self.assertRaises(ValueError): + m.foo + p.assert_called_once_with() + + class TestCallablePredicate(unittest.TestCase): def test_type(self): diff --git a/Lib/test/test_unittest/testmock/testpatch.py b/Lib/test/test_unittest/testmock/testpatch.py index 8ceb5d973e1aaf..833d7da1f31a20 100644 --- a/Lib/test/test_unittest/testmock/testpatch.py +++ b/Lib/test/test_unittest/testmock/testpatch.py @@ -996,6 +996,36 @@ def test_autospec_classmethod(self): method.assert_called_once_with() + def test_autospec_staticmethod_signature(self): + # Patched methods which are decorated with @staticmethod should have the same signature + class Foo: + @staticmethod + def static_method(a, b=10, *, c): pass + + Foo.static_method(1, 2, c=3) + + with patch.object(Foo, 'static_method', autospec=True) as method: + method(1, 2, c=3) + self.assertRaises(TypeError, method) + self.assertRaises(TypeError, method, 1) + self.assertRaises(TypeError, method, 1, 2, 3, c=4) + + + def test_autospec_classmethod_signature(self): + # Patched methods which are decorated with @classmethod should have the same signature + class Foo: + @classmethod + def class_method(cls, a, b=10, *, c): pass + + Foo.class_method(1, 2, c=3) + + with patch.object(Foo, 'class_method', autospec=True) as method: + method(1, 2, c=3) + self.assertRaises(TypeError, method) + self.assertRaises(TypeError, method, 1) + self.assertRaises(TypeError, method, 1, 2, 3, c=4) + + def test_autospec_with_new(self): patcher = 
patch('%s.function' % __name__, new=3, autospec=True) self.assertRaises(TypeError, patcher.start) diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index 633d596ac3de3f..99c9e24994732f 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -3,6 +3,7 @@ from test.support import os_helper from test.support import warnings_helper from test import test_urllib +from unittest import mock import os import io @@ -484,7 +485,18 @@ def build_test_opener(*handler_instances): return opener -class MockHTTPHandler(urllib.request.BaseHandler): +class MockHTTPHandler(urllib.request.HTTPHandler): + # Very simple mock HTTP handler with no special behavior other than using a mock HTTP connection + + def __init__(self, debuglevel=None): + super(MockHTTPHandler, self).__init__(debuglevel=debuglevel) + self.httpconn = MockHTTPClass() + + def http_open(self, req): + return self.do_open(self.httpconn, req) + + +class MockHTTPHandlerRedirect(urllib.request.BaseHandler): # useful for testing redirections and auth # sends supplied headers and code as first response # sends 200 OK as second response @@ -512,16 +524,17 @@ def http_open(self, req): return MockResponse(200, "OK", msg, "", req.get_full_url()) -class MockHTTPSHandler(urllib.request.AbstractHTTPHandler): - # Useful for testing the Proxy-Authorization request by verifying the - # properties of httpcon +if hasattr(http.client, 'HTTPSConnection'): + class MockHTTPSHandler(urllib.request.HTTPSHandler): + # Useful for testing the Proxy-Authorization request by verifying the + # properties of httpcon - def __init__(self, debuglevel=0): - urllib.request.AbstractHTTPHandler.__init__(self, debuglevel=debuglevel) - self.httpconn = MockHTTPClass() + def __init__(self, debuglevel=None, context=None, check_hostname=None): + super(MockHTTPSHandler, self).__init__(debuglevel, context, check_hostname) + self.httpconn = MockHTTPClass() - def https_open(self, req): - return self.do_open(self.httpconn, req) + def https_open(self, req): + return self.do_open(self.httpconn, req) class MockHTTPHandlerCheckAuth(urllib.request.BaseHandler): @@ -1048,12 +1061,37 @@ def test_http_body_array(self): newreq = h.do_request_(req) self.assertEqual(int(newreq.get_header('Content-length')),16) - def test_http_handler_debuglevel(self): + def test_http_handler_global_debuglevel(self): + with mock.patch.object(http.client.HTTPConnection, 'debuglevel', 6): + o = OpenerDirector() + h = MockHTTPHandler() + o.add_handler(h) + o.open("http://www.example.com") + self.assertEqual(h._debuglevel, 6) + + def test_http_handler_local_debuglevel(self): + o = OpenerDirector() + h = MockHTTPHandler(debuglevel=5) + o.add_handler(h) + o.open("http://www.example.com") + self.assertEqual(h._debuglevel, 5) + + @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.') + def test_https_handler_global_debuglevel(self): + with mock.patch.object(http.client.HTTPSConnection, 'debuglevel', 7): + o = OpenerDirector() + h = MockHTTPSHandler() + o.add_handler(h) + o.open("https://www.example.com") + self.assertEqual(h._debuglevel, 7) + + @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.') + def test_https_handler_local_debuglevel(self): o = OpenerDirector() - h = MockHTTPSHandler(debuglevel=1) + h = MockHTTPSHandler(debuglevel=4) o.add_handler(h) o.open("https://www.example.com") - self.assertEqual(h._debuglevel, 1) + self.assertEqual(h._debuglevel, 4) def test_http_doubleslash(self): # 
Checks the presence of any unnecessary double slash in url does not @@ -1289,7 +1327,7 @@ def test_cookie_redirect(self): cj = CookieJar() interact_netscape(cj, "http://www.example.com/", "spam=eggs") - hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n") + hh = MockHTTPHandlerRedirect(302, "Location: http://www.cracker.com/\r\n\r\n") hdeh = urllib.request.HTTPDefaultErrorHandler() hrh = urllib.request.HTTPRedirectHandler() cp = urllib.request.HTTPCookieProcessor(cj) @@ -1299,7 +1337,7 @@ def test_cookie_redirect(self): def test_redirect_fragment(self): redirected_url = 'http://www.example.com/index.html#OK\r\n\r\n' - hh = MockHTTPHandler(302, 'Location: ' + redirected_url) + hh = MockHTTPHandlerRedirect(302, 'Location: ' + redirected_url) hdeh = urllib.request.HTTPDefaultErrorHandler() hrh = urllib.request.HTTPRedirectHandler() o = build_test_opener(hh, hdeh, hrh) @@ -1421,6 +1459,7 @@ def test_proxy_https(self): self.assertEqual([(handlers[0], "https_open")], [tup[0:2] for tup in o.calls]) + @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.') def test_proxy_https_proxy_authorization(self): o = OpenerDirector() ph = urllib.request.ProxyHandler(dict(https='proxy.example.com:3128')) @@ -1484,7 +1523,7 @@ def check_basic_auth(self, headers, realm): password_manager = MockPasswordManager() auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager) body = '\r\n'.join(headers) + '\r\n\r\n' - http_handler = MockHTTPHandler(401, body) + http_handler = MockHTTPHandlerRedirect(401, body) opener.add_handler(auth_handler) opener.add_handler(http_handler) self._test_basic_auth(opener, auth_handler, "Authorization", @@ -1544,7 +1583,7 @@ def test_proxy_basic_auth(self): password_manager = MockPasswordManager() auth_handler = urllib.request.ProxyBasicAuthHandler(password_manager) realm = "ACME Networks" - http_handler = MockHTTPHandler( + http_handler = MockHTTPHandlerRedirect( 407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm) opener.add_handler(auth_handler) opener.add_handler(http_handler) @@ -1588,7 +1627,7 @@ def http_error_401(self, *args, **kwds): digest_handler = TestDigestAuthHandler(password_manager) basic_handler = TestBasicAuthHandler(password_manager) realm = "ACME Networks" - http_handler = MockHTTPHandler( + http_handler = MockHTTPHandlerRedirect( 401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm) opener.add_handler(basic_handler) opener.add_handler(digest_handler) @@ -1608,7 +1647,7 @@ def test_unsupported_auth_digest_handler(self): opener = OpenerDirector() # While using DigestAuthHandler digest_auth_handler = urllib.request.HTTPDigestAuthHandler(None) - http_handler = MockHTTPHandler( + http_handler = MockHTTPHandlerRedirect( 401, 'WWW-Authenticate: Kerberos\r\n\r\n') opener.add_handler(digest_auth_handler) opener.add_handler(http_handler) @@ -1618,7 +1657,7 @@ def test_unsupported_auth_basic_handler(self): # While using BasicAuthHandler opener = OpenerDirector() basic_auth_handler = urllib.request.HTTPBasicAuthHandler(None) - http_handler = MockHTTPHandler( + http_handler = MockHTTPHandlerRedirect( 401, 'WWW-Authenticate: NTLM\r\n\r\n') opener.add_handler(basic_auth_handler) opener.add_handler(http_handler) @@ -1705,7 +1744,7 @@ def test_basic_prior_auth_send_after_first_success(self): opener = OpenerDirector() opener.add_handler(auth_prior_handler) - http_handler = MockHTTPHandler( + http_handler = MockHTTPHandlerRedirect( 401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % None) 
opener.add_handler(http_handler) diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py index 5da41c37bbfb8e..d8d882b2d33589 100644 --- a/Lib/test/test_urllib2net.py +++ b/Lib/test/test_urllib2net.py @@ -134,7 +134,9 @@ def setUp(self): # They do sometimes catch some major disasters, though. def test_ftp(self): + # Testing the same URL twice exercises the caching in CacheFTPHandler urls = [ + 'ftp://www.pythontest.net/README', 'ftp://www.pythontest.net/README', ('ftp://www.pythontest.net/non-existent-file', None, urllib.error.URLError), diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index b2c229cd634e31..a178e942ecda0f 100755 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -600,7 +600,22 @@ def test_uuid1_time(self): def test_uuid3(self): equal = self.assertEqual - # Test some known version-3 UUIDs. + # Test some known version-3 UUIDs with name passed as a byte object + for u, v in [(self.uuid.uuid3(self.uuid.NAMESPACE_DNS, b'python.org'), + '6fa459ea-ee8a-3ca4-894e-db77e160355e'), + (self.uuid.uuid3(self.uuid.NAMESPACE_URL, b'http://python.org/'), + '9fe8e8c4-aaa8-32a9-a55c-4535a88b748d'), + (self.uuid.uuid3(self.uuid.NAMESPACE_OID, b'1.3.6.1'), + 'dd1a1cef-13d5-368a-ad82-eca71acd4cd1'), + (self.uuid.uuid3(self.uuid.NAMESPACE_X500, b'c=ca'), + '658d3002-db6b-3040-a1d1-8ddd7d189a4d'), + ]: + equal(u.variant, self.uuid.RFC_4122) + equal(u.version, 3) + equal(u, self.uuid.UUID(v)) + equal(str(u), v) + + # Test some known version-3 UUIDs with name passed as a string for u, v in [(self.uuid.uuid3(self.uuid.NAMESPACE_DNS, 'python.org'), '6fa459ea-ee8a-3ca4-894e-db77e160355e'), (self.uuid.uuid3(self.uuid.NAMESPACE_URL, 'http://python.org/'), @@ -632,7 +647,22 @@ def test_uuid4(self): def test_uuid5(self): equal = self.assertEqual - # Test some known version-5 UUIDs. + # Test some known version-5 UUIDs with names given as byte objects + for u, v in [(self.uuid.uuid5(self.uuid.NAMESPACE_DNS, b'python.org'), + '886313e1-3b8a-5372-9b90-0c9aee199e5d'), + (self.uuid.uuid5(self.uuid.NAMESPACE_URL, b'http://python.org/'), + '4c565f0d-3f5a-5890-b41b-20cf47701c5e'), + (self.uuid.uuid5(self.uuid.NAMESPACE_OID, b'1.3.6.1'), + '1447fa61-5277-5fef-a9b3-fbc6e44f4af3'), + (self.uuid.uuid5(self.uuid.NAMESPACE_X500, b'c=ca'), + 'cc957dd1-a972-5349-98cd-874190002798'), + ]: + equal(u.variant, self.uuid.RFC_4122) + equal(u.version, 5) + equal(u, self.uuid.UUID(v)) + equal(str(u), v) + + # Test some known version-5 UUIDs with names given as strings for u, v in [(self.uuid.uuid5(self.uuid.NAMESPACE_DNS, 'python.org'), '886313e1-3b8a-5372-9b90-0c9aee199e5d'), (self.uuid.uuid5(self.uuid.NAMESPACE_URL, 'http://python.org/'), diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 4e18dfc23c40c2..95944c7c711620 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -227,7 +227,6 @@ def pip_cmd_checker(cmd, **kwargs): 'install', '--upgrade', 'pip', - 'setuptools' ] ) @@ -601,9 +600,15 @@ def test_zippath_from_non_installed_posix(self): ld_library_path_env = "DYLD_LIBRARY_PATH" else: ld_library_path_env = "LD_LIBRARY_PATH" - subprocess.check_call(cmd, - env={"PYTHONPATH": pythonpath, - ld_library_path_env: ld_library_path}) + # Note that in address sanitizer mode, the current runtime + # implementation leaks memory due to not being able to correctly + # clean all unicode objects during runtime shutdown. Therefore, + # this uses subprocess.run instead of subprocess.check_call to + # maintain the core of the test while not failing due to the refleaks. 
+ # This should be able to use check_call once all refleaks are fixed. + subprocess.run(cmd, + env={"PYTHONPATH": pythonpath, + ld_library_path_env: ld_library_path}) envpy = os.path.join(self.env_dir, self.bindir, self.exe) # Now check the venv created from the non-installed python has # correct zip path in pythonpath. @@ -611,6 +616,22 @@ def test_zippath_from_non_installed_posix(self): out, err = check_output(cmd) self.assertTrue(zip_landmark.encode() in out) + def test_activate_shell_script_has_no_dos_newlines(self): + """ + Test that the `activate` shell script contains no CR LF. + This is relevant for Cygwin, as the Windows build might have + converted line endings accidentally. + """ + venv_dir = pathlib.Path(self.env_dir) + rmtree(venv_dir) + [[scripts_dir], *_] = self.ENV_SUBDIRS + script_path = venv_dir / scripts_dir / "activate" + venv.create(venv_dir) + with open(script_path, 'rb') as script: + for i, line in enumerate(script, 1): + error_message = f"CR LF found in line {i}" + self.assertFalse(line.endswith(b'\r\n'), error_message) + @requireVenvCreate class EnsurePipTest(BaseTest): """Test venv module installation of pip.""" @@ -729,7 +750,6 @@ def do_test_with_pip(self, system_site_packages): # future pip versions, this test can likely be relaxed further. out = out.decode("latin-1") # Force to text, prevent decoding errors self.assertIn("Successfully uninstalled pip", out) - self.assertIn("Successfully uninstalled setuptools", out) # Check pip is now gone from the virtual environment. This only # applies in the system_site_packages=False case, because in the # other case, pip may still be available in the system site-packages diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 7c5920797d2538..1bc1d05f7daba9 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -116,6 +116,17 @@ def test_basic_ref(self): del o repr(wr) + def test_repr_failure_gh99184(self): + class MyConfig(dict): + def __getattr__(self, x): + return self[x] + + obj = MyConfig(offset=5) + obj_weakref = weakref.ref(obj) + + self.assertIn('MyConfig', repr(obj_weakref)) + self.assertIn('MyConfig', str(obj_weakref)) + def test_basic_callback(self): self.check_basic_callback(C) self.check_basic_callback(create_function) diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py index 9d608d63a01ed3..2d695bc883131f 100644 --- a/Lib/test/test_webbrowser.py +++ b/Lib/test/test_webbrowser.py @@ -11,7 +11,7 @@ if not support.has_subprocess_support: raise unittest.SkipTest("test webserver requires subprocess") -URL = 'http://www.example.com' +URL = 'https://www.example.com' CMD_NAME = 'test' @@ -95,9 +95,9 @@ def test_open_new_tab(self): arguments=[URL]) -class MozillaCommandTest(CommandTestMixin, unittest.TestCase): +class EdgeCommandTest(CommandTestMixin, unittest.TestCase): - browser_class = webbrowser.Mozilla + browser_class = webbrowser.Edge def test_open(self): self._test('open', @@ -111,43 +111,43 @@ def test_open_with_autoraise_false(self): def test_open_new(self): self._test('open_new', - options=[], - arguments=['-new-window', URL]) + options=['--new-window'], + arguments=[URL]) def test_open_new_tab(self): self._test('open_new_tab', options=[], - arguments=['-new-tab', URL]) + arguments=[URL]) -class NetscapeCommandTest(CommandTestMixin, unittest.TestCase): +class MozillaCommandTest(CommandTestMixin, unittest.TestCase): - browser_class = webbrowser.Netscape + browser_class = webbrowser.Mozilla def test_open(self): self._test('open', - options=['-raise', 
'-remote'], - arguments=['openURL({})'.format(URL)]) + options=[], + arguments=[URL]) def test_open_with_autoraise_false(self): self._test('open', kw=dict(autoraise=False), - options=['-noraise', '-remote'], - arguments=['openURL({})'.format(URL)]) + options=[], + arguments=[URL]) def test_open_new(self): self._test('open_new', - options=['-raise', '-remote'], - arguments=['openURL({},new-window)'.format(URL)]) + options=[], + arguments=['-new-window', URL]) def test_open_new_tab(self): self._test('open_new_tab', - options=['-raise', '-remote'], - arguments=['openURL({},new-tab)'.format(URL)]) + options=[], + arguments=['-new-tab', URL]) -class GaleonCommandTest(CommandTestMixin, unittest.TestCase): +class EpiphanyCommandTest(CommandTestMixin, unittest.TestCase): - browser_class = webbrowser.Galeon + browser_class = webbrowser.Epiphany def test_open(self): self._test('open', diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py index 769ab67b0f5611..924a962781a75b 100644 --- a/Lib/test/test_winreg.py +++ b/Lib/test/test_winreg.py @@ -1,11 +1,12 @@ # Test the windows specific win32reg module. # Only win32reg functions not hit here: FlushKey, LoadKey and SaveKey +import gc import os, sys, errno -import unittest -from test.support import import_helper import threading +import unittest from platform import machine, win32_edition +from test.support import cpython_only, import_helper # Do this first so test will be skipped if module doesn't exist import_helper.import_module('winreg', required_on=['win']) @@ -49,6 +50,17 @@ ("Japanese 日本", "日本語", REG_SZ), ] + +@cpython_only +class HeapTypeTests(unittest.TestCase): + def test_have_gc(self): + self.assertTrue(gc.is_tracked(HKEYType)) + + def test_immutable(self): + with self.assertRaisesRegex(TypeError, "immutable"): + HKEYType.foo = "bar" + + class BaseWinregTests(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_with.py b/Lib/test/test_with.py index 07522bda6a5583..d81902327a7e0a 100644 --- a/Lib/test/test_with.py +++ b/Lib/test/test_with.py @@ -79,11 +79,11 @@ def __exit__(self, *exc_info): try: if mgr.__exit__(*ex): ex = (None, None, None) - except: - ex = sys.exc_info() + except BaseException as e: + ex = (type(e), e, e.__traceback__) self.entered = None if ex is not exc_info: - raise ex[0](ex[1]).with_traceback(ex[2]) + raise ex class MockNested(Nested): diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py index e23f5c2a8556f2..73c6b0185a1a0e 100644 --- a/Lib/test/test_zipfile/test_core.py +++ b/Lib/test/test_zipfile/test_core.py @@ -1616,6 +1616,33 @@ def test_write_unicode_filenames(self): self.assertEqual(zf.filelist[0].filename, "foo.txt") self.assertEqual(zf.filelist[1].filename, "\xf6.txt") + @requires_zlib() + def test_read_zipfile_containing_unicode_path_extra_field(self): + with zipfile.ZipFile(TESTFN, mode='w') as zf: + # create a file with a non-ASCII name + filename = '이름.txt' + filename_encoded = filename.encode('utf-8') + + # create a ZipInfo object with Unicode path extra field + zip_info = zipfile.ZipInfo(filename) + + tag_for_unicode_path = b'\x75\x70' + version_of_unicode_path = b'\x01' + + import zlib + filename_crc = struct.pack('<L', zlib.crc32(filename_encoded)) + + extra_data = version_of_unicode_path + filename_crc + filename_encoded + tsize = len(extra_data).to_bytes(2, 'little') + + zip_info.extra = tag_for_unicode_path + tsize + extra_data + + # add the file to the ZIP archive + zf.writestr(zip_info, b'Hello World!') + + with zipfile.ZipFile(TESTFN, "r") as zf: 
+ self.assertEqual(zf.filelist[0].filename, "이름.txt") + def test_read_after_write_unicode_filenames(self): with zipfile.ZipFile(TESTFN2, 'w') as zipfp: zipfp.writestr('приклад', b'sample') diff --git a/Lib/test/wheel-0.40.0-py3-none-any.whl b/Lib/test/wheel-0.40.0-py3-none-any.whl new file mode 100644 index 00000000000000..410132385bba4d Binary files /dev/null and b/Lib/test/wheel-0.40.0-py3-none-any.whl differ diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index 7565e0f7e46073..bf0b3b92155938 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -2400,6 +2400,7 @@ def report_callback_exception(self, exc, val, tb): should when sys.stderr is None.""" import traceback print("Exception in Tkinter callback", file=sys.stderr) + sys.last_exc = val sys.last_type = exc sys.last_value = val sys.last_traceback = tb @@ -3429,8 +3430,7 @@ def entryconfigure(self, index, cnf=None, **kw): def index(self, index): """Return the index of a menu item identified by INDEX.""" i = self.tk.call(self._w, 'index', index) - if i == 'none': return None - return self.tk.getint(i) + return None if i in ('', 'none') else self.tk.getint(i) # GH-103685. def invoke(self, index): """Invoke a menu item identified by INDEX and execute diff --git a/Lib/tkinter/filedialog.py b/Lib/tkinter/filedialog.py index 600d0bd49fe2cf..e2eff98e601c07 100644 --- a/Lib/tkinter/filedialog.py +++ b/Lib/tkinter/filedialog.py @@ -461,7 +461,6 @@ def test(): # Start off with UTF-8 enc = "utf-8" - import sys # See whether CODESET is defined try: @@ -477,9 +476,9 @@ def test(): try: fp=open(openfilename,"r") fp.close() - except: + except BaseException as exc: print("Could not open File: ") - print(sys.exc_info()[1]) + print(exc) print("open", openfilename.encode(enc)) diff --git a/Lib/token.py b/Lib/token.py index 95b107c6643b3f..1459d12b376f82 100644 --- a/Lib/token.py +++ b/Lib/token.py @@ -57,18 +57,22 @@ RARROW = 51 ELLIPSIS = 52 COLONEQUAL = 53 -OP = 54 -AWAIT = 55 -ASYNC = 56 -TYPE_IGNORE = 57 -TYPE_COMMENT = 58 -SOFT_KEYWORD = 59 +EXCLAMATION = 54 +OP = 55 +AWAIT = 56 +ASYNC = 57 +TYPE_IGNORE = 58 +TYPE_COMMENT = 59 +SOFT_KEYWORD = 60 +FSTRING_START = 61 +FSTRING_MIDDLE = 62 +FSTRING_END = 63 # These aren't used by the C tokenizer but are needed for tokenize.py -ERRORTOKEN = 60 -COMMENT = 61 -NL = 62 -ENCODING = 63 -N_TOKENS = 64 +ERRORTOKEN = 64 +COMMENT = 65 +NL = 66 +ENCODING = 67 +N_TOKENS = 68 # Special definitions for cooperation with parser NT_OFFSET = 256 @@ -78,6 +82,7 @@ __all__.extend(tok_name.values()) EXACT_TOKEN_TYPES = { + '!': EXCLAMATION, '!=': NOTEQUAL, '%': PERCENT, '%=': PERCENTEQUAL, diff --git a/Lib/trace.py b/Lib/trace.py index 213e46517d683d..fb9a423ea09fce 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -49,6 +49,7 @@ """ __all__ = ['Trace', 'CoverageResults'] +import io import linecache import os import sys @@ -716,7 +717,7 @@ def parse_ignore_dir(s): sys.argv = [opts.progname, *opts.arguments] sys.path[0] = os.path.dirname(opts.progname) - with open(opts.progname, 'rb') as fp: + with io.open_code(opts.progname) as fp: code = compile(fp.read(), opts.progname, 'exec') # try to emulate __main__ namespace as much as possible globs = { diff --git a/Lib/traceback.py b/Lib/traceback.py index c43c4720ae5a15..419f6e81b5e1be 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -179,20 +179,24 @@ def _safe_string(value, what, func=str): # -- def print_exc(limit=None, file=None, chain=True): - """Shorthand for 'print_exception(*sys.exc_info(), limit, file)'.""" - 
print_exception(*sys.exc_info(), limit=limit, file=file, chain=chain) + """Shorthand for 'print_exception(sys.exception(), limit, file, chain)'.""" + print_exception(sys.exception(), limit=limit, file=file, chain=chain) def format_exc(limit=None, chain=True): """Like print_exc() but return a string.""" - return "".join(format_exception(*sys.exc_info(), limit=limit, chain=chain)) + return "".join(format_exception(sys.exception(), limit=limit, chain=chain)) def print_last(limit=None, file=None, chain=True): - """This is a shorthand for 'print_exception(sys.last_type, - sys.last_value, sys.last_traceback, limit, file)'.""" - if not hasattr(sys, "last_type"): + """This is a shorthand for 'print_exception(sys.last_exc, limit, file, chain)'.""" + if not hasattr(sys, "last_exc") and not hasattr(sys, "last_type"): raise ValueError("no last exception") - print_exception(sys.last_type, sys.last_value, sys.last_traceback, - limit, file, chain) + + if hasattr(sys, "last_exc"): + print_exception(sys.last_exc, limit, file, chain) + else: + print_exception(sys.last_type, sys.last_value, sys.last_traceback, + limit, file, chain) + # # Printing and Extracting Stacks. @@ -848,12 +852,16 @@ def format_exception_only(self): yield _format_final_exc_line(stype, self._str) else: yield from self._format_syntax_error(stype) - if isinstance(self.__notes__, collections.abc.Sequence): + + if ( + isinstance(self.__notes__, collections.abc.Sequence) + and not isinstance(self.__notes__, (str, bytes)) + ): for note in self.__notes__: note = _safe_string(note, 'note') yield from [l + '\n' for l in note.split('\n')] elif self.__notes__ is not None: - yield _safe_string(self.__notes__, '__notes__', func=repr) + yield "{}\n".format(_safe_string(self.__notes__, '__notes__', func=repr)) def _format_syntax_error(self, stype): """Format SyntaxError exceptions (internal helper).""" diff --git a/Lib/turtle.py b/Lib/turtle.py index 1b369327bc8eff..2de406e0f517af 100644 --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -135,7 +135,7 @@ 'pu', 'radians', 'right', 'reset', 'resizemode', 'rt', 'seth', 'setheading', 'setpos', 'setposition', 'settiltangle', 'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle', - 'speed', 'st', 'stamp', 'tilt', 'tiltangle', 'towards', + 'speed', 'st', 'stamp', 'teleport', 'tilt', 'tiltangle', 'towards', 'turtlesize', 'undo', 'undobufferentries', 'up', 'width', 'write', 'xcor', 'ycor'] _tg_utilities = ['write_docstringdict', 'done'] @@ -1614,6 +1614,13 @@ def _goto(self, end): """move turtle to position end.""" self._position = end + def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: + """To be overwritten by child class RawTurtle. + Includes no TPen references.""" + new_x = x if x is not None else self._position[0] + new_y = y if y is not None else self._position[1] + self._position = Vec2D(new_x, new_y) + def forward(self, distance): """Move the turtle forward by the specified distance. @@ -2293,6 +2300,15 @@ def fillcolor(self, *args): else: return self._color(self._fillcolor) + def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: + """To be overwritten by child class RawTurtle. + Includes no TNavigator references. + """ + pendown = self.isdown() + if pendown: + self.pen(pendown=False) + self.pen(pendown=pendown) + def showturtle(self): """Makes the turtle visible. 
@@ -2710,6 +2726,54 @@ def _cc(self, args): if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)): raise TurtleGraphicsError("bad color sequence: %s" % str(args)) return "#%02x%02x%02x" % (r, g, b) + + def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: + """Instantly move turtle to an absolute position. + + Arguments: + x -- a number or None + y -- a number None + fill_gap -- a boolean This argument must be specified by name. + + call: teleport(x, y) # two coordinates + --or: teleport(x) # teleport to x position, keeping y as is + --or: teleport(y=y) # teleport to y position, keeping x as is + --or: teleport(x, y, fill_gap=True) + # teleport but fill the gap in between + + Move turtle to an absolute position. Unlike goto(x, y), a line will not + be drawn. The turtle's orientation does not change. If currently + filling, the polygon(s) teleported from will be filled after leaving, + and filling will begin again after teleporting. This can be disabled + with fill_gap=True, which makes the imaginary line traveled during + teleporting act as a fill barrier like in goto(x, y). + + Example (for a Turtle instance named turtle): + >>> tp = turtle.pos() + >>> tp + (0.00,0.00) + >>> turtle.teleport(60) + >>> turtle.pos() + (60.00,0.00) + >>> turtle.teleport(y=10) + >>> turtle.pos() + (60.00,10.00) + >>> turtle.teleport(20, 30) + >>> turtle.pos() + (20.00,30.00) + """ + pendown = self.isdown() + was_filling = self.filling() + if pendown: + self.pen(pendown=False) + if was_filling and not fill_gap: + self.end_fill() + new_x = x if x is not None else self._position[0] + new_y = y if y is not None else self._position[1] + self._position = Vec2D(new_x, new_y) + self.pen(pendown=pendown) + if was_filling and not fill_gap: + self.begin_fill() def clone(self): """Create and return a clone of the turtle. diff --git a/Lib/types.py b/Lib/types.py index aa8a1c84722399..6110e6e1de7249 100644 --- a/Lib/types.py +++ b/Lib/types.py @@ -143,6 +143,38 @@ def _calculate_meta(meta, bases): "of the metaclasses of all its bases") return winner + +def get_original_bases(cls, /): + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic, NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return cls.__orig_bases__ + except AttributeError: + try: + return cls.__bases__ + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(cls).__name__!r}' + ) from None + + class DynamicClassAttribute: """Route attribute access on a class to __getattr__. diff --git a/Lib/typing.py b/Lib/typing.py index 3ee9679e50c0c4..1a1c989dbaf37d 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -255,10 +255,17 @@ def _collect_parameters(args): """ parameters = [] for t in args: - # We don't want __parameters__ descriptor of a bare Python class. if isinstance(t, type): - continue - if hasattr(t, '__typing_subst__'): + # We don't want __parameters__ descriptor of a bare Python class. 
+ pass + elif isinstance(t, tuple): + # `t` might be a tuple, when `ParamSpec` is substituted with + # `[T, int]`, or `[int, *Ts]`, etc. + for x in t: + for collected in _collect_parameters([x]): + if collected not in parameters: + parameters.append(collected) + elif hasattr(t, '__typing_subst__'): if t not in parameters: parameters.append(t) else: @@ -1441,10 +1448,12 @@ def _determine_new_args(self, args): raise TypeError(f"Too {'many' if alen > plen else 'few'} arguments for {self};" f" actual {alen}, expected {plen}") new_arg_by_param = dict(zip(params, args)) + return tuple(self._make_substitution(self.__args__, new_arg_by_param)) + def _make_substitution(self, args, new_arg_by_param): + """Create a list of new type arguments.""" new_args = [] - for old_arg in self.__args__: - + for old_arg in args: if isinstance(old_arg, type): new_args.append(old_arg) continue @@ -1488,10 +1497,20 @@ def _determine_new_args(self, args): # should join all these types together in a flat list # `(float, int, str)` - so again, we should `extend`. new_args.extend(new_arg) + elif isinstance(old_arg, tuple): + # Corner case: + # P = ParamSpec('P') + # T = TypeVar('T') + # class Base(Generic[P]): ... + # Can be substituted like this: + # X = Base[[int, T]] + # In this case, `old_arg` will be a tuple: + new_args.append( + tuple(self._make_substitution(old_arg, new_arg_by_param)), + ) else: new_args.append(new_arg) - - return tuple(new_args) + return new_args def copy_with(self, args): return self.__class__(self.__origin__, args, name=self._name, inst=self._inst, @@ -1734,6 +1753,17 @@ class Bar(Generic[Unpack[Ts]]): ... Foo[*tuple[int, str]] class Bar(Generic[*Ts]): ... + The operator can also be used along with a `TypedDict` to annotate + `**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + Note that there is only some runtime checking of this operator. Not everything the runtime allows may be accepted by static type checkers. @@ -1748,7 +1778,7 @@ class _UnpackGenericAlias(_GenericAlias, _root=True): def __repr__(self): # `Unpack` only takes one argument, so __args__ should contain only # a single item. - return '*' + repr(self.__args__[0]) + return f'typing.Unpack[{_type_repr(self.__args__[0])}]' def __getitem__(self, args): if self.__typing_is_unpacked_typevartuple__: @@ -1884,15 +1914,20 @@ class _TypingEllipsis: """Internal placeholder for ... (ellipsis).""" -_TYPING_INTERNALS = ['__parameters__', '__orig_bases__', '__orig_class__', - '_is_protocol', '_is_runtime_protocol'] +_TYPING_INTERNALS = frozenset({ + '__parameters__', '__orig_bases__', '__orig_class__', + '_is_protocol', '_is_runtime_protocol', '__protocol_attrs__', + '__callable_proto_members_only__', +}) -_SPECIAL_NAMES = ['__abstractmethods__', '__annotations__', '__dict__', '__doc__', - '__init__', '__module__', '__new__', '__slots__', - '__subclasshook__', '__weakref__', '__class_getitem__'] +_SPECIAL_NAMES = frozenset({ + '__abstractmethods__', '__annotations__', '__dict__', '__doc__', + '__init__', '__module__', '__new__', '__slots__', + '__subclasshook__', '__weakref__', '__class_getitem__' +}) # These special attributes will be not collected as protocol members. 
-EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS + _SPECIAL_NAMES + ['_MutableMapping__marker'] +EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS | _SPECIAL_NAMES | {'_MutableMapping__marker'} def _get_protocol_attrs(cls): @@ -1903,20 +1938,15 @@ def _get_protocol_attrs(cls): """ attrs = set() for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): + if base.__name__ in {'Protocol', 'Generic'}: continue annotations = getattr(base, '__annotations__', {}) - for attr in list(base.__dict__.keys()) + list(annotations.keys()): + for attr in (*base.__dict__, *annotations): if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES: attrs.add(attr) return attrs -def _is_callable_members_only(cls): - # PEP 544 prohibits using issubclass() with protocols that have non-method members. - return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) - - def _no_init_or_replace_init(self, *args, **kwargs): cls = type(self) @@ -1975,32 +2005,57 @@ def _allow_reckless_class_checks(depth=3): } +@functools.cache +def _lazy_load_getattr_static(): + # Import getattr_static lazily so as not to slow down the import of typing.py + # Cache the result so we don't slow down _ProtocolMeta.__instancecheck__ unnecessarily + from inspect import getattr_static + return getattr_static + + +_cleanups.append(_lazy_load_getattr_static.cache_clear) + + class _ProtocolMeta(ABCMeta): # This metaclass is really unfortunate and exists only because of # the lack of __instancehook__. + def __init__(cls, *args, **kwargs): + super().__init__(*args, **kwargs) + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + cls.__callable_proto_members_only__ = all( + callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ + ) + def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. + is_protocol_cls = getattr(cls, "_is_protocol", False) if ( - getattr(cls, '_is_protocol', False) and + is_protocol_cls and not getattr(cls, '_is_runtime_protocol', False) and not _allow_reckless_class_checks(depth=2) ): raise TypeError("Instance and class checks can only be used with" " @runtime_checkable protocols") - if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): + if super().__instancecheck__(instance): return True - if cls._is_protocol: - if all(hasattr(instance, attr) and - # All *methods* can be blocked by setting them to None. 
- (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): + + if is_protocol_cls: + getattr_static = _lazy_load_getattr_static() + for attr in cls.__protocol_attrs__: + try: + val = getattr_static(instance, attr) + except AttributeError: + break + if val is None and callable(getattr(cls, attr, None)): + break + else: return True - return super().__instancecheck__(instance) + + return False class Protocol(Generic, metaclass=_ProtocolMeta): @@ -2055,7 +2110,8 @@ def _proto_hook(other): return NotImplemented raise TypeError("Instance and class checks can only be used with" " @runtime_checkable protocols") - if not _is_callable_members_only(cls): + + if not cls.__callable_proto_members_only__ : if _allow_reckless_class_checks(): return NotImplemented raise TypeError("Protocols with non-method members" @@ -2065,7 +2121,7 @@ def _proto_hook(other): raise TypeError('issubclass() arg 1 must be a class') # Second, perform the actual structural compatibility check. - for attr in _get_protocol_attrs(cls): + for attr in cls.__protocol_attrs__: for base in other.__mro__: # Check if the members appears in the class dictionary... if attr in base.__dict__: @@ -2109,6 +2165,8 @@ class _AnnotatedAlias(_NotIterable, _GenericAlias, _root=True): with extra annotations. The alias behaves like a normal typing alias, instantiating is the same as instantiating the underlying type, binding it to types is also the same. + + The metadata itself is stored in a '__metadata__' attribute as a tuple. """ def __init__(self, origin, metadata): if isinstance(origin, _AnnotatedAlias): @@ -2164,6 +2222,10 @@ class Annotated: Details: - It's an error to call `Annotated` with less than two arguments. + - Access the metadata via the ``__metadata__`` attribute:: + + Annotated[int, '$'].__metadata__ == ('$',) + - Nested Annotated are flattened:: Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] @@ -2911,7 +2973,9 @@ class Employee(NamedTuple): elif kwargs: raise TypeError("Either list of fields or keywords" " can be provided to NamedTuple, not both") - return _make_nmtuple(typename, fields, module=_caller()) + nt = _make_nmtuple(typename, fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt _NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {}) @@ -2943,6 +3007,9 @@ def __new__(cls, name, bases, ns, total=True): tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns) + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases + annotations = {} own_annotations = ns.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" @@ -3053,7 +3120,9 @@ class body be required. # Setting correct module is necessary to make typed dict classes pickleable. ns['__module__'] = module - return _TypedDictMeta(typename, (), ns, total=total) + td = _TypedDictMeta(typename, (), ns, total=total) + td.__orig_bases__ = (TypedDict,) + return td _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) TypedDict.__mro_entries__ = lambda bases: (_TypedDict,) diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index 5167c5f843f085..018f22e7ce0c73 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -9,6 +9,7 @@ import collections import contextlib import traceback +import time import types from . 
import result @@ -572,6 +573,15 @@ def _addUnexpectedSuccess(self, result): else: addUnexpectedSuccess(self) + def _addDuration(self, result, elapsed): + try: + addDuration = result.addDuration + except AttributeError: + warnings.warn("TestResult has no addDuration method", + RuntimeWarning) + else: + addDuration(self, elapsed) + def _callSetUp(self): self.setUp() @@ -612,6 +622,7 @@ def run(self, result=None): getattr(testMethod, "__unittest_expecting_failure__", False) ) outcome = _Outcome(result) + start_time = time.perf_counter() try: self._outcome = outcome @@ -625,6 +636,7 @@ def run(self, result=None): with outcome.testPartExecutor(self): self._callTearDown() self.doCleanups() + self._addDuration(result, (time.perf_counter() - start_time)) if outcome.success: if expecting_failure: diff --git a/Lib/unittest/main.py b/Lib/unittest/main.py index 046fbd3a45dcf8..51b81a6c3728bb 100644 --- a/Lib/unittest/main.py +++ b/Lib/unittest/main.py @@ -9,6 +9,7 @@ from .signals import installHandler __unittest = True +_NO_TESTS_EXITCODE = 5 MAIN_EXAMPLES = """\ Examples: @@ -66,7 +67,8 @@ class TestProgram(object): def __init__(self, module='__main__', defaultTest=None, argv=None, testRunner=None, testLoader=loader.defaultTestLoader, exit=True, verbosity=1, failfast=None, catchbreak=None, - buffer=None, warnings=None, *, tb_locals=False): + buffer=None, warnings=None, *, tb_locals=False, + durations=None): if isinstance(module, str): self.module = __import__(module) for part in module.split('.')[1:]: @@ -82,6 +84,7 @@ def __init__(self, module='__main__', defaultTest=None, argv=None, self.verbosity = verbosity self.buffer = buffer self.tb_locals = tb_locals + self.durations = durations if warnings is None and not sys.warnoptions: # even if DeprecationWarnings are ignored by default # print them anyway unless other warnings settings are @@ -178,6 +181,9 @@ def _getParentArgParser(self): parser.add_argument('--locals', dest='tb_locals', action='store_true', help='Show local variables in tracebacks') + parser.add_argument('--durations', dest='durations', type=int, + default=None, metavar="N", + help='Show the N slowest test cases (N=0 for all)') if self.failfast is None: parser.add_argument('-f', '--failfast', dest='failfast', action='store_true', @@ -258,9 +264,10 @@ def runTests(self): failfast=self.failfast, buffer=self.buffer, warnings=self.warnings, - tb_locals=self.tb_locals) + tb_locals=self.tb_locals, + durations=self.durations) except TypeError: - # didn't accept the tb_locals argument + # didn't accept the tb_locals or durations argument testRunner = self.testRunner(verbosity=self.verbosity, failfast=self.failfast, buffer=self.buffer, @@ -273,6 +280,12 @@ def runTests(self): testRunner = self.testRunner self.result = testRunner.run(self.test) if self.exit: - sys.exit(not self.result.wasSuccessful()) + if self.result.testsRun == 0: + sys.exit(_NO_TESTS_EXITCODE) + elif self.result.wasSuccessful(): + sys.exit(0) + else: + sys.exit(1) + main = TestProgram diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 0f93cb53c3d5ce..7ca085760650af 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -98,6 +98,12 @@ def _get_signature_object(func, as_instance, eat_self): func = func.__init__ # Skip the `self` argument in __init__ eat_self = True + elif isinstance(func, (classmethod, staticmethod)): + if isinstance(func, classmethod): + # Skip the `cls` argument of a class method + eat_self = True + # Use the original decorated method to extract the correct function signature + func = 
func.__func__ elif not isinstance(func, FunctionTypes): # If we really want to model an instance of the passed type, # __call__ should be looked up, not __init__. diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py index 5ca4c23238b419..7757dba9670b43 100644 --- a/Lib/unittest/result.py +++ b/Lib/unittest/result.py @@ -43,6 +43,7 @@ def __init__(self, stream=None, descriptions=None, verbosity=None): self.skipped = [] self.expectedFailures = [] self.unexpectedSuccesses = [] + self.collectedDurations = [] self.shouldStop = False self.buffer = False self.tb_locals = False @@ -157,6 +158,16 @@ def addUnexpectedSuccess(self, test): """Called when a test was expected to fail, but succeed.""" self.unexpectedSuccesses.append(test) + def addDuration(self, test, elapsed): + """Called when a test finished to run, regardless of its outcome. + *test* is the test case corresponding to the test method. + *elapsed* is the time represented in seconds, and it includes the + execution of cleanup functions. + """ + # support for a TextTestRunner using an old TestResult class + if hasattr(self, "collectedDurations"): + self.collectedDurations.append((test, elapsed)) + def wasSuccessful(self): """Tells whether or not this result was a success.""" # The hasattr check is for test_result's OldResult test. That diff --git a/Lib/unittest/runner.py b/Lib/unittest/runner.py index 6678adb6a7d813..e3c020e0ace96d 100644 --- a/Lib/unittest/runner.py +++ b/Lib/unittest/runner.py @@ -35,13 +35,16 @@ class TextTestResult(result.TestResult): separator1 = '=' * 70 separator2 = '-' * 70 - def __init__(self, stream, descriptions, verbosity): + def __init__(self, stream, descriptions, verbosity, *, durations=None): + """Construct a TextTestResult. Subclasses should accept **kwargs + to ensure compatibility as the interface changes.""" super(TextTestResult, self).__init__(stream, descriptions, verbosity) self.stream = stream self.showAll = verbosity > 1 self.dots = verbosity == 1 self.descriptions = descriptions self._newline = True + self.durations = durations def getDescription(self, test): doc_first_line = test.shortDescription() @@ -168,7 +171,7 @@ class TextTestRunner(object): def __init__(self, stream=None, descriptions=True, verbosity=1, failfast=False, buffer=False, resultclass=None, warnings=None, - *, tb_locals=False): + *, tb_locals=False, durations=None): """Construct a TextTestRunner. 
Subclasses should accept **kwargs to ensure compatibility as the @@ -182,12 +185,41 @@ def __init__(self, stream=None, descriptions=True, verbosity=1, self.failfast = failfast self.buffer = buffer self.tb_locals = tb_locals + self.durations = durations self.warnings = warnings if resultclass is not None: self.resultclass = resultclass def _makeResult(self): - return self.resultclass(self.stream, self.descriptions, self.verbosity) + try: + return self.resultclass(self.stream, self.descriptions, + self.verbosity, durations=self.durations) + except TypeError: + # didn't accept the durations argument + return self.resultclass(self.stream, self.descriptions, + self.verbosity) + + def _printDurations(self, result): + if not result.collectedDurations: + return + ls = sorted(result.collectedDurations, key=lambda x: x[1], + reverse=True) + if self.durations > 0: + ls = ls[:self.durations] + self.stream.writeln("Slowest test durations") + if hasattr(result, 'separator2'): + self.stream.writeln(result.separator2) + hidden = False + for test, elapsed in ls: + if self.verbosity < 2 and elapsed < 0.001: + hidden = True + continue + self.stream.writeln("%-10s %s" % ("%.3fs" % elapsed, test)) + if hidden: + self.stream.writeln("\n(durations < 0.001s were hidden; " + "use -v to show these durations)") + else: + self.stream.writeln("") def run(self, test): "Run the given test case or test suite." @@ -213,8 +245,12 @@ def run(self, test): stopTime = time.perf_counter() timeTaken = stopTime - startTime result.printErrors() + if self.durations is not None: + self._printDurations(result) + if hasattr(result, 'separator2'): self.stream.writeln(result.separator2) + run = result.testsRun self.stream.writeln("Ran %d test%s in %.3fs" % (run, run != 1 and "s" or "", timeTaken)) @@ -238,6 +274,8 @@ def run(self, test): infos.append("failures=%d" % failed) if errored: infos.append("errors=%d" % errored) + elif run == 0: + self.stream.write("NO TESTS RAN") else: self.stream.write("OK") if skipped: diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index 151034e6a81bf9..5314b3f26021eb 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -1251,8 +1251,8 @@ def http_error_407(self, req, fp, code, msg, headers): class AbstractHTTPHandler(BaseHandler): - def __init__(self, debuglevel=0): - self._debuglevel = debuglevel + def __init__(self, debuglevel=None): + self._debuglevel = debuglevel if debuglevel is not None else http.client.HTTPConnection.debuglevel def set_http_debuglevel(self, level): self._debuglevel = level @@ -1378,7 +1378,8 @@ def http_open(self, req): class HTTPSHandler(AbstractHTTPHandler): - def __init__(self, debuglevel=0, context=None, check_hostname=None): + def __init__(self, debuglevel=None, context=None, check_hostname=None): + debuglevel = debuglevel if debuglevel is not None else http.client.HTTPSConnection.debuglevel AbstractHTTPHandler.__init__(self, debuglevel) if context is None: http_version = http.client.HTTPSConnection._http_vsn @@ -2474,7 +2475,13 @@ def retrfile(self, file, type): return (ftpobj, retrlen) def endtransfer(self): + if not self.busy: + return self.busy = 0 + try: + self.ftp.voidresp() + except ftperrors(): + pass def close(self): self.keepalive = False diff --git a/Lib/uuid.py b/Lib/uuid.py index 1c5578bf1f05c2..697f3b45597023 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -401,7 +401,7 @@ def _get_command_stdout(command, *args): # over locally administered ones since the former are globally unique, but # we'll return the first of the latter found if that's 
all the machine has. # -# See https://en.wikipedia.org/wiki/MAC_address#Universal_vs._local +# See https://en.wikipedia.org/wiki/MAC_address#Universal_vs._local_(U/L_bit) def _is_universal(mac): return not (mac & (1 << 41)) @@ -615,7 +615,7 @@ def _random_getnode(): # significant bit of the first octet". This works out to be the 41st bit # counting from 1 being the least significant bit, or 1<<40. # - # See https://en.wikipedia.org/wiki/MAC_address#Unicast_vs._multicast + # See https://en.wikipedia.org/w/index.php?title=MAC_address&oldid=1128764812#Universal_vs._local_(U/L_bit) import random return random.getrandbits(48) | (1 << 40) @@ -711,9 +711,11 @@ def uuid1(node=None, clock_seq=None): def uuid3(namespace, name): """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" + if isinstance(name, str): + name = bytes(name, "utf-8") from hashlib import md5 digest = md5( - namespace.bytes + bytes(name, "utf-8"), + namespace.bytes + name, usedforsecurity=False ).digest() return UUID(bytes=digest[:16], version=3) @@ -724,8 +726,10 @@ def uuid4(): def uuid5(namespace, name): """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" + if isinstance(name, str): + name = bytes(name, "utf-8") from hashlib import sha1 - hash = sha1(namespace.bytes + bytes(name, "utf-8")).digest() + hash = sha1(namespace.bytes + name).digest() return UUID(bytes=hash[:16], version=5) diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py index 2f87c62ccba866..2173c9b13e5cf7 100644 --- a/Lib/venv/__init__.py +++ b/Lib/venv/__init__.py @@ -13,7 +13,7 @@ import types -CORE_VENV_DEPS = ('pip', 'setuptools') +CORE_VENV_DEPS = ('pip',) logger = logging.getLogger(__name__) @@ -523,7 +523,7 @@ def main(args=None): 'this environment.') parser.add_argument('--upgrade-deps', default=False, action='store_true', dest='upgrade_deps', - help=f'Upgrade core dependencies: {", ".join(CORE_VENV_DEPS)} ' + help=f'Upgrade core dependencies ({", ".join(CORE_VENV_DEPS)}) ' 'to the latest version in PyPI') options = parser.parse_args(args) if options.upgrade and options.clear: diff --git a/Lib/venv/scripts/common/activate b/Lib/venv/scripts/common/activate index 6fbc2b8801da04..408df5cb93b9e9 100644 --- a/Lib/venv/scripts/common/activate +++ b/Lib/venv/scripts/common/activate @@ -1,5 +1,5 @@ # This file must be used with "source bin/activate" *from bash* -# you cannot run it directly +# You cannot run it directly deactivate () { # reset old environment variables @@ -38,8 +38,15 @@ deactivate () { # unset irrelevant variables deactivate nondestructive -VIRTUAL_ENV="__VENV_DIR__" -export VIRTUAL_ENV +# on Windows, a path can contain colons and backslashes and has to be converted: +if [ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ] ; then + # transform D:\path\to\venv to /d/path/to/venv on MSYS + # and to /cygdrive/d/path/to/venv on Cygwin + export VIRTUAL_ENV=$(cygpath "__VENV_DIR__") +else + # use the path as-is + export VIRTUAL_ENV="__VENV_DIR__" +fi _OLD_VIRTUAL_PATH="$PATH" PATH="$VIRTUAL_ENV/__VENV_BIN_NAME__:$PATH" diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh index d6f697c55ed81c..5e8d66fa9e5061 100644 --- a/Lib/venv/scripts/posix/activate.csh +++ b/Lib/venv/scripts/posix/activate.csh @@ -1,5 +1,6 @@ # This file must be used with "source bin/activate.csh" *from csh*. # You cannot run it directly. + # Created by Davide Di Blasi <davidedb@gmail.com>. 
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com> diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish index 9aa4446005f4d8..91ad6442e05692 100644 --- a/Lib/venv/scripts/posix/activate.fish +++ b/Lib/venv/scripts/posix/activate.fish @@ -1,5 +1,5 @@ # This file must be used with "source <venv>/bin/activate.fish" *from fish* -# (https://fishshell.com/); you cannot run it directly. +# (https://fishshell.com/). You cannot run it directly. function deactivate -d "Exit virtual environment and return to normal shell environment" # reset old environment variables diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index a56ff33dbbdc69..b86d131f030d80 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -292,19 +292,8 @@ class Mozilla(UnixBrowser): background = True -class Netscape(UnixBrowser): - """Launcher class for Netscape browser.""" - - raise_opts = ["-noraise", "-raise"] - remote_args = ['-remote', 'openURL(%s%action)'] - remote_action = "" - remote_action_newwin = ",new-window" - remote_action_newtab = ",new-tab" - background = True - - -class Galeon(UnixBrowser): - """Launcher class for Galeon/Epiphany browsers.""" +class Epiphany(UnixBrowser): + """Launcher class for Epiphany browser.""" raise_opts = ["-noraise", ""] remote_args = ['%action', '%s'] @@ -402,44 +391,6 @@ def open(self, url, new=0, autoraise=True): return (p.poll() is None) -class Grail(BaseBrowser): - # There should be a way to maintain a connection to Grail, but the - # Grail remote control protocol doesn't really allow that at this - # point. It probably never will! - def _find_grail_rc(self): - import glob - import pwd - import socket - import tempfile - tempdir = os.path.join(tempfile.gettempdir(), - ".grail-unix") - user = pwd.getpwuid(os.getuid())[0] - filename = os.path.join(glob.escape(tempdir), glob.escape(user) + "-*") - maybes = glob.glob(filename) - if not maybes: - return None - s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - for fn in maybes: - # need to PING each one until we find one that's live - try: - s.connect(fn) - except OSError: - # no good; attempt to clean it out, but don't fail: - try: - os.unlink(fn) - except OSError: - pass - else: - return s - - def _remote(self, action): - s = self._find_grail_rc() - if not s: - return 0 - s.send(action) - s.close() - return 1 - def open(self, url, new=0, autoraise=True): sys.audit("webbrowser.open", url) if new: @@ -449,6 +400,16 @@ def open(self, url, new=0, autoraise=True): return ok +class Edge(UnixBrowser): + "Launcher class for Microsoft Edge browser." 
+ + remote_args = ['%action', '%s'] + remote_action = "" + remote_action_newwin = "--new-window" + remote_action_newtab = "" + background = True + + # # Platform support for Unix # @@ -475,35 +436,25 @@ def register_X_browsers(): if "KDE_FULL_SESSION" in os.environ and shutil.which("kfmclient"): register("kfmclient", Konqueror, Konqueror("kfmclient")) + # Common symbolic link for the default X11 browser if shutil.which("x-www-browser"): register("x-www-browser", None, BackgroundBrowser("x-www-browser")) # The Mozilla browsers - for browser in ("firefox", "iceweasel", "iceape", "seamonkey"): + for browser in ("firefox", "iceweasel", "seamonkey", "mozilla-firefox", + "mozilla"): if shutil.which(browser): register(browser, None, Mozilla(browser)) - # The Netscape and old Mozilla browsers - for browser in ("mozilla-firefox", - "mozilla-firebird", "firebird", - "mozilla", "netscape"): - if shutil.which(browser): - register(browser, None, Netscape(browser)) - # Konqueror/kfm, the KDE browser. if shutil.which("kfm"): register("kfm", Konqueror, Konqueror("kfm")) elif shutil.which("konqueror"): register("konqueror", Konqueror, Konqueror("konqueror")) - # Gnome's Galeon and Epiphany - for browser in ("galeon", "epiphany"): - if shutil.which(browser): - register(browser, None, Galeon(browser)) - - # Skipstone, another Gtk/Mozilla based browser - if shutil.which("skipstone"): - register("skipstone", None, BackgroundBrowser("skipstone")) + # Gnome's Epiphany + if shutil.which("epiphany"): + register("epiphany", None, Epiphany("epiphany")) # Google Chrome/Chromium browsers for browser in ("google-chrome", "chrome", "chromium", "chromium-browser"): @@ -514,13 +465,10 @@ def register_X_browsers(): if shutil.which("opera"): register("opera", None, Opera("opera")) - # Next, Mosaic -- old but still in use. - if shutil.which("mosaic"): - register("mosaic", None, BackgroundBrowser("mosaic")) - # Grail, the Python browser. Does anybody still use it? 
- if shutil.which("grail"): - register("grail", Grail, None) + if shutil.which("microsoft-edge"): + register("microsoft-edge", None, Edge("microsoft-edge")) + def register_standard_browsers(): global _tryorder @@ -549,10 +497,12 @@ def register_standard_browsers(): # location in 32-bit Windows edge32 = os.path.join(os.environ.get("PROGRAMFILES", "C:\\Program Files"), "Microsoft\\Edge\\Application\\msedge.exe") - for browser in ("firefox", "firebird", "seamonkey", "mozilla", + for browser in ("firefox", "seamonkey", "mozilla", "chrome", "opera", edge64, edge32): if shutil.which(browser): register(browser, None, BackgroundBrowser(browser)) + if shutil.which("MicrosoftEdge.exe"): + register("microsoft-edge", None, Edge("MicrosoftEdge.exe")) else: # Prefer X browsers if present if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"): @@ -570,14 +520,15 @@ def register_standard_browsers(): # Also try console browsers if os.environ.get("TERM"): + # Common symbolic link for the default text-based browser if shutil.which("www-browser"): register("www-browser", None, GenericBrowser("www-browser")) - # The Links/elinks browsers <http://artax.karlin.mff.cuni.cz/~mikulas/links/> + # The Links/elinks browsers <http://links.twibright.com/> if shutil.which("links"): register("links", None, GenericBrowser("links")) if shutil.which("elinks"): register("elinks", None, Elinks("elinks")) - # The Lynx browser <http://lynx.isc.org/>, <http://lynx.browser.org/> + # The Lynx browser <https://lynx.invisible-island.net/>, <http://lynx.browser.org/> if shutil.which("lynx"): register("lynx", None, GenericBrowser("lynx")) # The w3m browser <http://w3m.sourceforge.net/> @@ -713,11 +664,12 @@ def open(self, url, new=0, autoraise=True): def main(): import getopt - usage = """Usage: %s [-n | -t] url + usage = """Usage: %s [-n | -t | -h] url -n: open new window - -t: open new tab""" % sys.argv[0] + -t: open new tab + -h, --help: show help""" % sys.argv[0] try: - opts, args = getopt.getopt(sys.argv[1:], 'ntd') + opts, args = getopt.getopt(sys.argv[1:], 'ntdh',['help']) except getopt.error as msg: print(msg, file=sys.stderr) print(usage, file=sys.stderr) @@ -726,6 +678,9 @@ def main(): for o, a in opts: if o == '-n': new_win = 1 elif o == '-t': new_win = 2 + elif o == '-h' or o == '--help': + print(usage, file=sys.stderr) + sys.exit() if len(args) != 1: print(usage, file=sys.stderr) sys.exit(1) diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py index 6e6211de6b1684..95c047991f872b 100644 --- a/Lib/zipfile/__init__.py +++ b/Lib/zipfile/__init__.py @@ -338,6 +338,22 @@ def _EndRecData(fpin): # Unable to find a valid end of central directory structure return None +def _sanitize_filename(filename): + """Terminate the file name at the first null byte and + ensure paths always use forward slashes as the directory separator.""" + + # Terminate the file name at the first null byte. Null bytes in file + # names are used as tricks by viruses in archives. + null_byte = filename.find(chr(0)) + if null_byte >= 0: + filename = filename[0:null_byte] + # This is used to ensure paths in generated ZIP files always use + # forward slashes as the directory separator, as required by the + # ZIP format specification. 
+ if os.sep != "/" and os.sep in filename: + filename = filename.replace(os.sep, "/") + return filename + class ZipInfo (object): """Class with attributes describing each file in the ZIP archive.""" @@ -368,16 +384,9 @@ class ZipInfo (object): def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): self.orig_filename = filename # Original file name in archive - # Terminate the file name at the first null byte. Null bytes in file - # names are used as tricks by viruses in archives. - null_byte = filename.find(chr(0)) - if null_byte >= 0: - filename = filename[0:null_byte] - # This is used to ensure paths in generated ZIP files always use - # forward slashes as the directory separator, as required by the - # ZIP format specification. - if os.sep != "/" and os.sep in filename: - filename = filename.replace(os.sep, "/") + # Terminate the file name at the first null byte and + # ensure paths always use forward slashes as the directory separator. + filename = _sanitize_filename(filename) self.filename = filename # Normalized file name self.date_time = date_time # year, month, day, hour, min, sec @@ -482,7 +491,7 @@ def _encodeFilenameFlags(self): except UnicodeEncodeError: return self.filename.encode('utf-8'), self.flag_bits | _MASK_UTF_FILENAME - def _decodeExtra(self): + def _decodeExtra(self, filename_crc): # Try to decode the extra field. extra = self.extra unpack = struct.unpack @@ -508,6 +517,21 @@ def _decodeExtra(self): except struct.error: raise BadZipFile(f"Corrupt zip64 extra field. " f"{field} not found.") from None + elif tp == 0x7075: + data = extra[4:ln+4] + # Unicode Path Extra Field + try: + up_version, up_name_crc = unpack('<BL', data[:5]) + if up_version == 1 and up_name_crc == filename_crc: + up_unicode_name = data[5:].decode('utf-8') + if up_unicode_name: + self.filename = _sanitize_filename(up_unicode_name) + else: + warnings.warn("Empty unicode path extra field (0x7075)", stacklevel=2) + except struct.error as e: + raise BadZipFile("Corrupt unicode path extra field (0x7075)") from e + except UnicodeDecodeError as e: + raise BadZipFile('Corrupt unicode path extra field (0x7075): invalid utf-8 bytes') from e extra = extra[ln+4:] @@ -1409,6 +1433,7 @@ def _RealGetContents(self): if self.debug > 2: print(centdir) filename = fp.read(centdir[_CD_FILENAME_LENGTH]) + orig_filename_crc = crc32(filename) flags = centdir[_CD_FLAG_BITS] if flags & _MASK_UTF_FILENAME: # UTF-8 file names extension @@ -1432,8 +1457,7 @@ def _RealGetContents(self): x._raw_time = t x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F, t>>11, (t>>5)&0x3F, (t&0x1F) * 2 ) - - x._decodeExtra() + x._decodeExtra(orig_filename_crc) x.header_offset = x.header_offset + concat self.filelist.append(x) self.NameToInfo[x.filename] = x diff --git a/Mac/BuildScript/resources/Welcome.rtf b/Mac/BuildScript/resources/Welcome.rtf index 7819241b618d87..1f109ee9f13e0d 100644 --- a/Mac/BuildScript/resources/Welcome.rtf +++ b/Mac/BuildScript/resources/Welcome.rtf @@ -1,9 +1,9 @@ -{\rtf1\ansi\ansicpg1252\cocoartf2639 +{\rtf1\ansi\ansicpg1252\cocoartf2708 \cocoascreenfonts1\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT; } {\colortbl;\red255\green255\blue255;} {\*\expandedcolortbl;;} -\paperw11900\paperh16840\margl1440\margr1440\vieww12200\viewh10880\viewkind0 +\margl1440\margr1440\vieww12200\viewh10880\viewkind0 \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 \f0\fs24 
\cf0 This package will install @@ -25,6 +25,28 @@ At the end of this install, click on \f0 to install a set of current SSL root certificates.\ \ +\f1\b macOS 13 Ventura users +\f0\b0 : Due to an issue with the macOS +\f1\b Installer +\f0\b0 app, installation of some third-party packages including this Python package may fail with a vague +\f1\b "The installer encountered an error" +\f0\b0 message if the +\f1\b Installer +\f0\b0 app does not have permission to access the folder containing the downloaded installer file, typically in the +\f1\b Downloads +\f0\b0 folder. Go to +\f1\b System Settings +\f0\b0 -> +\f1\b Privacy & Security +\f0\b0 -> +\f1\b Files and Folders +\f0\b0 , then click the mark in front of +\f1\b Installer +\f0\b0 to expand, and enable +\f1\b Downloads Folder +\f0\b0 by moving the toggle to the right. See {\field{\*\fldinst{HYPERLINK "https://github.com/python/cpython/issues/103207"}}{\fldrslt https://github.com/python/cpython/issues/103207}} for up-to-date information on this issue.\ +\ + \f1\b NOTE: \f0\b0 This is an alpha test preview of Python 3.12.0, the next feature release of Python 3. It is not intended for production use.\ } \ No newline at end of file diff --git a/Mac/BuildScript/scripts/postflight.ensurepip b/Mac/BuildScript/scripts/postflight.ensurepip index 36d05945b6fd90..ce3c6c1c2bf9e6 100755 --- a/Mac/BuildScript/scripts/postflight.ensurepip +++ b/Mac/BuildScript/scripts/postflight.ensurepip @@ -56,19 +56,19 @@ if [ -d /usr/local/bin ] ; then cd /usr/local/bin - # Create pipx.y and easy_install-x.y links if /usr/local/bin/pythonx.y + # Create pipx.y links if /usr/local/bin/pythonx.y # is linked to this framework version install_links_if_our_fw "python${PYVER}" \ - "pip${PYVER}" "easy_install-${PYVER}" + "pip${PYVER}" # Create pipx link if /usr/local/bin/pythonx is linked to this version install_links_if_our_fw "python${PYMAJOR}" \ "pip${PYMAJOR}" - # Create pip and easy_install link if /usr/local/bin/python + # Create pip link if /usr/local/bin/python # is linked to this version install_links_if_our_fw "python" \ - "pip" "easy_install" + "pip" ) fi exit 0 diff --git a/Mac/Makefile.in b/Mac/Makefile.in index f9691288414538..69ab4198988570 100644 --- a/Mac/Makefile.in +++ b/Mac/Makefile.in @@ -166,7 +166,6 @@ altinstallunixtools: -if test "x$(ENSUREPIP)" != "xno" ; then \ cd "$(DESTDIR)$(FRAMEWORKUNIXTOOLSPREFIX)/bin" && \ for fn in \ - easy_install-$(VERSION) \ pip$(VERSION) \ ; \ do \ diff --git a/Makefile.pre.in b/Makefile.pre.in index 8f13198e7e34b3..b285ef9e832db5 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -38,6 +38,7 @@ CC= @CC@ CXX= @CXX@ LINKCC= @LINKCC@ AR= @AR@ +READELF= @READELF@ SOABI= @SOABI@ LDVERSION= @LDVERSION@ LIBPYTHON= @LIBPYTHON@ @@ -59,7 +60,7 @@ DSYMUTIL_PATH= @DSYMUTIL_PATH@ GNULD= @GNULD@ # Shell used by make (some versions default to the login shell, which is bad) -SHELL= /bin/sh +SHELL= /bin/sh -e # Use this to make a link between python$(VERSION) and python in $(BINDIR) LN= @LN@ @@ -369,6 +370,7 @@ PYTHON_OBJS= \ Python/Python-ast.o \ Python/Python-tokenize.o \ Python/asdl.o \ + Python/assemble.o \ Python/ast.o \ Python/ast_opt.o \ Python/ast_unparse.o \ @@ -379,6 +381,7 @@ PYTHON_OBJS= \ Python/context.o \ Python/dynamic_annotations.o \ Python/errors.o \ + Python/flowgraph.o \ Python/frame.o \ Python/frozenmain.o \ Python/future.o \ @@ -393,7 +396,9 @@ PYTHON_OBJS= \ Python/import.o \ Python/importdl.o \ Python/initconfig.o \ + Python/instrumentation.o \ Python/intrinsics.o \ + Python/legacy_tracing.o \ 
Python/marshal.o \ Python/modsupport.o \ Python/mysnprintf.o \ @@ -666,13 +671,18 @@ profile-opt: profile-run-stamp bolt-opt: @PREBOLT_RULE@ rm -f *.fdata - @LLVM_BOLT@ ./$(BUILDPYTHON) -instrument -instrumentation-file-append-pid -instrumentation-file=$(abspath $(BUILDPYTHON).bolt) -o $(BUILDPYTHON).bolt_inst - ./$(BUILDPYTHON).bolt_inst $(PROFILE_TASK) || true - @MERGE_FDATA@ $(BUILDPYTHON).*.fdata > $(BUILDPYTHON).fdata - @LLVM_BOLT@ ./$(BUILDPYTHON) -o $(BUILDPYTHON).bolt -data=$(BUILDPYTHON).fdata -update-debug-sections -reorder-blocks=ext-tsp -reorder-functions=hfsort+ -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=all -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot - rm -f *.fdata - rm -f $(BUILDPYTHON).bolt_inst - mv $(BUILDPYTHON).bolt $(BUILDPYTHON) + @if $(READELF) -p .note.bolt_info $(BUILDPYTHON) | grep BOLT > /dev/null; then\ + echo "skip: $(BUILDPYTHON) is already BOLTed."; \ + else \ + @LLVM_BOLT@ ./$(BUILDPYTHON) -instrument -instrumentation-file-append-pid -instrumentation-file=$(abspath $(BUILDPYTHON).bolt) -o $(BUILDPYTHON).bolt_inst; \ + ./$(BUILDPYTHON).bolt_inst $(PROFILE_TASK) || true; \ + @MERGE_FDATA@ $(BUILDPYTHON).*.fdata > $(BUILDPYTHON).fdata; \ + @LLVM_BOLT@ ./$(BUILDPYTHON) -o $(BUILDPYTHON).bolt -data=$(BUILDPYTHON).fdata -update-debug-sections -reorder-blocks=ext-tsp -reorder-functions=hfsort+ -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=none -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot; \ + rm -f *.fdata; \ + rm -f $(BUILDPYTHON).bolt_inst; \ + mv $(BUILDPYTHON).bolt $(BUILDPYTHON); \ + fi + # Compile and run with gcov .PHONY=coverage coverage-lcov coverage-report @@ -972,7 +982,7 @@ Makefile Modules/config.c: Makefile.pre \ Modules/Setup.local \ Modules/Setup.bootstrap \ Modules/Setup.stdlib - $(SHELL) $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \ + $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \ -s Modules \ Modules/Setup.local \ Modules/Setup.stdlib \ @@ -1184,7 +1194,7 @@ Tools/build/freeze_modules.py: $(FREEZE_MODULE) .PHONY: regen-frozen regen-frozen: Tools/build/freeze_modules.py $(FROZEN_FILES_IN) - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/freeze_modules.py + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/freeze_modules.py --frozen-modules @echo "The Makefile was updated, you may need to re-run make." 
############################################################################ @@ -1659,6 +1669,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_asdl.h \ $(srcdir)/Include/internal/pycore_ast.h \ $(srcdir)/Include/internal/pycore_ast_state.h \ + $(srcdir)/Include/internal/pycore_atexit.h \ $(srcdir)/Include/internal/pycore_atomic.h \ $(srcdir)/Include/internal/pycore_atomic_funcs.h \ $(srcdir)/Include/internal/pycore_bitutils.h \ @@ -1699,8 +1710,11 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_moduleobject.h \ $(srcdir)/Include/internal/pycore_namespace.h \ $(srcdir)/Include/internal/pycore_object.h \ + $(srcdir)/Include/internal/pycore_object_state.h \ $(srcdir)/Include/internal/pycore_obmalloc.h \ $(srcdir)/Include/internal/pycore_obmalloc_init.h \ + $(srcdir)/Include/internal/pycore_opcode.h \ + $(srcdir)/Include/internal/pycore_opcode_utils.h \ $(srcdir)/Include/internal/pycore_pathconfig.h \ $(srcdir)/Include/internal/pycore_pyarena.h \ $(srcdir)/Include/internal/pycore_pyerrors.h \ @@ -2029,16 +2043,26 @@ LIBSUBDIRS= asyncio \ zoneinfo \ __phello__ TESTSUBDIRS= idlelib/idle_test \ - test test/audiodata \ - test/capath test/cjkencodings \ - test/data test/decimaltestdata \ - test/dtracedata test/eintrdata \ - test/encoded_modules test/imghdrdata \ - test/libregrtest test/sndhdrdata \ - test/subprocessdata test/support \ + test \ + test/audiodata \ + test/capath \ + test/cjkencodings \ + test/crashers \ + test/data \ + test/decimaltestdata \ + test/dtracedata \ + test/encoded_modules \ + test/imghdrdata \ + test/leakers \ + test/libregrtest \ + test/sndhdrdata \ + test/subprocessdata \ + test/support \ test/test_asyncio \ + test/test_capi \ test/test_ctypes \ - test/test_email test/test_email/data \ + test/test_email \ + test/test_email/data \ test/test_import \ test/test_import/data \ test/test_import/data/circular_imports \ @@ -2097,16 +2121,39 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_lib2to3/data/fixers \ test/test_lib2to3/data/fixers/myfixes \ test/test_peg_generator \ + test/test_sqlite3 \ test/test_tkinter \ + test/test_tomllib \ + test/test_tomllib/data \ + test/test_tomllib/data/invalid \ + test/test_tomllib/data/invalid/array \ + test/test_tomllib/data/invalid/array-of-tables \ + test/test_tomllib/data/invalid/boolean \ + test/test_tomllib/data/invalid/dates-and-times \ + test/test_tomllib/data/invalid/dotted-keys \ + test/test_tomllib/data/invalid/inline-table \ + test/test_tomllib/data/invalid/keys-and-vals \ + test/test_tomllib/data/invalid/literal-str \ + test/test_tomllib/data/invalid/multiline-basic-str \ + test/test_tomllib/data/invalid/multiline-literal-str \ + test/test_tomllib/data/invalid/table \ + test/test_tomllib/data/valid \ + test/test_tomllib/data/valid/array \ + test/test_tomllib/data/valid/dates-and-times \ + test/test_tomllib/data/valid/multiline-basic-str \ test/test_tools \ test/test_ttk \ - test/test_warnings test/test_warnings/data \ + test/test_unittest \ + test/test_unittest/testmock \ + test/test_warnings \ + test/test_warnings/data \ test/test_zipfile \ - test/test_zoneinfo test/test_zoneinfo/data \ - test/test_unittest test/test_unittest/testmock \ + test/test_zoneinfo \ + test/test_zoneinfo/data \ test/tracedmodules \ test/typinganndata \ - test/xmltestdata test/xmltestdata/c14n-20 \ + test/xmltestdata \ + test/xmltestdata/c14n-20 \ test/ziptestdata COMPILEALL_OPTS=-j0 @@ -2409,12 +2456,12 @@ frameworkinstallextras: # Build the toplevel Makefile Makefile.pre: $(srcdir)/Makefile.pre.in config.status - CONFIG_FILES=Makefile.pre 
CONFIG_HEADERS= $(SHELL) config.status + CONFIG_FILES=Makefile.pre CONFIG_HEADERS= ./config.status $(MAKE) -f Makefile.pre Makefile # Run the configure script. config.status: $(srcdir)/configure - $(SHELL) $(srcdir)/configure $(CONFIG_ARGS) + $(srcdir)/configure $(CONFIG_ARGS) .PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre @@ -2439,8 +2486,8 @@ reindent: # Rerun configure with the same options as it was run last time, # provided the config.status script exists recheck: - $(SHELL) config.status --recheck - $(SHELL) config.status + ./config.status --recheck + ./config.status # Regenerate configure and pyconfig.h.in .PHONY: autoconf @@ -2655,6 +2702,15 @@ MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h MODULE__SQLITE3_DEPS=$(srcdir)/Modules/_sqlite/connection.h $(srcdir)/Modules/_sqlite/cursor.h $(srcdir)/Modules/_sqlite/microprotocols.h $(srcdir)/Modules/_sqlite/module.h $(srcdir)/Modules/_sqlite/prepare_protocol.h $(srcdir)/Modules/_sqlite/row.h $(srcdir)/Modules/_sqlite/util.h +CODECS_COMMON_HEADERS=$(srcdir)/Modules/cjkcodecs/multibytecodec.h $(srcdir)/Modules/cjkcodecs/cjkcodecs.h +MODULE__CODECS_CN_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_cn.h $(CODECS_COMMON_HEADERS) +MODULE__CODECS_HK_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_hk.h $(CODECS_COMMON_HEADERS) +MODULE__CODECS_ISO2022_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_jisx0213_pair.h $(srcdir)/Modules/cjkcodecs/alg_jisx0201.h $(srcdir)/Modules/cjkcodecs/emu_jisx0213_2000.h $(CODECS_COMMON_HEADERS) +MODULE__CODECS_JP_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_jisx0213_pair.h $(srcdir)/Modules/cjkcodecs/alg_jisx0201.h $(srcdir)/Modules/cjkcodecs/emu_jisx0213_2000.h $(srcdir)/Modules/cjkcodecs/mappings_jp.h $(CODECS_COMMON_HEADERS) +MODULE__CODECS_KR_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_kr.h $(CODECS_COMMON_HEADERS) +MODULE__CODECS_TW_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_tw.h $(CODECS_COMMON_HEADERS) +MODULE__MULTIBYTECODEC_DEPS=$(srcdir)/Modules/cjkcodecs/multibytecodec.h + # IF YOU PUT ANYTHING HERE IT WILL GO AWAY # Local Variables: # mode: makefile diff --git a/Misc/ACKS b/Misc/ACKS index 8cf5166a2bb1f4..42ec059a7c4ec2 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -160,6 +160,7 @@ Brice Berna Olivier Bernard Vivien Bernet-Rollande Maxwell Bernstein +Jay Berry Eric Beser Steven Bethard Stephen Bevan @@ -298,6 +299,7 @@ Dave Chambers Pascal Chambon Nicholas Chammas Ofey Chan +Juhi Chandalia John Chandler Hye-Shik Chang Jeffrey Chang @@ -627,6 +629,7 @@ Julian Gindi Yannick Gingras Neil Girdhar Matt Giuca +Andrea Giudiceandrea Franz Glasner Wim Glenn Michael Goderbauer @@ -693,6 +696,7 @@ Anders Hammarquist Mark Hammond Harald Hanche-Olsen Manus Hand +Michael Handler Andreas Hangauer Milton L. Hankins Carl Bordum Hansen @@ -1113,6 +1117,7 @@ Jason Lowe Tony Lownds Ray Loyzaga Kang-Hao (Kenny) Lu +Raymond Lu Lukas Lueg Loren Luke Fredrik Lundh @@ -1483,7 +1488,7 @@ John Redford Kalyan Reddy Terry J. Reedy Gareth Rees -John Reese +Amethyst Reese Steve Reeves Lennart Regebro John Regehr @@ -1508,6 +1513,7 @@ Vlad Riscutia Wes Rishel Daniel Riti Juan M. 
Bello Rivas +Stefano Rivera Llandy Riveron Del Risco Mohd Sanad Zaki Rizvi Davide Rizzo @@ -1547,6 +1553,7 @@ Hugo van Rossum Saskia van Rossum Robin Roth Clement Rouault +Tomas Roun Donald Wallace Rouse II Liam Routt Todd Rovito diff --git a/Misc/NEWS.d/3.10.0a2.rst b/Misc/NEWS.d/3.10.0a2.rst index 61a291914f9333..061a82e90afd6b 100644 --- a/Misc/NEWS.d/3.10.0a2.rst +++ b/Misc/NEWS.d/3.10.0a2.rst @@ -888,7 +888,7 @@ file descriptors. .. nonce: JUPE59 .. section: C API -:c:data:`Py_FileSystemDefaultEncodeErrors` and :c:data:`Py_UTF8Mode` are +:c:data:`!Py_FileSystemDefaultEncodeErrors` and :c:data:`!Py_UTF8Mode` are available again in limited API. .. diff --git a/Misc/NEWS.d/3.12.0a6.rst b/Misc/NEWS.d/3.12.0a6.rst index 2bcb4c8c854d4e..f6beb5b7ec3dbc 100644 --- a/Misc/NEWS.d/3.12.0a6.rst +++ b/Misc/NEWS.d/3.12.0a6.rst @@ -189,7 +189,7 @@ not just glibc. This fixes support for musl. .. nonce: _P5sMa .. section: Core and Builtins -Make the slice object hashable. +Make the slice object hashable. Patch by Will Bradshaw and Furkan Onder. .. diff --git a/Misc/NEWS.d/3.12.0a7.rst b/Misc/NEWS.d/3.12.0a7.rst new file mode 100644 index 00000000000000..8f078e50823a00 --- /dev/null +++ b/Misc/NEWS.d/3.12.0a7.rst @@ -0,0 +1,745 @@ +.. date: 2023-03-31-12-22-25 +.. gh-issue: 102192 +.. nonce: gYxJP_ +.. release date: 2023-04-04 +.. section: Core and Builtins + +Deprecated ``_PyErr_ChainExceptions`` in favour of +``_PyErr_ChainExceptions1``. + +.. + +.. date: 2023-03-24-02-50-33 +.. gh-issue: 89987 +.. nonce: oraTzh +.. section: Core and Builtins + +Reduce the number of inline :opcode:`CACHE` entries for +:opcode:`BINARY_SUBSCR`. + +.. + +.. date: 2023-03-21-00-46-36 +.. gh-issue: 102859 +.. nonce: PRkGca +.. section: Core and Builtins + +Removed :opcode:`JUMP_IF_FALSE_OR_POP` and :opcode:`JUMP_IF_TRUE_OR_POP` +instructions. + +.. + +.. date: 2023-03-18-02-36-39 +.. gh-issue: 101975 +.. nonce: HwMR1d +.. section: Core and Builtins + +Fixed ``stacktop`` value on tracing entries to avoid corruption on garbage +collection. + +.. + +.. date: 2023-03-17-13-43-34 +.. gh-issue: 102778 +.. nonce: ANDv8I +.. section: Core and Builtins + +Add :data:`sys.last_exc` and deprecate :data:`sys.last_type`, +:data:`sys.last_value` and :data:`sys.last_traceback`, which hold the same +information in its legacy form. + +.. + +.. date: 2023-03-17-12-09-45 +.. gh-issue: 100982 +.. nonce: Pf_BI6 +.. section: Core and Builtins + +Replace all occurrences of ``COMPARE_AND_BRANCH`` with :opcode:`COMPARE_OP`. + +.. + +.. date: 2023-03-16-17-24-44 +.. gh-issue: 102701 +.. nonce: iNGVaS +.. section: Core and Builtins + +Fix overflow when creating very large dict. + +.. + +.. date: 2023-03-16-14-44-29 +.. gh-issue: 102755 +.. nonce: j1GxlV +.. section: Core and Builtins + +Add :c:func:`PyErr_DisplayException` which takes just an exception instance, +to replace the legacy :c:func:`PyErr_Display` which takes the ``(typ, exc, +tb)`` triplet. + +.. + +.. date: 2023-03-14-00-11-46 +.. gh-issue: 102594 +.. nonce: BjU-m2 +.. section: Core and Builtins + +Add note to exception raised in ``PyErr_SetObject`` when normalization +fails. + +.. + +.. date: 2023-03-09-13-57-35 +.. gh-issue: 90997 +.. nonce: J-Yhn2 +.. section: Core and Builtins + +Shrink the number of inline :opcode:`CACHE` entries used by +:opcode:`LOAD_GLOBAL`. + +.. + +.. date: 2023-03-08-08-37-36 +.. gh-issue: 102491 +.. nonce: SFvvsC +.. section: Core and Builtins + +Improve import time of ``platform`` by removing IronPython version parsing. 
+The IronPython version parsing was not functional (see +https://github.com/IronLanguages/ironpython3/issues/1667). + +.. + +.. date: 2023-03-06-10-02-22 +.. gh-issue: 101291 +.. nonce: 0FT2QS +.. section: Core and Builtins + +Rearrage bits in first field (after header) of PyLongObject. * Bits 0 and 1: +1 - sign. I.e. 0 for positive numbers, 1 for zero and 2 for negative +numbers. * Bit 2 reserved (probably for the immortal bit) * Bits 3+ the +unsigned size. + +This makes a few operations slightly more efficient, and will enable a more +compact and faster 2s-complement representation of most ints in future. + +.. + +.. date: 2023-03-04-06-48-34 +.. gh-issue: 102397 +.. nonce: ACJaOf +.. section: Core and Builtins + +Fix segfault from race condition in signal handling during garbage +collection. Patch by Kumar Aditya. + +.. + +.. date: 2023-03-03-23-21-16 +.. gh-issue: 102406 +.. nonce: XLqYO3 +.. section: Core and Builtins + +:mod:`codecs` encoding/decoding errors now get the context information +(which operation and which codecs) attached as :pep:`678` notes instead of +through chaining a new instance of the exception. + +.. + +.. date: 2023-03-02-13-49-21 +.. gh-issue: 102281 +.. nonce: QCuu2N +.. section: Core and Builtins + +Fix potential nullptr dereference and use of uninitialized memory in +fileutils. Patch by Max Bachmann. + +.. + +.. date: 2023-02-27-15-48-31 +.. gh-issue: 102300 +.. nonce: 8o-_Mt +.. section: Core and Builtins + +Reuse operands with refcount of 1 in float specializations of BINARY_OP. + +.. + +.. date: 2023-02-26-13-12-55 +.. gh-issue: 102213 +.. nonce: fTH8X7 +.. section: Core and Builtins + +Fix performance loss when accessing an object's attributes with +``__getattr__`` defined. + +.. + +.. date: 2023-02-26-11-43-56 +.. gh-issue: 102255 +.. nonce: cRnI5x +.. section: Core and Builtins + +Improve build support for the Xbox. Patch by Max Bachmann. + +.. + +.. date: 2023-02-21-23-42-39 +.. gh-issue: 102027 +.. nonce: fQARG0 +.. section: Core and Builtins + +Fix SSE2 and SSE3 detection in ``_blake2`` internal module. Patch by Max +Bachmann. + +.. + +.. date: 2023-02-21-17-22-06 +.. gh-issue: 101865 +.. nonce: fwrTOA +.. section: Core and Builtins + +Deprecate ``co_lnotab`` in code objects, schedule it for removal in Python +3.14 + +.. + +.. bpo: 1635741 +.. date: 2020-07-04-09-04-41 +.. nonce: ZsP31Y +.. section: Core and Builtins + +Adapt :mod:`!_pickle` to :pep:`687`. Patch by Mohamed Koubaa and Erlend +Aasland. + +.. + +.. date: 2023-03-28-15-12-53 +.. gh-issue: 103085 +.. nonce: DqNehf +.. section: Library + +Pure python :func:`locale.getencoding()` will not warn deprecation. + +.. + +.. date: 2023-03-28-05-14-59 +.. gh-issue: 103068 +.. nonce: YQTmrA +.. section: Library + +It's no longer possible to register conditional breakpoints in +:class:`~pdb.Pdb` that raise :exc:`SyntaxError`. Patch by Tian Gao. + +.. + +.. date: 2023-03-27-19-21-51 +.. gh-issue: 102549 +.. nonce: NQ6Nlv +.. section: Library + +Don't ignore exceptions in member type creation. + +.. + +.. date: 2023-03-27-15-01-16 +.. gh-issue: 103056 +.. nonce: -Efh5Q +.. section: Library + +Ensure final ``_generate_next_value_`` is a ``staticmethod``. + +.. + +.. date: 2023-03-26-20-54-57 +.. gh-issue: 103046 +.. nonce: xBlA2l +.. section: Library + +Display current line label correctly in :mod:`dis` when ``show_caches`` is +False and ``lasti`` points to a CACHE entry. + +.. + +.. date: 2023-03-25-16-57-18 +.. gh-issue: 102433 +.. nonce: L-7x2Q +.. 
section: Library + +:func:`isinstance` checks against :func:`runtime-checkable protocols +<typing.runtime_checkable>` now use :func:`inspect.getattr_static` rather +than :func:`hasattr` to lookup whether attributes exist. This means that +descriptors and :meth:`~object.__getattr__` methods are no longer +unexpectedly evaluated during ``isinstance()`` checks against +runtime-checkable protocols. However, it may also mean that some objects +which used to be considered instances of a runtime-checkable protocol may no +longer be considered instances of that protocol on Python 3.12+, and vice +versa. Most users are unlikely to be affected by this change. Patch by Alex +Waygood. + +.. + +.. date: 2023-03-25-02-08-05 +.. gh-issue: 103023 +.. nonce: Qfn7Hl +.. section: Library + +It's no longer possible to register expressions to display in +:class:`~pdb.Pdb` that raise :exc:`SyntaxError`. Patch by Tian Gao. + +.. + +.. date: 2023-03-23-13-34-33 +.. gh-issue: 102947 +.. nonce: cTwcpU +.. section: Library + +Improve traceback when :func:`dataclasses.fields` is called on a +non-dataclass. Patch by Alex Waygood + +.. + +.. date: 2023-03-22-16-15-18 +.. gh-issue: 102780 +.. nonce: NEcljy +.. section: Library + +The :class:`asyncio.Timeout` context manager now works reliably even when +performing cleanup due to task cancellation. Previously it could raise a +:exc:`~asyncio.CancelledError` instead of an :exc:`~asyncio.TimeoutError` in +such cases. + +.. + +.. date: 2023-03-21-15-17-07 +.. gh-issue: 102871 +.. nonce: U9mchn +.. section: Library + +Remove support for obsolete browsers from :mod:`webbrowser`. Removed +browsers include Grail, Mosaic, Netscape, Galeon, Skipstone, Iceape, +Firebird, and Firefox versions 35 and below. + +.. + +.. date: 2023-03-20-12-21-19 +.. gh-issue: 102839 +.. nonce: RjRi12 +.. section: Library + +Improve performance of :func:`math.log` arguments handling by removing the +argument clinic. + +.. + +.. date: 2023-03-19-15-30-59 +.. gh-issue: 102828 +.. nonce: NKClXg +.. section: Library + +Add the ``onexc`` arg to :func:`shutil.rmtree`, which is like ``onerror`` +but expects an exception instance rather than an exc_info tuple. Deprecate +``onerror``. + +.. + +.. date: 2023-03-18-14-59-21 +.. gh-issue: 88965 +.. nonce: kA70Km +.. section: Library + +typing: Fix a bug relating to substitution in custom classes generic over a +:class:`~typing.ParamSpec`. Previously, if the ``ParamSpec`` was substituted +with a parameters list that itself contained a :class:`~typing.TypeVar`, the +``TypeVar`` in the parameters list could not be subsequently substituted. +This is now fixed. + +Patch by Nikita Sobolev. + +.. + +.. date: 2023-03-17-19-14-26 +.. gh-issue: 76846 +.. nonce: KEamjK +.. section: Library + +Fix issue where ``__new__()`` and ``__init__()`` methods of +:class:`pathlib.PurePath` and :class:`~pathlib.Path` subclasses were not +called in some circumstances. + +.. + +.. date: 2023-03-16-16-43-04 +.. gh-issue: 78530 +.. nonce: Lr8eq_ +.. section: Library + +:func:`asyncio.wait` now accepts generators yielding tasks. Patch by Kumar +Aditya. + +.. + +.. date: 2023-03-16-08-17-29 +.. gh-issue: 102748 +.. nonce: WNACpI +.. section: Library + +:func:`asyncio.iscoroutine` now returns ``False`` for generators as +:mod:`asyncio` does not support legacy generator-based coroutines. Patch by +Kumar Aditya. + +.. + +.. date: 2023-03-13-18-27-00 +.. gh-issue: 102670 +.. nonce: GyoThv +.. 
section: Library + +Optimized fmean(), correlation(), covariance(), and linear_regression() +using the new math.sumprod() function. + +.. + +.. date: 2023-03-13-12-05-55 +.. gh-issue: 102615 +.. nonce: NcA_ZL +.. section: Library + +Typing: Improve the ``repr`` of generic aliases for classes generic over a +:class:`~typing.ParamSpec`. (Use square brackets to represent a parameter +list.) + +.. + +.. date: 2023-03-10-13-51-21 +.. gh-issue: 100112 +.. nonce: VHh4mw +.. section: Library + +:meth:`asyncio.Task.get_coro` now always returns a coroutine when wrapping +an awaitable object. Patch by Kumar Aditya. + +.. + +.. date: 2023-03-10-13-21-16 +.. gh-issue: 102578 +.. nonce: -gujoI +.. section: Library + +Speed up setting or deleting mutable attributes on non-dataclass subclasses +of frozen dataclasses. Due to the implementation of ``__setattr__`` and +``__delattr__`` for frozen dataclasses, this previously had a time +complexity of ``O(n)``. It now has a time complexity of ``O(1)``. + +.. + +.. date: 2023-03-08-23-08-38 +.. gh-issue: 102519 +.. nonce: wlcsFI +.. section: Library + +Add :func:`os.listdrives`, :func:`os.listvolumes` and :func:`os.listmounts` +functions on Windows for enumerating drives, volumes and mount points + +.. + +.. date: 2023-03-04-20-58-29 +.. gh-issue: 74468 +.. nonce: Ac5Ew_ +.. section: Library + +Attribute name of the extracted :mod:`tarfile` file object now holds +filename of itself rather than of the archive it is contained in. Patch by +Oleg Iarygin. + +.. + +.. date: 2023-03-03-19-53-08 +.. gh-issue: 102378 +.. nonce: kRdOZc +.. section: Library + +Private helper method ``inspect._signature_strip_non_python_syntax`` will no +longer strip ``/`` from the input string. + +.. + +.. date: 2023-02-26-17-29-57 +.. gh-issue: 79940 +.. nonce: SAfmAy +.. section: Library + +Add :func:`inspect.getasyncgenstate` and :func:`inspect.getasyncgenlocals`. +Patch by Thomas Krennwallner. + +.. + +.. date: 2023-02-21-11-56-16 +.. gh-issue: 102103 +.. nonce: Dj0WEj +.. section: Library + +Add ``module`` argument to :func:`dataclasses.make_dataclass` and make +classes produced by it pickleable. + +.. + +.. date: 2023-02-20-16-47-56 +.. gh-issue: 102069 +.. nonce: FS7f1j +.. section: Library + +Fix ``__weakref__`` descriptor generation for custom dataclasses. + +.. + +.. date: 2023-02-19-01-49-46 +.. gh-issue: 102038 +.. nonce: n3if3D +.. section: Library + +Skip a ``stat`` in :mod:`site` if we have already found a ``pyvenv.cfg`` + +.. + +.. date: 2023-02-18-23-03-50 +.. gh-issue: 98886 +.. nonce: LkKGWv +.. section: Library + +Fix issues when defining dataclasses that have fields with specific +underscore names that aren't clearly reserved by :mod:`dataclasses`. + +.. + +.. date: 2023-02-09-19-40-41 +.. gh-issue: 101673 +.. nonce: mX-Ppq +.. section: Library + +Fix a :mod:`pdb` bug where ``ll`` clears the changes to local variables. + +.. + +.. date: 2023-01-27-14-51-07 +.. gh-issue: 101313 +.. nonce: 10AEXh +.. section: Library + +Added -h and --help arguments to the webbrowser CLI + +.. + +.. date: 2022-12-20-10-55-14 +.. gh-issue: 100372 +.. nonce: utfP65 +.. section: Library + +:meth:`ssl.SSLContext.load_verify_locations` no longer incorrectly accepts +some cases of trailing data when parsing DER. + +.. + +.. date: 2022-12-16-10-27-58 +.. gh-issue: 89727 +.. nonce: y64ZLM +.. section: Library + +Fix pathlib.Path.walk RecursionError on deep directory trees by rewriting it +using iteration instead of recursion. + +.. + +.. date: 2022-12-09-11-21-38 +.. gh-issue: 100131 +.. 
nonce: v863yR +.. section: Library + +Added an optional ``delete`` keyword argument to +:class:`tempfile.TemporaryDirectory`. + +.. + +.. date: 2022-11-24-13-23-07 +.. gh-issue: 48330 +.. nonce: 6uAX9F +.. section: Library + +Added ``--durations`` command line option, showing the N slowest test cases. +:class:`unittest.TextTestRunner` and :class:`unittest.TextTestResult` +constructors accept a new *durations* keyword argument. Subclasses should +take this into account or accept ``**kwargs``. Added +:meth:`unittest.TestResult.addDuration` method and +:attr:`unittest.TestResult.collectedDurations` attribute. + +(Contributed by Giampaolo Rodola) + +.. + +.. date: 2022-10-10-19-14-51 +.. gh-issue: 98169 +.. nonce: DBWIxL +.. section: Library + +Fix :func:`dataclasses.astuple` crash when :class:`collections.defaultdict` +is present in the attributes. + +.. + +.. date: 2022-09-19-08-12-58 +.. gh-issue: 96931 +.. nonce: x0WQhh +.. section: Library + +Fix incorrect results from :meth:`ssl.SSLSocket.shared_ciphers` + +.. + +.. date: 2022-07-30-23-01-43 +.. gh-issue: 95495 +.. nonce: RA-q1d +.. section: Library + +When built against OpenSSL 3.0, the :mod:`ssl` module had a bug where it +reported unauthenticated EOFs (i.e. without close_notify) as a clean +TLS-level EOF. It now raises :exc:`~ssl.SSLEOFError`, matching the behavior +in previous versions of OpenSSL. The :attr:`~ssl.SSLContext.options` +attribute on :class:`~ssl.SSLContext` also no longer includes +:data:`~ssl.OP_IGNORE_UNEXPECTED_EOF` by default. This option may be set to +specify the previous OpenSSL 3.0 behavior. + +.. + +.. date: 2022-07-09-13-07-30 +.. gh-issue: 94684 +.. nonce: nV5yno +.. section: Library + +Now :func:`uuid.uuid3` and :func:`uuid.uuid5` functions support +:class:`bytes` objects as their *name* argument. + +.. + +.. date: 2022-06-30-21-28-41 +.. gh-issue: 94440 +.. nonce: LtgX0d +.. section: Library + +Fix a :mod:`concurrent.futures.process` bug where ``ProcessPoolExecutor`` +shutdown could hang after a future has been quickly submitted and canceled. + +.. + +.. date: 2022-04-11-18-34-33 +.. gh-issue: 72346 +.. nonce: pC7gnM +.. section: Library + +Added deprecation warning to *isdst* parameter of +:func:`email.utils.localtime`. + +.. + +.. bpo: 36305 +.. date: 2019-03-15-22-50-27 +.. nonce: Pbkv6u +.. section: Library + +Fix handling of Windows filenames that resemble drives, such as ``./a:b``, +in :mod:`pathlib`. + +.. + +.. date: 2023-03-29-14-51-39 +.. gh-issue: 103112 +.. nonce: XgGSEO +.. section: Documentation + +Add docstring to :meth:`http.client.HTTPResponse.read` to fix ``pydoc`` +output. + +.. + +.. date: 2023-03-23-23-25-18 +.. gh-issue: 102980 +.. nonce: Zps4QF +.. section: Tests + +Improve test coverage on :mod:`pdb`. + +.. + +.. date: 2023-03-08-13-54-20 +.. gh-issue: 102537 +.. nonce: Vfplpb +.. section: Tests + +Adjust the error handling strategy in +``test_zoneinfo.TzPathTest.python_tzpath_context``. Patch by Paul Ganssle. + +.. + +.. date: 2023-01-27-18-10-40 +.. gh-issue: 101377 +.. nonce: IJGpqh +.. section: Tests + +Improved test_locale_calendar_formatweekday of calendar. + +.. + +.. date: 2023-03-23-20-58-56 +.. gh-issue: 102973 +.. nonce: EaJUrw +.. section: Build + +Add a dev container (along with accompanying Dockerfile) for development +purposes. + +.. + +.. date: 2023-03-15-02-03-39 +.. gh-issue: 102711 +.. nonce: zTkjts +.. section: Build + +Fix ``-Wstrict-prototypes`` compiler warnings. + +.. + +.. date: 2023-03-14-10-52-43 +.. gh-issue: 102690 +.. nonce: sbXtqk +.. 
section: Windows + +Update :mod:`webbrowser` to fall back to Microsoft Edge instead of Internet +Explorer. + +.. + +.. date: 2023-02-22-17-26-10 +.. gh-issue: 99726 +.. nonce: 76t957 +.. section: Windows + +Improves correctness of stat results for Windows, and uses faster API when +available + +.. + +.. date: 2023-03-21-01-27-07 +.. gh-issue: 102809 +.. nonce: 2F1Byz +.. section: Tools/Demos + +``Misc/gdbinit`` was removed. + +.. + +.. date: 2023-02-18-00-55-14 +.. gh-issue: 102013 +.. nonce: 83mrtI +.. section: C API + +Add a new (unstable) C-API function for iterating over GC'able objects using +a callback: ``PyUnstable_VisitObjects``. diff --git a/Misc/NEWS.d/3.7.0b2.rst b/Misc/NEWS.d/3.7.0b2.rst index b2ade206bd5f97..9590914599bb86 100644 --- a/Misc/NEWS.d/3.7.0b2.rst +++ b/Misc/NEWS.d/3.7.0b2.rst @@ -357,7 +357,7 @@ Wirtel Add TLSVersion constants and SSLContext.maximum_version / minimum_version attributes. The new API wraps OpenSSL 1.1 -https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html +https://web.archive.org/web/20180309043602/https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html feature. .. diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index 991bbc128670b2..db2eba32e6ea34 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -5951,7 +5951,7 @@ Wirtel Add TLSVersion constants and SSLContext.maximum_version / minimum_version attributes. The new API wraps OpenSSL 1.1 -https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html +https://web.archive.org/web/20180309043602/https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html feature. .. diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index 633620583838df..0888a5c43087b5 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -4887,7 +4887,7 @@ Fix use of registry values to launch Python from Microsoft Store app. .. section: Windows Fix memory leak on Windows in creating an SSLContext object or running -urllib.request.urlopen('https://...'). +``urllib.request.urlopen('https://...')``. .. diff --git a/Misc/NEWS.d/3.9.0a2.rst b/Misc/NEWS.d/3.9.0a2.rst index 226ea0d3df2243..a03eb10f1d523a 100644 --- a/Misc/NEWS.d/3.9.0a2.rst +++ b/Misc/NEWS.d/3.9.0a2.rst @@ -686,7 +686,7 @@ added. Update documentation to state that to activate virtual environments under fish one should use `source`, not `.` as documented at -https://fishshell.com/docs/current/commands.html#source. +https://fishshell.com/docs/current/cmds/source.html. .. diff --git a/Misc/NEWS.d/3.9.0a4.rst b/Misc/NEWS.d/3.9.0a4.rst index 2aef8b26b01696..019b34c4082d10 100644 --- a/Misc/NEWS.d/3.9.0a4.rst +++ b/Misc/NEWS.d/3.9.0a4.rst @@ -392,7 +392,7 @@ The distutils ``bdist_msi`` command is deprecated in Python 3.9, use Improved performance of zipfile.Path for files with a large number of entries. Also improved performance and fixed minor issue as published with `importlib_metadata 1.5 -<https://importlib-metadata.readthedocs.io/en/latest/changelog%20(links).html#v1-5-0>`_. +<https://importlib-metadata.readthedocs.io/en/latest/history.html#v1-5-0>`_. .. 
diff --git a/Misc/NEWS.d/next/Build/2022-12-18-07-24-44.gh-issue-100220.BgSV7C.rst b/Misc/NEWS.d/next/Build/2022-12-18-07-24-44.gh-issue-100220.BgSV7C.rst new file mode 100644 index 00000000000000..7135317cd06fa2 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-12-18-07-24-44.gh-issue-100220.BgSV7C.rst @@ -0,0 +1,4 @@ +Changed the default value of the ``SHELL`` Makefile variable from ``/bin/sh`` +to ``/bin/sh -e`` to ensure that complex recipes correctly fail after an error. +Previously, ``make install`` could fail to install some files and yet return +a successful result. diff --git a/Misc/NEWS.d/next/Build/2023-02-11-05-31-05.gh-issue-99069.X4LDvY.rst b/Misc/NEWS.d/next/Build/2023-02-11-05-31-05.gh-issue-99069.X4LDvY.rst new file mode 100644 index 00000000000000..ae9b4d59ca8cec --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-02-11-05-31-05.gh-issue-99069.X4LDvY.rst @@ -0,0 +1 @@ +Extended workaround defining ``static_assert`` when missing from the libc headers to all clang and gcc builds. In particular, this fixes building on macOS <= 10.10. diff --git a/Misc/NEWS.d/next/Build/2023-04-14-10-24-37.gh-issue-103532.H1djkd.rst b/Misc/NEWS.d/next/Build/2023-04-14-10-24-37.gh-issue-103532.H1djkd.rst new file mode 100644 index 00000000000000..255c9833282c2f --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-04-14-10-24-37.gh-issue-103532.H1djkd.rst @@ -0,0 +1,4 @@ +The ``TKINTER_PROTECT_LOADTK`` macro is no longer defined or used in the +``_tkinter`` module. It was previously only defined when building against +Tk 8.4.13 and older, but Tk older than 8.5.12 has been unsupported since +gh-issue-91152. diff --git a/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst b/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst new file mode 100644 index 00000000000000..172d66163d42e6 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst @@ -0,0 +1,2 @@ +:c:func:`PyObject_GC_Resize` should calculate preheader size if needed. +Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/C API/2023-02-18-00-55-14.gh-issue-102013.83mrtI.rst b/Misc/NEWS.d/next/C API/2023-02-18-00-55-14.gh-issue-102013.83mrtI.rst deleted file mode 100644 index 0350237ebc7390..00000000000000 --- a/Misc/NEWS.d/next/C API/2023-02-18-00-55-14.gh-issue-102013.83mrtI.rst +++ /dev/null @@ -1 +0,0 @@ -Add a new (unstable) C-API function for iterating over GC'able objects using a callback: ``PyUnstable_VisitObjects``. diff --git a/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst b/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst new file mode 100644 index 00000000000000..28c77b6816af87 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst @@ -0,0 +1 @@ +Add a new C-API function to eagerly assign a version tag to a PyTypeObject: ``PyUnstable_Type_AssignVersionTag()``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst b/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst new file mode 100644 index 00000000000000..13c054fdd68276 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst @@ -0,0 +1 @@ +Fix :func:`!pause_reading` to work when called from :func:`!connection_made` in :mod:`asyncio`. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst b/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst new file mode 100644 index 00000000000000..d65e0f3db9d6f5 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst @@ -0,0 +1,2 @@ +``len()`` for 0-dimensional :class:`memoryview`` objects (such as ``memoryview(ctypes.c_uint8(42))``) now raises a :exc:`TypeError`. +Previously this returned ``1``, which was not consistent with ``mem_0d[0]`` raising an :exc:`IndexError``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-06-23-32-11.gh-issue-98003.xWE0Yu.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-06-23-32-11.gh-issue-98003.xWE0Yu.rst new file mode 100644 index 00000000000000..f9e71bc1344bb3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-10-06-23-32-11.gh-issue-98003.xWE0Yu.rst @@ -0,0 +1,3 @@ +Complex function calls are now faster and consume no C stack +space. + diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst new file mode 100644 index 00000000000000..80076831badfea --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst @@ -0,0 +1,2 @@ +Bypass instance attribute access of ``__name__`` in ``repr`` of +:class:`weakref.ref`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-02-26-11-43-56.gh-issue-102255.cRnI5x.rst b/Misc/NEWS.d/next/Core and Builtins/2023-02-26-11-43-56.gh-issue-102255.cRnI5x.rst deleted file mode 100644 index daabc3c15f6ee2..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-02-26-11-43-56.gh-issue-102255.cRnI5x.rst +++ /dev/null @@ -1 +0,0 @@ -Improve build support for the Xbox. Patch by Max Bachmann. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-02-27-15-48-31.gh-issue-102300.8o-_Mt.rst b/Misc/NEWS.d/next/Core and Builtins/2023-02-27-15-48-31.gh-issue-102300.8o-_Mt.rst deleted file mode 100644 index 4227014582b7e6..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-02-27-15-48-31.gh-issue-102300.8o-_Mt.rst +++ /dev/null @@ -1 +0,0 @@ -Reuse operands with refcount of 1 in float specializations of BINARY_OP. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-02-13-49-21.gh-issue-102281.QCuu2N.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-02-13-49-21.gh-issue-102281.QCuu2N.rst deleted file mode 100644 index b0269dd3d92bd5..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-03-02-13-49-21.gh-issue-102281.QCuu2N.rst +++ /dev/null @@ -1 +0,0 @@ -Fix potential nullptr dereference and use of uninitialized memory in fileutils. Patch by Max Bachmann. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-04-06-48-34.gh-issue-102397.ACJaOf.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-04-06-48-34.gh-issue-102397.ACJaOf.rst deleted file mode 100644 index db0b3f32c2ec0b..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-03-04-06-48-34.gh-issue-102397.ACJaOf.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix segfault from race condition in signal handling during garbage collection. -Patch by Kumar Aditya. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-09-13-57-35.gh-issue-90997.J-Yhn2.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-09-13-57-35.gh-issue-90997.J-Yhn2.rst deleted file mode 100644 index 723a4b9fa777d6..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-03-09-13-57-35.gh-issue-90997.J-Yhn2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Shrink the number of inline :opcode:`CACHE` entries used by -:opcode:`LOAD_GLOBAL`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-14-00-11-46.gh-issue-102594.BjU-m2.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-14-00-11-46.gh-issue-102594.BjU-m2.rst deleted file mode 100644 index 0b95b5ec98e811..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-03-14-00-11-46.gh-issue-102594.BjU-m2.rst +++ /dev/null @@ -1 +0,0 @@ -Add note to exception raised in ``PyErr_SetObject`` when normalization fails. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-16-14-44-29.gh-issue-102755.j1GxlV.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-16-14-44-29.gh-issue-102755.j1GxlV.rst deleted file mode 100644 index d09af8d060d405..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-03-16-14-44-29.gh-issue-102755.j1GxlV.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :c:func:`PyErr_DisplayException` which takes just an exception instance, -to replace the legacy :c:func:`PyErr_Display` which takes the ``(typ, exc, -tb)`` triplet. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-16-17-24-44.gh-issue-102701.iNGVaS.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-16-17-24-44.gh-issue-102701.iNGVaS.rst deleted file mode 100644 index 4e1f31893377ba..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-03-16-17-24-44.gh-issue-102701.iNGVaS.rst +++ /dev/null @@ -1 +0,0 @@ -Fix overflow when creating very large dict. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-25-23-24-38.gh-issue-88691.2SWBd1.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-25-23-24-38.gh-issue-88691.2SWBd1.rst new file mode 100644 index 00000000000000..761d45b0a3a84f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-03-25-23-24-38.gh-issue-88691.2SWBd1.rst @@ -0,0 +1 @@ +Reduce the number of inline :opcode:`CACHE` entries for :opcode:`CALL`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-31-17-24-03.gh-issue-103082.isRUcV.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-31-17-24-03.gh-issue-103082.isRUcV.rst new file mode 100644 index 00000000000000..631ef4c7890450 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-03-31-17-24-03.gh-issue-103082.isRUcV.rst @@ -0,0 +1 @@ +Implement :pep:`669` Low Impact Monitoring for CPython. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-01-00-46-31.gh-issue-102700.493NB4.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-01-00-46-31.gh-issue-102700.493NB4.rst new file mode 100644 index 00000000000000..46951486e4f9c9 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-01-00-46-31.gh-issue-102700.493NB4.rst @@ -0,0 +1 @@ +Allow built-in modules to be submodules. This allows submodules to be statically linked into a CPython binary. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst new file mode 100644 index 00000000000000..c4d8ce75b35a30 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst @@ -0,0 +1,3 @@ +The implementation of PEP-683 which adds Immortal Objects by using a fixed +reference count that skips reference counting to make objects truly +immutable. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst new file mode 100644 index 00000000000000..347c91d973e5ce --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst @@ -0,0 +1,3 @@ +We've replaced our use of ``_PyRuntime.tstate_current`` with a thread-local +variable. This is a fairly low-level implementation detail, and there +should be no change in behavior. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-08-17-13-07.gh-issue-103242.ysI1b3.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-08-17-13-07.gh-issue-103242.ysI1b3.rst new file mode 100644 index 00000000000000..38b107f3be174f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-08-17-13-07.gh-issue-103242.ysI1b3.rst @@ -0,0 +1,2 @@ +Migrate :meth:`~ssl.SSLContext.set_ecdh_curve` method not to use deprecated +OpenSSL APIs. Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-09-22-21-57.gh-issue-77757._Ow-u2.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-09-22-21-57.gh-issue-77757._Ow-u2.rst new file mode 100644 index 00000000000000..85c8ecf7de8d1b --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-09-22-21-57.gh-issue-77757._Ow-u2.rst @@ -0,0 +1,3 @@ +Exceptions raised in a typeobject's ``__set_name__`` method are no longer +wrapped by a :exc:`RuntimeError`. Context information is added to the +exception as a :pep:`678` note. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-18-51.gh-issue-103488.vYvlHD.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-18-51.gh-issue-103488.vYvlHD.rst new file mode 100644 index 00000000000000..e7daa104e57105 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-18-51.gh-issue-103488.vYvlHD.rst @@ -0,0 +1,3 @@ +Change the internal offset distinguishing yield and return target addresses, +so that the instruction pointer is correct for exception handling and other +stack unwinding. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst new file mode 100644 index 00000000000000..9d75de1565a170 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst @@ -0,0 +1,4 @@ +Add :opcode:`LOAD_SUPER_ATTR` (and a specialization for ``super().method()``) to +speed up ``super().method()`` and ``super().attr``. This makes +``super().method()`` roughly 2.3x faster and brings it within 20% of the +performance of a simple method call. Patch by Vladimir Matveev and Carl Meyer. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-13-00-58-55.gh-issue-103492.P4k0Ay.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-13-00-58-55.gh-issue-103492.P4k0Ay.rst new file mode 100644 index 00000000000000..929650968173e7 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-13-00-58-55.gh-issue-103492.P4k0Ay.rst @@ -0,0 +1 @@ +Clarify :exc:`SyntaxWarning` with literal ``is`` comparison by specifying which literal is problematic, since comparisons using ``is`` with e.g. None and bool literals are idiomatic. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst new file mode 100644 index 00000000000000..730c6cd40d7235 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst @@ -0,0 +1 @@ +Fix bug in line numbers of instructions emitted for :keyword:`except* <except_star>`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst new file mode 100644 index 00000000000000..5b1bcc4a680fc3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst @@ -0,0 +1 @@ +Clarify the error message raised when the called part of a class pattern isn't actually a class. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst new file mode 100644 index 00000000000000..35eceb83816bcb --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst @@ -0,0 +1 @@ +Implement the required C tokenizer changes for PEP 701. Patch by Pablo Galindo Salgado, Lysandros Nikolaou, Batuhan Taskaya, Marta Gómez Macías and sunmy2019. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst new file mode 100644 index 00000000000000..af733a8207a2c1 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst @@ -0,0 +1 @@ +Do not wrap a single exception raised from a ``try-except*`` construct in an :exc:`ExceptionGroup`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst new file mode 100644 index 00000000000000..15cb6c64adbab1 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst @@ -0,0 +1 @@ +Change the error range for invalid bytes literals. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst new file mode 100644 index 00000000000000..c48348798e7142 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst @@ -0,0 +1,3 @@ +Optimized asyncio Task creation by deferring expensive string formatting +(task name generation) from Task creation to the first time ``get_name`` is +called. This makes asyncio benchmarks up to 5% faster. 
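To illustrate the deferred task-name formatting described in the gh-issue-103793 entry above: user-visible behaviour is unchanged, only the point at which the ``Task-<N>`` string is built moves to the first ``get_name()`` call. A minimal sketch::

    import asyncio

    async def main():
        # The default name is still "Task-<N>"; it is now formatted lazily,
        # on the first get_name() call, instead of at Task creation time.
        task = asyncio.create_task(asyncio.sleep(0))
        print(task.get_name())   # e.g. "Task-2" (the counter value varies)
        await task

    asyncio.run(main())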
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-24-21-47-38.gh-issue-103801.WaBanq.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-21-47-38.gh-issue-103801.WaBanq.rst new file mode 100644 index 00000000000000..6f07d72fafdfc3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-21-47-38.gh-issue-103801.WaBanq.rst @@ -0,0 +1 @@ +Adds three minor linting fixes to the wasm module that were caught by ruff. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-26-17-56-18.gh-issue-103895.ESB6tn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-26-17-56-18.gh-issue-103895.ESB6tn.rst new file mode 100644 index 00000000000000..6fed304c9132b3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-26-17-56-18.gh-issue-103895.ESB6tn.rst @@ -0,0 +1,3 @@ +Improve handling of edge cases in showing ``Exception.__notes__``. Ensures +that the messages always end with a newline and that string/bytes are not +exploded over multiple lines. Patch by Carey Metcalfe. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-02-26-13-12-55.gh-issue-102213.fTH8X7.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-08-08-05.gh-issue-102213.nfH-4C.rst similarity index 100% rename from Misc/NEWS.d/next/Core and Builtins/2023-02-26-13-12-55.gh-issue-102213.fTH8X7.rst rename to Misc/NEWS.d/next/Core and Builtins/2023-05-01-08-08-05.gh-issue-102213.nfH-4C.rst diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-01-12-03-52.gh-issue-104018.PFxGS4.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-12-03-52.gh-issue-104018.PFxGS4.rst new file mode 100644 index 00000000000000..f3cadaee0e32d9 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-12-03-52.gh-issue-104018.PFxGS4.rst @@ -0,0 +1 @@ +Disallow the "z" format specifier in %-format of bytes objects. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-01-14-10-38.gh-issue-104028.dxfh13.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-14-10-38.gh-issue-104028.dxfh13.rst new file mode 100644 index 00000000000000..9c35ea88499dce --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-14-10-38.gh-issue-104028.dxfh13.rst @@ -0,0 +1,2 @@ +Reduce object creation while calling the callback function from gc. +Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/Documentation/2023-03-07-23-30-29.gh-issue-99202.hhiAJF.rst b/Misc/NEWS.d/next/Documentation/2023-03-07-23-30-29.gh-issue-99202.hhiAJF.rst new file mode 100644 index 00000000000000..1569e815ee50fa --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-03-07-23-30-29.gh-issue-99202.hhiAJF.rst @@ -0,0 +1 @@ +Fix the extension type example from the documentation so that it compiles in C++20 mode. diff --git a/Misc/NEWS.d/next/Documentation/2023-03-10-04-59-35.gh-issue-86094.zOYdy8.rst b/Misc/NEWS.d/next/Documentation/2023-03-10-04-59-35.gh-issue-86094.zOYdy8.rst new file mode 100644 index 00000000000000..39461f3f84c9ac --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-03-10-04-59-35.gh-issue-86094.zOYdy8.rst @@ -0,0 +1,2 @@ +Add support for Unicode Path Extra Field in ZipFile.
Patch by Yeojin Kim +and Andrea Giudiceandrea diff --git a/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst b/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst new file mode 100644 index 00000000000000..619505cf6ee5b8 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst @@ -0,0 +1 @@ +Clarify documentation about the url parameter to urllib.request.urlopen and urllib.request.Request needing to be encoded properly. diff --git a/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst b/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst new file mode 100644 index 00000000000000..6dc0a1cb5a3e4f --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst @@ -0,0 +1,2 @@ +Mention the new way of typing ``**kwargs`` with ``Unpack`` and ``TypedDict`` +introduced in :pep:`692`. diff --git a/Misc/NEWS.d/next/IDLE/2023-04-30-20-01-18.gh-issue-88496.y65vUb.rst b/Misc/NEWS.d/next/IDLE/2023-04-30-20-01-18.gh-issue-88496.y65vUb.rst new file mode 100644 index 00000000000000..4f390d189d23b5 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2023-04-30-20-01-18.gh-issue-88496.y65vUb.rst @@ -0,0 +1 @@ +Fix IDLE test hang on macOS. diff --git a/Misc/NEWS.d/next/Library/2018-07-16-14-10-29.bpo-22708.592iRR.rst b/Misc/NEWS.d/next/Library/2018-07-16-14-10-29.bpo-22708.592iRR.rst new file mode 100644 index 00000000000000..00bcf38bbcdf51 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-07-16-14-10-29.bpo-22708.592iRR.rst @@ -0,0 +1,3 @@ +http.client CONNECT method tunnel improvements: Use HTTP 1.1 protocol; send +a matching Host: header with CONNECT, if one is not provided; convert IDN +domain names to Punycode. Patch by Michael Handler. diff --git a/Misc/NEWS.d/next/Library/2019-03-15-22-50-27.bpo-36305.Pbkv6u.rst b/Misc/NEWS.d/next/Library/2019-03-15-22-50-27.bpo-36305.Pbkv6u.rst deleted file mode 100644 index d9360496ac24cb..00000000000000 --- a/Misc/NEWS.d/next/Library/2019-03-15-22-50-27.bpo-36305.Pbkv6u.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix handling of Windows filenames that resemble drives, such as ``./a:b``, -in :mod:`pathlib`. diff --git a/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst b/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst new file mode 100644 index 00000000000000..6e690f996569a4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst @@ -0,0 +1 @@ +Make :func:`asyncio.subprocess.Process.communicate` close the subprocess's stdin even when called with ``input=None``. diff --git a/Misc/NEWS.d/next/Library/2021-11-07-15-31-25.bpo-23041.564i32.rst b/Misc/NEWS.d/next/Library/2021-11-07-15-31-25.bpo-23041.564i32.rst new file mode 100644 index 00000000000000..53c32d397b206b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-11-07-15-31-25.bpo-23041.564i32.rst @@ -0,0 +1,2 @@ +Add :data:`~csv.QUOTE_STRINGS` and :data:`~csv.QUOTE_NOTNULL` to the suite +of :mod:`csv` module quoting styles. diff --git a/Misc/NEWS.d/next/Library/2021-12-03-23-00-56.bpo-44844.tvg2VY.rst b/Misc/NEWS.d/next/Library/2021-12-03-23-00-56.bpo-44844.tvg2VY.rst new file mode 100644 index 00000000000000..f0c91236dfdf21 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-12-03-23-00-56.bpo-44844.tvg2VY.rst @@ -0,0 +1 @@ +Enables :mod:`webbrowser` to detect and launch Microsoft Edge browser.
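For the :mod:`csv` quoting-style entry above (bpo-23041), a minimal sketch of the new constants, assuming a build that includes them::

    import csv
    import io

    buf = io.StringIO()
    # QUOTE_NOTNULL quotes every field except None, which becomes an empty,
    # unquoted field; QUOTE_STRINGS would quote only the str field.
    writer = csv.writer(buf, quoting=csv.QUOTE_NOTNULL)
    writer.writerow(["spam", 42, None])
    print(buf.getvalue())   # "spam","42",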
diff --git a/Misc/NEWS.d/next/Library/2022-06-30-21-28-41.gh-issue-94440.LtgX0d.rst b/Misc/NEWS.d/next/Library/2022-06-30-21-28-41.gh-issue-94440.LtgX0d.rst deleted file mode 100644 index 3eee82e59dfafb..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-06-30-21-28-41.gh-issue-94440.LtgX0d.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a :mod:`concurrent.futures.process` bug where ``ProcessPoolExecutor`` shutdown -could hang after a future has been quickly submitted and canceled. diff --git a/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst b/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst new file mode 100644 index 00000000000000..7719b74b8e5ef1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst @@ -0,0 +1 @@ +Convert private :meth:`_posixsubprocess.fork_exec` to use Argument Clinic. diff --git a/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst b/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst new file mode 100644 index 00000000000000..50a3d6a4629182 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst @@ -0,0 +1,2 @@ +Make :func:`tempfile.mkdtemp` return absolute paths when its *dir* +parameter is relative. diff --git a/Misc/NEWS.d/next/Library/2022-10-10-19-14-51.gh-issue-98169.DBWIxL.rst b/Misc/NEWS.d/next/Library/2022-10-10-19-14-51.gh-issue-98169.DBWIxL.rst deleted file mode 100644 index 24c3aeecc83f18..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-10-19-14-51.gh-issue-98169.DBWIxL.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :func:`dataclasses.astuple` crash when :class:`collections.defaultdict` -is present in the attributes. diff --git a/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst b/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst new file mode 100644 index 00000000000000..f67bffcb0ddc6c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst @@ -0,0 +1 @@ +Remove the long-deprecated ``imp`` module. diff --git a/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst b/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst new file mode 100644 index 00000000000000..1ad42d5c9aa53d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst @@ -0,0 +1,3 @@ +Respect the :class:`http.client.HTTPConnection` ``.debuglevel`` flag +in :class:`urllib.request.AbstractHTTPHandler` when its constructor +parameter ``debuglevel`` is not set. And do the same for ``*HTTPS*``. diff --git a/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst b/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst new file mode 100644 index 00000000000000..29c30848e09a83 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst @@ -0,0 +1 @@ +Remove the bundled setuptools wheel from ``ensurepip``, and stop installing setuptools in environments created by ``venv``. diff --git a/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst b/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst new file mode 100644 index 00000000000000..e85e7a4ff2e73a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst @@ -0,0 +1,4 @@ +Fix datetime.astimezone method return value when invoked on a naive datetime +instance that represents local time falling in a timezone transition gap. 
+PEP 495 requires that instances with fold=1 produce earlier times than those +with fold=0 in this case. diff --git a/Misc/NEWS.d/next/Library/2023-02-09-19-40-41.gh-issue-101673.mX-Ppq.rst b/Misc/NEWS.d/next/Library/2023-02-09-19-40-41.gh-issue-101673.mX-Ppq.rst deleted file mode 100644 index 4e673ba9811568..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-02-09-19-40-41.gh-issue-101673.mX-Ppq.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a :mod:`pdb` bug where ``ll`` clears the changes to local variables. diff --git a/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst b/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst new file mode 100644 index 00000000000000..6df69463931494 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst @@ -0,0 +1,2 @@ +Implement :func:`types.get_original_bases` to provide further introspection +for types. diff --git a/Misc/NEWS.d/next/Library/2023-02-17-21-14-40.gh-issue-78079.z3Szr6.rst b/Misc/NEWS.d/next/Library/2023-02-17-21-14-40.gh-issue-78079.z3Szr6.rst new file mode 100644 index 00000000000000..bbb9ac3e3f8faa --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-17-21-14-40.gh-issue-78079.z3Szr6.rst @@ -0,0 +1,3 @@ +Fix incorrect normalization of UNC device path roots, and partial UNC share +path roots, in :class:`pathlib.PurePath`. Pathlib no longer appends a +trailing slash to such paths. diff --git a/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst b/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst new file mode 100644 index 00000000000000..a8d66ea48c3278 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst @@ -0,0 +1,3 @@ +The :mod:`unittest` runner will now exit with status code 5 if no tests +were run. It is common for test runner misconfiguration to fail to find +any tests, this should be an error. diff --git a/Misc/NEWS.d/next/Library/2023-02-20-16-47-56.gh-issue-102069.FS7f1j.rst b/Misc/NEWS.d/next/Library/2023-02-20-16-47-56.gh-issue-102069.FS7f1j.rst deleted file mode 100644 index 04c87e515cca93..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-02-20-16-47-56.gh-issue-102069.FS7f1j.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``__weakref__`` descriptor generation for custom dataclasses. diff --git a/Misc/NEWS.d/next/Library/2023-02-21-11-56-16.gh-issue-102103.Dj0WEj.rst b/Misc/NEWS.d/next/Library/2023-02-21-11-56-16.gh-issue-102103.Dj0WEj.rst deleted file mode 100644 index feba433f5bee89..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-02-21-11-56-16.gh-issue-102103.Dj0WEj.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add ``module`` argument to :func:`dataclasses.make_dataclass` and make -classes produced by it pickleable. diff --git a/Misc/NEWS.d/next/Library/2023-02-21-14-57-34.gh-issue-102114.uUDQzb.rst b/Misc/NEWS.d/next/Library/2023-02-21-14-57-34.gh-issue-102114.uUDQzb.rst new file mode 100644 index 00000000000000..4140c9a96cd272 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-21-14-57-34.gh-issue-102114.uUDQzb.rst @@ -0,0 +1 @@ +Functions in the :mod:`dis` module that accept a source code string as argument now print a more concise traceback when the string contains a syntax or indentation error. 
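The :func:`types.get_original_bases` entry above (gh-issue-101688) is easiest to see with a small generic class: ``__bases__`` loses the subscription while the new function preserves it. A minimal sketch, assuming a build that includes the function::

    import types
    from typing import Generic, TypeVar

    T = TypeVar("T")

    class Box(Generic[T]):
        pass

    class IntBox(Box[int]):
        pass

    print(IntBox.__bases__)                   # (<class 'Box'>,)
    print(types.get_original_bases(IntBox))   # (Box[int],)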
diff --git a/Misc/NEWS.d/next/Library/2023-02-26-17-29-57.gh-issue-79940.SAfmAy.rst b/Misc/NEWS.d/next/Library/2023-02-26-17-29-57.gh-issue-79940.SAfmAy.rst deleted file mode 100644 index 31b8ead8433279..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-02-26-17-29-57.gh-issue-79940.SAfmAy.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :func:`inspect.getasyncgenstate` and :func:`inspect.getasyncgenlocals`. -Patch by Thomas Krennwallner. diff --git a/Misc/NEWS.d/next/Library/2023-03-03-19-53-08.gh-issue-102378.kRdOZc.rst b/Misc/NEWS.d/next/Library/2023-03-03-19-53-08.gh-issue-102378.kRdOZc.rst deleted file mode 100644 index d30f65f30d109a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-03-19-53-08.gh-issue-102378.kRdOZc.rst +++ /dev/null @@ -1 +0,0 @@ -Private helper method ``inspect._signature_strip_non_python_syntax`` will no longer strip ``/`` from the input string. diff --git a/Misc/NEWS.d/next/Library/2023-03-06-18-49-57.gh-issue-101362.eSSy6L.rst b/Misc/NEWS.d/next/Library/2023-03-06-18-49-57.gh-issue-101362.eSSy6L.rst new file mode 100644 index 00000000000000..87617a503c0dba --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-06-18-49-57.gh-issue-101362.eSSy6L.rst @@ -0,0 +1,2 @@ +Speed up :class:`pathlib.Path` construction by omitting the path anchor from +the internal list of path parts. diff --git a/Misc/NEWS.d/next/Library/2023-03-08-23-08-38.gh-issue-102519.wlcsFI.rst b/Misc/NEWS.d/next/Library/2023-03-08-23-08-38.gh-issue-102519.wlcsFI.rst deleted file mode 100644 index f47e4f70b1301d..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-08-23-08-38.gh-issue-102519.wlcsFI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :func:`os.listdrives`, :func:`os.listvolumes` and :func:`os.listmounts` -functions on Windows for enumerating drives, volumes and mount points diff --git a/Misc/NEWS.d/next/Library/2023-03-10-13-21-16.gh-issue-102578.-gujoI.rst b/Misc/NEWS.d/next/Library/2023-03-10-13-21-16.gh-issue-102578.-gujoI.rst deleted file mode 100644 index 7307148d9a81ef..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-10-13-21-16.gh-issue-102578.-gujoI.rst +++ /dev/null @@ -1,4 +0,0 @@ -Speed up setting or deleting mutable attributes on non-dataclass subclasses of -frozen dataclasses. Due to the implementation of ``__setattr__`` and -``__delattr__`` for frozen dataclasses, this previously had a time complexity -of ``O(n)``. It now has a time complexity of ``O(1)``. diff --git a/Misc/NEWS.d/next/Library/2023-03-10-13-51-21.gh-issue-100112.VHh4mw.rst b/Misc/NEWS.d/next/Library/2023-03-10-13-51-21.gh-issue-100112.VHh4mw.rst deleted file mode 100644 index eff77c40e30c48..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-10-13-51-21.gh-issue-100112.VHh4mw.rst +++ /dev/null @@ -1 +0,0 @@ -:meth:`asyncio.Task.get_coro` now always returns a coroutine when wrapping an awaitable object. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2023-03-13-12-05-55.gh-issue-102615.NcA_ZL.rst b/Misc/NEWS.d/next/Library/2023-03-13-12-05-55.gh-issue-102615.NcA_ZL.rst deleted file mode 100644 index 333068369bc4f7..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-13-12-05-55.gh-issue-102615.NcA_ZL.rst +++ /dev/null @@ -1,3 +0,0 @@ -Typing: Improve the ``repr`` of generic aliases for classes generic over a -:class:`~typing.ParamSpec`. (Use square brackets to represent a parameter -list.) 
diff --git a/Misc/NEWS.d/next/Library/2023-03-13-18-27-00.gh-issue-102670.GyoThv.rst b/Misc/NEWS.d/next/Library/2023-03-13-18-27-00.gh-issue-102670.GyoThv.rst deleted file mode 100644 index 3de09f86754f3e..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-13-18-27-00.gh-issue-102670.GyoThv.rst +++ /dev/null @@ -1,2 +0,0 @@ -Optimized fmean(), correlation(), covariance(), and linear_regression() -using the new math.sumprod() function. diff --git a/Misc/NEWS.d/next/Library/2023-03-15-12-18-07.gh-issue-97696.DtnpIC.rst b/Misc/NEWS.d/next/Library/2023-03-15-12-18-07.gh-issue-97696.DtnpIC.rst new file mode 100644 index 00000000000000..0b3854d74eb991 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-15-12-18-07.gh-issue-97696.DtnpIC.rst @@ -0,0 +1,6 @@ +Implemented an eager task factory in asyncio. +When used as a task factory on an event loop, it performs eager execution of +coroutines. Coroutines that are able to complete synchronously (e.g. return or +raise without blocking) are returned immediately as a finished task, and the +task is never scheduled to the event loop. If the coroutine blocks, the +(pending) task is scheduled and returned. diff --git a/Misc/NEWS.d/next/Library/2023-03-16-08-17-29.gh-issue-102748.WNACpI.rst b/Misc/NEWS.d/next/Library/2023-03-16-08-17-29.gh-issue-102748.WNACpI.rst deleted file mode 100644 index b1dc67f38fe85d..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-16-08-17-29.gh-issue-102748.WNACpI.rst +++ /dev/null @@ -1,3 +0,0 @@ -:func:`asyncio.iscoroutine` now returns ``False`` for generators as -:mod:`asyncio` does not support legacy generator-based coroutines. -Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2023-03-16-16-43-04.gh-issue-78530.Lr8eq_.rst b/Misc/NEWS.d/next/Library/2023-03-16-16-43-04.gh-issue-78530.Lr8eq_.rst deleted file mode 100644 index bdb46d08c5c4af..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-03-16-16-43-04.gh-issue-78530.Lr8eq_.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`asyncio.wait` now accepts generators yielding tasks. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst b/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst new file mode 100644 index 00000000000000..48a105a4a17b29 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst @@ -0,0 +1,4 @@ +The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`, +have a new *filter* argument that allows limiting tar features that may be +surprising or dangerous, such as creating files outside the destination +directory. See :ref:`tarfile-extraction-filter` for details. diff --git a/Misc/NEWS.d/next/Library/2023-03-24-20-49-48.gh-issue-103000.6eVNZI.rst b/Misc/NEWS.d/next/Library/2023-03-24-20-49-48.gh-issue-103000.6eVNZI.rst new file mode 100644 index 00000000000000..15f16d9eb4c1bf --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-24-20-49-48.gh-issue-103000.6eVNZI.rst @@ -0,0 +1,2 @@ +Improve performance of :func:`dataclasses.astuple` and +:func:`dataclasses.asdict` in cases where the contents are common Python types.
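For the eager task factory entry above (gh-issue-97696), a minimal sketch of the intended usage; the ``asyncio.eager_task_factory`` name is the spelling used in the 3.12 release and should be treated as an assumption here::

    import asyncio

    async def quick():
        return 42   # completes without ever blocking

    async def main():
        loop = asyncio.get_running_loop()
        # Assumed factory name; with it installed, coroutines that finish
        # synchronously are returned as already-completed tasks and are
        # never scheduled on the event loop.
        loop.set_task_factory(asyncio.eager_task_factory)
        task = asyncio.create_task(quick())
        print(task.done())   # True: the coroutine ran eagerly
        print(await task)    # 42

    asyncio.run(main())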
diff --git a/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst b/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst new file mode 100644 index 00000000000000..dcac1a28ca5847 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst @@ -0,0 +1,3 @@ +Add *entrypoint* keyword-only parameter to +:meth:`sqlite3.Connection.load_extension`, for overriding the SQLite +extension entry point. Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-03-31-01-13-00.gh-issue-103143.6eMluy.rst b/Misc/NEWS.d/next/Library/2023-03-31-01-13-00.gh-issue-103143.6eMluy.rst new file mode 100644 index 00000000000000..32bd62d27c7c6d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-31-01-13-00.gh-issue-103143.6eMluy.rst @@ -0,0 +1 @@ +Polish the help messages and docstrings of :mod:`pdb`. diff --git a/Misc/NEWS.d/next/Library/2023-04-01-23-01-31.gh-issue-103176.FBsdxa.rst b/Misc/NEWS.d/next/Library/2023-04-01-23-01-31.gh-issue-103176.FBsdxa.rst new file mode 100644 index 00000000000000..b89f9bae595457 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-01-23-01-31.gh-issue-103176.FBsdxa.rst @@ -0,0 +1,2 @@ +:func:`sys._current_exceptions` now returns a mapping from thread-id to an +exception instance, rather than to a ``(typ, exc, tb)`` tuple. diff --git a/Misc/NEWS.d/next/Library/2023-04-02-17-51-08.gh-issue-103193.xrZbM1.rst b/Misc/NEWS.d/next/Library/2023-04-02-17-51-08.gh-issue-103193.xrZbM1.rst new file mode 100644 index 00000000000000..f0b76a605a5610 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-02-17-51-08.gh-issue-103193.xrZbM1.rst @@ -0,0 +1,2 @@ +Improve performance of :func:`inspect.getattr_static`. Patch by Alex +Waygood. diff --git a/Misc/NEWS.d/next/Library/2023-04-02-22-04-26.gh-issue-75586.526iJm.rst b/Misc/NEWS.d/next/Library/2023-04-02-22-04-26.gh-issue-75586.526iJm.rst new file mode 100644 index 00000000000000..8ec568ec4e4775 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-02-22-04-26.gh-issue-75586.526iJm.rst @@ -0,0 +1 @@ +Fix various Windows-specific issues with ``shutil.which``. diff --git a/Misc/NEWS.d/next/Library/2023-04-03-21-08-53.gh-issue-103220.OW_Bj5.rst b/Misc/NEWS.d/next/Library/2023-04-03-21-08-53.gh-issue-103220.OW_Bj5.rst new file mode 100644 index 00000000000000..9cf26c26873b2a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-03-21-08-53.gh-issue-103220.OW_Bj5.rst @@ -0,0 +1,2 @@ +Fix issue where :func:`os.path.join` added a slash when joining onto an +incomplete UNC drive with a trailing slash on Windows. diff --git a/Misc/NEWS.d/next/Library/2023-04-03-23-43-12.gh-issue-103092.3xqk4y.rst b/Misc/NEWS.d/next/Library/2023-04-03-23-43-12.gh-issue-103092.3xqk4y.rst new file mode 100644 index 00000000000000..e7586a223c1415 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-03-23-43-12.gh-issue-103092.3xqk4y.rst @@ -0,0 +1 @@ +Isolate :mod:`!_socket` (apply :pep:`687`). Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-03-23-44-34.gh-issue-102978.gy9eVk.rst b/Misc/NEWS.d/next/Library/2023-04-03-23-44-34.gh-issue-102978.gy9eVk.rst new file mode 100644 index 00000000000000..df63af10a385eb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-03-23-44-34.gh-issue-102978.gy9eVk.rst @@ -0,0 +1,3 @@ +Fixes :func:`unittest.mock.patch` not enforcing function signatures for methods +decorated with ``@classmethod`` or ``@staticmethod`` when patch is called with +``autospec=True``. 
diff --git a/Misc/NEWS.d/next/Library/2023-04-04-12-43-38.gh-issue-93910.jurMzv.rst b/Misc/NEWS.d/next/Library/2023-04-04-12-43-38.gh-issue-93910.jurMzv.rst new file mode 100644 index 00000000000000..783aefae0770a9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-04-12-43-38.gh-issue-93910.jurMzv.rst @@ -0,0 +1 @@ +Remove deprecation of enum ``member.member`` access. diff --git a/Misc/NEWS.d/next/Library/2023-04-04-21-27-51.gh-issue-103092.7s7Bzf.rst b/Misc/NEWS.d/next/Library/2023-04-04-21-27-51.gh-issue-103092.7s7Bzf.rst new file mode 100644 index 00000000000000..39c62ffbe8c659 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-04-21-27-51.gh-issue-103092.7s7Bzf.rst @@ -0,0 +1 @@ +Adapt the :mod:`winsound` extension module to :pep:`687`. diff --git a/Misc/NEWS.d/next/Library/2023-04-04-21-44-25.gh-issue-103092.Dz0_Xn.rst b/Misc/NEWS.d/next/Library/2023-04-04-21-44-25.gh-issue-103092.Dz0_Xn.rst new file mode 100644 index 00000000000000..7bd191e3c22b2b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-04-21-44-25.gh-issue-103092.Dz0_Xn.rst @@ -0,0 +1 @@ +Adapt the :mod:`msvcrt` extension module to :pep:`687`. diff --git a/Misc/NEWS.d/next/Library/2023-04-05-01-28-53.gh-issue-103225.QD3JVU.rst b/Misc/NEWS.d/next/Library/2023-04-05-01-28-53.gh-issue-103225.QD3JVU.rst new file mode 100644 index 00000000000000..5d1a063acdeb8c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-05-01-28-53.gh-issue-103225.QD3JVU.rst @@ -0,0 +1 @@ +Fix a bug in :mod:`pdb` when displaying line numbers of module-level source code. diff --git a/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst b/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst new file mode 100644 index 00000000000000..62b4364c2b1665 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst @@ -0,0 +1 @@ +Improve performance of :func:`ast.get_source_segment`. diff --git a/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst b/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst new file mode 100644 index 00000000000000..64ae5b5b6d564b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst @@ -0,0 +1 @@ +Support ``sys.last_exc`` in :mod:`idlelib`. diff --git a/Misc/NEWS.d/next/Library/2023-04-06-17-28-36.gh-issue-103256.1syxfs.rst b/Misc/NEWS.d/next/Library/2023-04-06-17-28-36.gh-issue-103256.1syxfs.rst new file mode 100644 index 00000000000000..894c046dcdf0fd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-06-17-28-36.gh-issue-103256.1syxfs.rst @@ -0,0 +1,6 @@ +Fixed a bug that caused :mod:`hmac` to raise an exception when the requested +hash algorithm was not available in OpenSSL despite being available +separately as part of ``hashlib`` itself. It now falls back properly to the +built-in. This could happen when, for example, your OpenSSL does not include +SHA3 support and you want to compute ``hmac.digest(b'K', b'M', +'sha3_256')``. diff --git a/Misc/NEWS.d/next/Library/2023-04-07-15-09-26.gh-issue-74690.0f886b.rst b/Misc/NEWS.d/next/Library/2023-04-07-15-09-26.gh-issue-74690.0f886b.rst new file mode 100644 index 00000000000000..0a103ae11970d4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-07-15-09-26.gh-issue-74690.0f886b.rst @@ -0,0 +1,3 @@ +The members of a runtime-checkable protocol are now considered "frozen" at +runtime as soon as the class has been created. See +:ref:`"What's new in Python 3.12" <whatsnew-typing-py312>` for more details.
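The :mod:`hmac` fallback described in the gh-issue-103256 entry above means the call quoted in that entry now succeeds even when the linked OpenSSL lacks SHA-3, as long as :mod:`hashlib` provides the algorithm::

    import hmac

    # Falls back to hashlib's built-in implementation when OpenSSL does not
    # expose the requested algorithm, instead of raising.
    print(hmac.digest(b'K', b'M', 'sha3_256').hex())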
diff --git a/Misc/NEWS.d/next/Library/2023-04-07-15-15-40.gh-issue-74690.un84hh.rst b/Misc/NEWS.d/next/Library/2023-04-07-15-15-40.gh-issue-74690.un84hh.rst new file mode 100644 index 00000000000000..48f11aac692ddb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-07-15-15-40.gh-issue-74690.un84hh.rst @@ -0,0 +1,8 @@ +The performance of :func:`isinstance` checks against +:func:`runtime-checkable protocols <typing.runtime_checkable>` has been +considerably improved for protocols that only have a few members. To achieve +this improvement, several internal implementation details of the +:mod:`typing` module have been refactored, including +``typing._ProtocolMeta.__instancecheck__``, +``typing._is_callable_members_only``, and ``typing._get_protocol_attrs``. +Patches by Alex Waygood. diff --git a/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst b/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst new file mode 100644 index 00000000000000..0f2108fee763d0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst @@ -0,0 +1 @@ +Adapt the :mod:`winreg` extension module to :pep:`687`. diff --git a/Misc/NEWS.d/next/Library/2023-04-08-01-33-12.gh-issue-103357.vjin28.rst b/Misc/NEWS.d/next/Library/2023-04-08-01-33-12.gh-issue-103357.vjin28.rst new file mode 100644 index 00000000000000..83dce56ed0b7c5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-08-01-33-12.gh-issue-103357.vjin28.rst @@ -0,0 +1,3 @@ +Added support for :class:`logging.Formatter` ``defaults`` parameter to +:func:`logging.config.dictConfig` and :func:`logging.config.fileConfig`. +Patch by Bar Harel. diff --git a/Misc/NEWS.d/next/Library/2023-04-09-06-59-36.gh-issue-103092.vskbro.rst b/Misc/NEWS.d/next/Library/2023-04-09-06-59-36.gh-issue-103092.vskbro.rst new file mode 100644 index 00000000000000..6977c1489a29cb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-09-06-59-36.gh-issue-103092.vskbro.rst @@ -0,0 +1 @@ +Isolate :mod:`!_collections` (apply :pep:`687`). Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst b/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst new file mode 100644 index 00000000000000..0b2b47af1cbaab --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst @@ -0,0 +1 @@ +Fix a bug in doc string generation in :func:`dataclasses.dataclass`. diff --git a/Misc/NEWS.d/next/Library/2023-04-12-06-00-02.gh-issue-103462.w6yBlM.rst b/Misc/NEWS.d/next/Library/2023-04-12-06-00-02.gh-issue-103462.w6yBlM.rst new file mode 100644 index 00000000000000..50758c89cc2856 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-12-06-00-02.gh-issue-103462.w6yBlM.rst @@ -0,0 +1,4 @@ +Fixed an issue with using :meth:`~asyncio.WriteTransport.writelines` in :mod:`asyncio` to send very +large payloads that exceed the amount of data that can be written in one +call to :meth:`socket.socket.send` or :meth:`socket.socket.sendmsg`, +resulting in the remaining buffer being left unwritten. diff --git a/Misc/NEWS.d/next/Library/2023-04-12-17-59-55.gh-issue-103365.UBEE0U.rst b/Misc/NEWS.d/next/Library/2023-04-12-17-59-55.gh-issue-103365.UBEE0U.rst new file mode 100644 index 00000000000000..4d69f6f6fff713 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-12-17-59-55.gh-issue-103365.UBEE0U.rst @@ -0,0 +1 @@ +Set default Flag boundary to ``STRICT`` and fix bitwise operations. 
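For the :class:`logging.Formatter` ``defaults`` entry above (gh-issue-103357), a sketch of the intended :func:`logging.config.dictConfig` usage; the ``defaults`` key inside the formatter section mirrors the ``defaults`` parameter of ``Formatter`` and is an assumption here::

    import logging
    import logging.config

    logging.config.dictConfig({
        "version": 1,
        "formatters": {
            "f": {
                "format": "%(ip)s %(message)s",
                # Used when a record has no "ip" attribute of its own.
                "defaults": {"ip": "-"},
            },
        },
        "handlers": {"h": {"class": "logging.StreamHandler", "formatter": "f"}},
        "root": {"handlers": ["h"], "level": "INFO"},
    })

    logging.info("hello")                        # "- hello"
    logging.info("hello", extra={"ip": "::1"})   # "::1 hello"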
diff --git a/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst b/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst new file mode 100644 index 00000000000000..264564d018ceb4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst @@ -0,0 +1,4 @@ +Add :meth:`~sqlite3.Connection.getconfig` and +:meth:`~sqlite3.Connection.setconfig` to :class:`~sqlite3.Connection` to +make configuration changes to a database connection. Patch by Erlend E. +Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst b/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst new file mode 100644 index 00000000000000..2c9d67e2c4bf71 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst @@ -0,0 +1 @@ +Update the bundled copy of pip to version 23.1.1. diff --git a/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst b/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst new file mode 100644 index 00000000000000..fe2267b7b79019 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst @@ -0,0 +1,3 @@ +Now creating :class:`inspect.Signature` objects with a positional-only +parameter with a default followed by a positional-or-keyword parameter +without one is impossible. diff --git a/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst b/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst new file mode 100644 index 00000000000000..69986c2a15b39e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst @@ -0,0 +1 @@ +Fixed a bug where :mod:`pdb` crashes when reading a source file with a different encoding by replacing :func:`io.open` with :func:`io.open_code`. The new method would also call into the hook set by :func:`PyFile_SetOpenCodeHook`. diff --git a/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst b/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst new file mode 100644 index 00000000000000..6d7c93ade9cd94 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst @@ -0,0 +1,12 @@ +Updated ``importlib.metadata`` with changes from ``importlib_metadata`` 5.2 +through 6.5.0, including: Support ``installed-files.txt`` for +``Distribution.files`` when present. ``PackageMetadata`` now stipulates an +additional ``get`` method allowing for easy querying of metadata keys that +may not be present. ``packages_distributions`` now honors packages and +modules with Python modules that are not ``.py`` sources (e.g. ``.pyc``, +``.so``). Expand protocol for ``PackageMetadata.get_all`` to match the +upstream implementation of ``email.message.Message.get_all`` in +python/typeshed#9620. Deprecated use of ``Distribution`` without defining +abstract methods. Deprecated expectation that +``PackageMetadata.__getitem__`` will return ``None`` for missing keys. In +the future, it will raise a ``KeyError``. diff --git a/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst b/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst new file mode 100644 index 00000000000000..2fa27e60b58efe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst @@ -0,0 +1,2 @@ +Attributes/methods are no longer shadowed by same-named enum members, +although they may be shadowed by enum.property's.
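Concretely, the :class:`inspect.Signature` change above (gh-issue-103556) means that constructing such a signature now raises :exc:`ValueError`. A minimal sketch; older interpreters simply accept the signature and print nothing::

    import inspect

    P = inspect.Parameter
    try:
        inspect.Signature([
            P("a", P.POSITIONAL_ONLY, default=1),
            P("b", P.POSITIONAL_OR_KEYWORD),   # no default after a default
        ])
    except ValueError as exc:
        print("rejected:", exc)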
diff --git a/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst b/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst new file mode 100644 index 00000000000000..b3b5085250f078 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst @@ -0,0 +1 @@ +Added Enum for months and days in the calendar module. diff --git a/Misc/NEWS.d/next/Library/2023-04-22-02-41-06.gh-issue-103673.oE7S_k.rst b/Misc/NEWS.d/next/Library/2023-04-22-02-41-06.gh-issue-103673.oE7S_k.rst new file mode 100644 index 00000000000000..bd5317744ff140 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-22-02-41-06.gh-issue-103673.oE7S_k.rst @@ -0,0 +1,2 @@ +:mod:`socketserver` gains ``ForkingUnixStreamServer`` and +``ForkingUnixDatagramServer`` classes. Patch by Jay Berry. diff --git a/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst b/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst new file mode 100644 index 00000000000000..a5b99a2f1360f0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst @@ -0,0 +1,2 @@ +Add :mod:`socket` constants for source-specific multicast. +Patch by Reese Hyde. diff --git a/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst b/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst new file mode 100644 index 00000000000000..60547a25a109bc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst @@ -0,0 +1,2 @@ +Add ``__orig_bases__`` to non-generic TypedDicts, call-based TypedDicts, and +call-based NamedTuples. Other TypedDicts and NamedTuples already had the attribute. diff --git a/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst b/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst new file mode 100644 index 00000000000000..6adb71f7677229 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst @@ -0,0 +1,3 @@ +:class:`urllib.request.CacheFTPHandler` no longer raises :class:`URLError` +if a cached FTP instance is reused. ftplib's endtransfer method calls +voidresp to drain the connection to handle FTP instance reuse properly. diff --git a/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst b/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst new file mode 100644 index 00000000000000..31df04790721a8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst @@ -0,0 +1 @@ +Prepare :meth:`tkinter.Menu.index` for Tk 8.7 so that it does not raise ``TclError: expected integer but got ""`` when it should return ``None``. diff --git a/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst b/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst new file mode 100644 index 00000000000000..99e10f140f5049 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst @@ -0,0 +1,3 @@ +Use :meth:`datetime.datetime.fromisocalendar` in the implementation of +:meth:`datetime.datetime.strptime`, which should now accept only valid ISO +dates. 
(Patch by Paul Ganssle) diff --git a/Misc/NEWS.d/next/Library/2023-04-24-23-07-56.gh-issue-103791.bBPWdS.rst b/Misc/NEWS.d/next/Library/2023-04-24-23-07-56.gh-issue-103791.bBPWdS.rst new file mode 100644 index 00000000000000..f00384cde9706e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-24-23-07-56.gh-issue-103791.bBPWdS.rst @@ -0,0 +1,3 @@ +:class:`contextlib.suppress` now supports suppressing exceptions raised as +part of an :exc:`ExceptionGroup`. If other exceptions exist on the group, they +are re-raised in a group that does not contain the suppressed exceptions. diff --git a/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst b/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst new file mode 100644 index 00000000000000..3bd370dabf4ed5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst @@ -0,0 +1,2 @@ +Deprecated :meth:`datetime.datetime.utcnow` and +:meth:`datetime.datetime.utcfromtimestamp`. (Patch by Paul Ganssle) diff --git a/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst b/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst new file mode 100644 index 00000000000000..c37d795f3eb33d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst @@ -0,0 +1,2 @@ +The C.UTF-8 locale is no longer converted to en_US.UTF-8, enabling the use +of UTF-8 encoding on systems which have no locales installed. diff --git a/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst b/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst new file mode 100644 index 00000000000000..80238a65e32a41 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst @@ -0,0 +1 @@ +Make :mod:`dis` display the value of oparg of :opcode:`KW_NAMES`. diff --git a/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst b/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst new file mode 100644 index 00000000000000..b840f9f5769f08 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst @@ -0,0 +1 @@ +Update the bundled copy of pip to version 23.1.2. diff --git a/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst b/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst new file mode 100644 index 00000000000000..8c92ee40831619 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst @@ -0,0 +1,2 @@ +Isolate :mod:`!_multibytecodec` and codecs extension modules. Patches by +Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst b/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst new file mode 100644 index 00000000000000..a05a6f5cbcdb99 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst @@ -0,0 +1 @@ +Module-level attributes ``January`` and ``February`` are deprecated from :mod:`calendar`. diff --git a/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst b/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst new file mode 100644 index 00000000000000..eaaca5b41ba5e2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst @@ -0,0 +1,2 @@ +Substitute CTRL-D with CTRL-Z in :mod:`sqlite3` CLI banner when running on +Windows. 
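For the :class:`contextlib.suppress` entry above (gh-issue-103791), a minimal sketch of both cases, assuming an interpreter with :exc:`ExceptionGroup` (3.11+) and this change::

    from contextlib import suppress

    # A group whose members all match is swallowed entirely.
    with suppress(ValueError):
        raise ExceptionGroup("eg", [ValueError("drop me")])

    # A partially matching group is re-raised without the suppressed members.
    try:
        with suppress(ValueError):
            raise ExceptionGroup("eg", [ValueError("drop"), KeyError("keep")])
    except ExceptionGroup as eg:
        print([type(e).__name__ for e in eg.exceptions])   # ['KeyError']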
diff --git a/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst b/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst new file mode 100644 index 00000000000000..71b2d87249c47b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst @@ -0,0 +1 @@ +Use :func:`io.open_code` for files to be executed instead of raw :func:`open` diff --git a/Misc/NEWS.d/next/Library/2023-04-28-18-04-23.gh-issue-88773.xXCNJw.rst b/Misc/NEWS.d/next/Library/2023-04-28-18-04-23.gh-issue-88773.xXCNJw.rst new file mode 100644 index 00000000000000..f14c9533f3af87 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-28-18-04-23.gh-issue-88773.xXCNJw.rst @@ -0,0 +1 @@ +Added :func:`turtle.teleport` to the :mod:`turtle` module to move a turtle to a new point without tracing a line, visible or invisible. Patch by Liam Gersten. diff --git a/Misc/NEWS.d/next/Library/2023-04-28-19-08-50.gh-issue-103977.msF70A.rst b/Misc/NEWS.d/next/Library/2023-04-28-19-08-50.gh-issue-103977.msF70A.rst new file mode 100644 index 00000000000000..ff4005774a95d2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-28-19-08-50.gh-issue-103977.msF70A.rst @@ -0,0 +1 @@ +Improve import time of :mod:`platform` module. diff --git a/Misc/NEWS.d/next/Library/2023-05-01-16-43-28.gh-issue-104035.MrJBw8.rst b/Misc/NEWS.d/next/Library/2023-05-01-16-43-28.gh-issue-104035.MrJBw8.rst new file mode 100644 index 00000000000000..8c8e3d6ba5fbc1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-01-16-43-28.gh-issue-104035.MrJBw8.rst @@ -0,0 +1,2 @@ +Do not ignore user-defined ``__getstate__`` and ``__setstate__`` methods for +slotted frozen dataclasses. diff --git a/Misc/NEWS.d/next/Library/2023-05-01-19-10-05.gh-issue-103629.81bpZz.rst b/Misc/NEWS.d/next/Library/2023-05-01-19-10-05.gh-issue-103629.81bpZz.rst new file mode 100644 index 00000000000000..7971ab66359c3d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-01-19-10-05.gh-issue-103629.81bpZz.rst @@ -0,0 +1 @@ +Update the ``repr`` of :class:`typing.Unpack` according to :pep:`692`. diff --git a/Misc/NEWS.d/next/Tests/2022-11-06-18-42-38.gh-issue-75729.uGYJrv.rst b/Misc/NEWS.d/next/Tests/2022-11-06-18-42-38.gh-issue-75729.uGYJrv.rst new file mode 100644 index 00000000000000..8baecdfc31881f --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2022-11-06-18-42-38.gh-issue-75729.uGYJrv.rst @@ -0,0 +1,2 @@ +Fix the :func:`os.spawn* <os.spawnl>` tests failing on Windows +when the working directory or interpreter path contains spaces. diff --git a/Misc/NEWS.d/next/Tests/2023-01-27-18-10-40.gh-issue-101377.IJGpqh.rst b/Misc/NEWS.d/next/Tests/2023-01-27-18-10-40.gh-issue-101377.IJGpqh.rst deleted file mode 100644 index a9c19ce060e3ab..00000000000000 --- a/Misc/NEWS.d/next/Tests/2023-01-27-18-10-40.gh-issue-101377.IJGpqh.rst +++ /dev/null @@ -1 +0,0 @@ -Improved test_locale_calendar_formatweekday of calendar. diff --git a/Misc/NEWS.d/next/Tests/2023-03-08-13-54-20.gh-issue-102537.Vfplpb.rst b/Misc/NEWS.d/next/Tests/2023-03-08-13-54-20.gh-issue-102537.Vfplpb.rst deleted file mode 100644 index 94d160dd4127a6..00000000000000 --- a/Misc/NEWS.d/next/Tests/2023-03-08-13-54-20.gh-issue-102537.Vfplpb.rst +++ /dev/null @@ -1,2 +0,0 @@ -Adjust the error handling strategy in -``test_zoneinfo.TzPathTest.python_tzpath_context``. Patch by Paul Ganssle. 
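For the slotted frozen dataclass entry above (gh-issue-104035), a sketch showing that a user-defined ``__getstate__``/``__setstate__`` pair now takes part in pickling; on builds without the fix the custom methods are silently bypassed::

    import pickle
    from dataclasses import dataclass

    @dataclass(frozen=True, slots=True)
    class Point:
        x: int
        y: int

        def __getstate__(self):
            print("custom __getstate__ used")
            return (self.x, self.y)

        def __setstate__(self, state):
            object.__setattr__(self, "x", state[0])
            object.__setattr__(self, "y", state[1])

    print(pickle.loads(pickle.dumps(Point(1, 2))))   # Point(x=1, y=2)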
diff --git a/Misc/NEWS.d/next/Tests/2023-04-08-00-50-23.gh-issue-103329.M38tqF.rst b/Misc/NEWS.d/next/Tests/2023-04-08-00-50-23.gh-issue-103329.M38tqF.rst new file mode 100644 index 00000000000000..79448ed728040d --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-04-08-00-50-23.gh-issue-103329.M38tqF.rst @@ -0,0 +1 @@ +Regression tests for the behaviour of ``unittest.mock.PropertyMock`` were added. diff --git a/Misc/NEWS.d/next/Windows/2023-02-22-17-26-10.gh-issue-99726.76t957.rst b/Misc/NEWS.d/next/Windows/2023-02-22-17-26-10.gh-issue-99726.76t957.rst deleted file mode 100644 index e2578620017894..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-02-22-17-26-10.gh-issue-99726.76t957.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improves correctness of stat results for Windows, and uses faster API when -available diff --git a/Misc/NEWS.d/next/Windows/2023-03-14-10-52-43.gh-issue-102690.sbXtqk.rst b/Misc/NEWS.d/next/Windows/2023-03-14-10-52-43.gh-issue-102690.sbXtqk.rst deleted file mode 100644 index 5669ebbb442c24..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-03-14-10-52-43.gh-issue-102690.sbXtqk.rst +++ /dev/null @@ -1 +0,0 @@ -Update :mod:`webbrowser` to fall back to Microsoft Edge instead of Internet Explorer. diff --git a/Misc/NEWS.d/next/Windows/2023-03-18-21-38-00.gh-issue-88013.Z3loxC.rst b/Misc/NEWS.d/next/Windows/2023-03-18-21-38-00.gh-issue-88013.Z3loxC.rst new file mode 100644 index 00000000000000..4ca3185ea1f65e --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-03-18-21-38-00.gh-issue-88013.Z3loxC.rst @@ -0,0 +1,2 @@ +Fixed a bug where :exc:`TypeError` was raised when calling +:func:`ntpath.realpath` with a bytes parameter in some cases. diff --git a/Misc/NEWS.d/next/Windows/2023-04-11-09-22-22.gh-issue-103088.6AJEuR.rst b/Misc/NEWS.d/next/Windows/2023-04-11-09-22-22.gh-issue-103088.6AJEuR.rst new file mode 100644 index 00000000000000..f9f5343f4210dc --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-04-11-09-22-22.gh-issue-103088.6AJEuR.rst @@ -0,0 +1 @@ +Fixes venvs not working in bash on Windows across different disks diff --git a/Misc/NEWS.d/next/Windows/2023-04-12-10-49-21.gh-issue-103088.Yjj-qJ.rst b/Misc/NEWS.d/next/Windows/2023-04-12-10-49-21.gh-issue-103088.Yjj-qJ.rst new file mode 100644 index 00000000000000..1fee99da240378 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-04-12-10-49-21.gh-issue-103088.Yjj-qJ.rst @@ -0,0 +1 @@ +Fix virtual environment :file:`activate` script having incorrect line endings for Cygwin. diff --git a/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst b/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst new file mode 100644 index 00000000000000..5bd005ffacb800 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst @@ -0,0 +1,3 @@ +Fix a potential ``[Errno 13] Permission denied`` when using :func:`shutil.copystat` +within Windows Subsystem for Linux (WSL) on a mounted filesystem by adding +``errno.EACCES`` to the list of ignored errors within the internal implementation. diff --git a/Misc/NEWS.d/next/macOS/2023-04-04-13-37-28.gh-issue-103207.x0vvQp.rst b/Misc/NEWS.d/next/macOS/2023-04-04-13-37-28.gh-issue-103207.x0vvQp.rst new file mode 100644 index 00000000000000..3c176e3a6b5310 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-04-04-13-37-28.gh-issue-103207.x0vvQp.rst @@ -0,0 +1,2 @@ +Add instructions to the macOS installer welcome display on how to workaround +the macOS 13 Ventura “The installer encountered an error” failure. 
diff --git a/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst b/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst new file mode 100644 index 00000000000000..f274d3b898f15d --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst @@ -0,0 +1 @@ +update curses textbox to additionally handle backspace using the ``curses.ascii.DEL`` key press. diff --git a/Misc/README b/Misc/README index e4dd2005411a8d..3dab768ba1a7a4 100644 --- a/Misc/README +++ b/Misc/README @@ -8,7 +8,6 @@ Files found here ---------------- ACKS Acknowledgements -gdbinit Handy stuff to put in your .gdbinit file, if you use gdb HISTORY News from previous releases -- oldest last indent.pro GNU indent profile approximating my C style NEWS News for this release (for some meaning of "this") diff --git a/Misc/gdbinit b/Misc/gdbinit deleted file mode 100644 index e8f62ba6476423..00000000000000 --- a/Misc/gdbinit +++ /dev/null @@ -1,176 +0,0 @@ -# If you use the GNU debugger gdb to debug the Python C runtime, you -# might find some of the following commands useful. Copy this to your -# ~/.gdbinit file and it'll get loaded into gdb automatically when you -# start it up. Then, at the gdb prompt you can do things like: -# -# (gdb) pyo apyobjectptr -# <module 'foobar' (built-in)> -# refcounts: 1 -# address : 84a7a2c -# $1 = void -# (gdb) -# -# NOTE: If you have gdb 7 or later, it supports debugging of Python directly -# with embedded macros that you may find superior to what is in here. -# See Tools/gdb/libpython.py and http://bugs.python.org/issue8032. - -define pyo - # side effect of calling _PyObject_Dump is to dump the object's - # info - assigning just prevents gdb from printing the - # NULL return value - set $_unused_void = _PyObject_Dump($arg0) -end -document pyo - Prints a representation of the object to stderr, along with the - number of reference counts it currently has and the hex address the - object is allocated at. The argument must be a PyObject* -end - -define pyg - print _PyGC_Dump($arg0) -end -document pyg - Prints a representation of the object to stderr, along with the - number of reference counts it currently has and the hex address the - object is allocated at. The argument must be a PyGC_Head* -end - -define pylocals - set $_i = 0 - while $_i < f->f_code->co_nlocals - if f->f_localsplus + $_i != 0 - set $_names = f->f_code->co_varnames - set $_name = PyUnicode_AsUTF8(PyTuple_GetItem($_names, $_i)) - printf "%s:\n", $_name - pyo f->f_localsplus[$_i] - end - set $_i = $_i + 1 - end -end -document pylocals - Print the local variables of the current frame. 
-end - -# A rewrite of the Python interpreter's line number calculator in GDB's -# command language -define lineno - set $__continue = 1 - set $__co = f->f_code - set $__lasti = f->f_lasti - set $__sz = ((PyVarObject *)$__co->co_lnotab)->ob_size/2 - set $__p = (unsigned char *)((PyBytesObject *)$__co->co_lnotab)->ob_sval - set $__li = $__co->co_firstlineno - set $__ad = 0 - while ($__sz-1 >= 0 && $__continue) - set $__sz = $__sz - 1 - set $__ad = $__ad + *$__p - set $__p = $__p + 1 - if ($__ad > $__lasti) - set $__continue = 0 - else - set $__li = $__li + *$__p - set $__p = $__p + 1 - end - end - printf "%d", $__li -end - -define pyframev - pyframe - pylocals -end -document pyframev - Print the current frame - verbose -end - -define pyframe - set $__fn = PyUnicode_AsUTF8(f->f_code->co_filename) - set $__n = PyUnicode_AsUTF8(f->f_code->co_name) - printf "%s (", $__fn - lineno - printf "): %s\n", $__n -### Uncomment these lines when using from within Emacs/XEmacs so it will -### automatically track/display the current Python source line -# printf "%c%c%s:", 032, 032, $__fn -# lineno -# printf ":1\n" -end - -### Use these at your own risk. It appears that a bug in gdb causes it -### to crash in certain circumstances. - -#define up -# up-silently 1 -# printframe -#end - -#define down -# down-silently 1 -# printframe -#end - -define printframe - if $pc > PyEval_EvalFrameEx && $pc < _PyEval_EvalFrameDefault - pyframe - else - frame - end -end - -# Here's a somewhat fragile way to print the entire Python stack from gdb. -# It's fragile because the tests for the value of $pc depend on the layout -# of specific functions in the C source code. - -# Explanation of while and if tests: We want to pop up the stack until we -# land in Py_Main (this is probably an incorrect assumption in an embedded -# interpreter, but the test can be extended by an interested party). If -# Py_Main <= $pc <= Py_GetArgcArv is true, $pc is in Py_Main(), so the while -# tests succeeds as long as it's not true. In a similar fashion the if -# statement tests to see if we are in PyEval_EvalFrameEx(). - -# Note: The name of the main interpreter function and the function which -# follow it has changed over time. This version of pystack works with this -# version of Python. If you try using it with older or newer versions of -# the interpreter you may will have to change the functions you compare with -# $pc. 
- -define pystack - while $pc < Py_Main || $pc > Py_GetArgcArgv - if $pc > PyEval_EvalFrameEx && $pc < _PyEval_EvalFrameDefault - pyframe - end - up-silently 1 - end - select-frame 0 -end -document pystack - Print the entire Python call stack -end - -define pystackv - while $pc < Py_Main || $pc > Py_GetArgcArgv - if $pc > PyEval_EvalFrameEx && $pc < _PyEval_EvalFrameDefault - pyframev - end - up-silently 1 - end - select-frame 0 -end -document pystackv - Print the entire Python call stack - verbose mode -end - -define pu - set $uni = $arg0 - set $i = 0 - while (*$uni && $i++<100) - if (*$uni < 0x80) - print *(char*)$uni++ - else - print /x *(short*)$uni++ - end - end -end -document pu - Generally useful macro to print a Unicode string -end diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index b12290d436cbeb..fe1b9f8f5380c1 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -169,7 +169,7 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/pyos.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c # Some testing modules MUST be built as shared libraries. diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 13d98eedf32f0e..8b1a29b6d33e8b 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -8,6 +8,7 @@ #include "pycore_runtime_init.h" // _Py_ID() #include "pycore_moduleobject.h" // _PyModule_GetState() #include "structmember.h" // PyMemberDef +#include "cpython/context.h" #include <stddef.h> // offsetof() @@ -31,8 +32,11 @@ typedef struct { all running event loops. {EventLoop: Task} */ PyObject *current_tasks; - /* WeakSet containing all alive tasks. */ - PyObject *all_tasks; + /* WeakSet containing all tasks scheduled to run on event loops. */ + PyObject *scheduled_tasks; + + /* Set containing all eagerly executing tasks. */ + PyObject *eager_tasks; /* An isinstance type cache for the 'is_coroutine()' function. 
*/ PyObject *iscoroutine_typecache; @@ -156,6 +160,9 @@ class _asyncio.Future "FutureObj *" "&Future_Type" /* Get FutureIter from Future */ static PyObject * future_new_iter(PyObject *); +static PyObject * +task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *result); + static int _is_coroutine(asyncio_state *state, PyObject *coro) @@ -355,33 +362,26 @@ call_soon(asyncio_state *state, PyObject *loop, PyObject *func, PyObject *arg, PyObject *ctx) { PyObject *handle; - PyObject *stack[3]; - Py_ssize_t nargs; if (ctx == NULL) { - handle = PyObject_CallMethodObjArgs( - loop, &_Py_ID(call_soon), func, arg, NULL); + PyObject *stack[] = {loop, func, arg}; + size_t nargsf = 3 | PY_VECTORCALL_ARGUMENTS_OFFSET; + handle = PyObject_VectorcallMethod(&_Py_ID(call_soon), stack, nargsf, NULL); } else { - /* Use FASTCALL to pass a keyword-only argument to call_soon */ - - PyObject *callable = PyObject_GetAttr(loop, &_Py_ID(call_soon)); - if (callable == NULL) { - return -1; - } - /* All refs in 'stack' are borrowed. */ - nargs = 1; - stack[0] = func; + PyObject *stack[4]; + size_t nargs = 2; + stack[0] = loop; + stack[1] = func; if (arg != NULL) { - stack[1] = arg; + stack[2] = arg; nargs++; } stack[nargs] = (PyObject *)ctx; - EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_API, callable); - handle = PyObject_Vectorcall(callable, stack, nargs, - state->context_kwname); - Py_DECREF(callable); + size_t nargsf = nargs | PY_VECTORCALL_ARGUMENTS_OFFSET; + handle = PyObject_VectorcallMethod(&_Py_ID(call_soon), stack, nargsf, + state->context_kwname); } if (handle == NULL) { @@ -1837,6 +1837,7 @@ class _asyncio.Task "TaskObj *" "&Task_Type" static int task_call_step_soon(asyncio_state *state, TaskObj *, PyObject *); static PyObject * task_wakeup(TaskObj *, PyObject *); static PyObject * task_step(asyncio_state *, TaskObj *, PyObject *); +static int task_eager_start(asyncio_state *state, TaskObj *task); /* ----- Task._step wrapper */ @@ -1947,7 +1948,7 @@ static PyMethodDef TaskWakeupDef = { static int register_task(asyncio_state *state, PyObject *task) { - PyObject *res = PyObject_CallMethodOneArg(state->all_tasks, + PyObject *res = PyObject_CallMethodOneArg(state->scheduled_tasks, &_Py_ID(add), task); if (res == NULL) { return -1; @@ -1956,11 +1957,16 @@ register_task(asyncio_state *state, PyObject *task) return 0; } +static int +register_eager_task(asyncio_state *state, PyObject *task) +{ + return PySet_Add(state->eager_tasks, task); +} static int unregister_task(asyncio_state *state, PyObject *task) { - PyObject *res = PyObject_CallMethodOneArg(state->all_tasks, + PyObject *res = PyObject_CallMethodOneArg(state->scheduled_tasks, &_Py_ID(discard), task); if (res == NULL) { return -1; @@ -1969,6 +1975,11 @@ unregister_task(asyncio_state *state, PyObject *task) return 0; } +static int +unregister_eager_task(asyncio_state *state, PyObject *task) +{ + return PySet_Discard(state->eager_tasks, task); +} static int enter_task(asyncio_state *state, PyObject *loop, PyObject *task) @@ -2022,6 +2033,54 @@ leave_task(asyncio_state *state, PyObject *loop, PyObject *task) return _PyDict_DelItem_KnownHash(state->current_tasks, loop, hash); } +static PyObject * +swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) +{ + PyObject *prev_task; + Py_hash_t hash; + hash = PyObject_Hash(loop); + if (hash == -1) { + return NULL; + } + + prev_task = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); + if (prev_task == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + prev_task = 
Py_None; + } + + if (task == Py_None) { + if (_PyDict_DelItem_KnownHash(state->current_tasks, loop, hash) == -1) { + return NULL; + } + } else { + if (_PyDict_SetItem_KnownHash(state->current_tasks, loop, task, hash) == -1) { + return NULL; + } + } + + Py_INCREF(prev_task); + + return prev_task; +} + +static int +is_loop_running(PyObject *loop) +{ + PyObject *func = PyObject_GetAttr(loop, &_Py_ID(is_running)); + if (func == NULL) { + PyErr_Format(PyExc_TypeError, "Loop missing is_running()"); + return -1; + } + PyObject *res = PyObject_CallNoArgs(func); + int retval = Py_IsTrue(res); + Py_DECREF(func); + Py_DECREF(res); + return !!retval; +} + /* ----- Task */ /*[clinic input] @@ -2032,15 +2091,16 @@ _asyncio.Task.__init__ loop: object = None name: object = None context: object = None + eager_start: bool = False A coroutine wrapped in a Future. [clinic start generated code]*/ static int _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, - PyObject *name, PyObject *context) -/*[clinic end generated code: output=49ac96fe33d0e5c7 input=924522490c8ce825]*/ - + PyObject *name, PyObject *context, + int eager_start) +/*[clinic end generated code: output=7aced2d27836f1a1 input=18e3f113a51b829d]*/ { if (future_init((FutureObj*)self, loop)) { return -1; @@ -2076,8 +2136,10 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, Py_XSETREF(self->task_coro, coro); if (name == Py_None) { - name = PyUnicode_FromFormat("Task-%" PRIu64, - ++state->task_name_counter); + // optimization: defer task name formatting + // store the task counter as PyLong in the name + // for deferred formatting in get_name + name = PyLong_FromUnsignedLongLong(++state->task_name_counter); } else if (!PyUnicode_CheckExact(name)) { name = PyObject_Str(name); } else { @@ -2088,6 +2150,19 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, return -1; } + if (eager_start) { + int loop_running = is_loop_running(self->task_loop); + if (loop_running == -1) { + return -1; + } + if (loop_running) { + if (task_eager_start(state, self)) { + return -1; + } + return 0; + } + } + if (task_call_step_soon(state, self, NULL)) { return -1; } @@ -2359,8 +2434,9 @@ _asyncio_Task_get_stack_impl(TaskObj *self, PyTypeObject *cls, /*[clinic end generated code: output=6774dfc10d3857fa input=8e01c9b2618ae953]*/ { asyncio_state *state = get_asyncio_state_by_cls(cls); - return PyObject_CallFunctionObjArgs( - state->asyncio_task_get_stack_func, self, limit, NULL); + PyObject *stack[] = {(PyObject *)self, limit}; + return PyObject_Vectorcall(state->asyncio_task_get_stack_func, + stack, 2, NULL); } /*[clinic input] @@ -2387,8 +2463,9 @@ _asyncio_Task_print_stack_impl(TaskObj *self, PyTypeObject *cls, /*[clinic end generated code: output=b38affe9289ec826 input=150b35ba2d3a7dee]*/ { asyncio_state *state = get_asyncio_state_by_cls(cls); - return PyObject_CallFunctionObjArgs( - state->asyncio_task_print_stack_func, self, limit, file, NULL); + PyObject *stack[] = {(PyObject *)self, limit, file}; + return PyObject_Vectorcall(state->asyncio_task_print_stack_func, + stack, 3, NULL); } /*[clinic input] @@ -2454,6 +2531,13 @@ _asyncio_Task_get_name_impl(TaskObj *self) /*[clinic end generated code: output=0ecf1570c3b37a8f input=a4a6595d12f4f0f8]*/ { if (self->task_name) { + if (PyLong_CheckExact(self->task_name)) { + PyObject *name = PyUnicode_FromFormat("Task-%S", self->task_name); + if (name == NULL) { + return NULL; + } + Py_SETREF(self->task_name, name); + } return Py_NewRef(self->task_name); } 
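
The `get_name` hunk above completes the deferred-naming optimization: `__init__` now stores the bare counter as an integer, and the "Task-N" string is only built the first time the name is actually requested. A minimal pure-Python sketch of the same idea (a hypothetical class, not the real Task implementation):

    import itertools

    _task_counter = itertools.count(1).__next__

    class LazilyNamedTask:
        def __init__(self, name=None):
            # Defer formatting: keep the raw counter until get_name() is called.
            self._name = _task_counter() if name is None else str(name)

        def get_name(self):
            if isinstance(self._name, int):
                self._name = f"Task-{self._name}"
            return self._name
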
@@ -2827,6 +2911,20 @@ task_step_impl(asyncio_state *state, TaskObj *task, PyObject *exc) Py_RETURN_NONE; } + PyObject *ret = task_step_handle_result_impl(state, task, result); + return ret; + +fail: + return NULL; +} + + +static PyObject * +task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *result) +{ + int res; + PyObject *o; + if (result == (PyObject*)task) { /* We have a task that wants to await on itself */ goto self_await; @@ -3058,6 +3156,65 @@ task_step(asyncio_state *state, TaskObj *task, PyObject *exc) } } +static int +task_eager_start(asyncio_state *state, TaskObj *task) +{ + assert(task != NULL); + PyObject *prevtask = swap_current_task(state, task->task_loop, (PyObject *)task); + if (prevtask == NULL) { + return -1; + } + + if (register_eager_task(state, (PyObject *)task) == -1) { + Py_DECREF(prevtask); + return -1; + } + + if (PyContext_Enter(task->task_context) == -1) { + Py_DECREF(prevtask); + return -1; + } + + int retval = 0; + + PyObject *stepres = task_step_impl(state, task, NULL); + if (stepres == NULL) { + PyObject *exc = PyErr_GetRaisedException(); + _PyErr_ChainExceptions1(exc); + retval = -1; + } else { + Py_DECREF(stepres); + } + + PyObject *curtask = swap_current_task(state, task->task_loop, prevtask); + Py_DECREF(prevtask); + if (curtask == NULL) { + retval = -1; + } else { + assert(curtask == (PyObject *)task); + Py_DECREF(curtask); + } + + if (unregister_eager_task(state, (PyObject *)task) == -1) { + retval = -1; + } + + if (PyContext_Exit(task->task_context) == -1) { + retval = -1; + } + + if (task->task_state == STATE_PENDING) { + if (register_task(state, (PyObject *)task) == -1) { + retval = -1; + } + } else { + // This seems to really help performance on pyperformance benchmarks + Py_CLEAR(task->task_coro); + } + + return retval; +} + static PyObject * task_wakeup(TaskObj *task, PyObject *o) { @@ -3221,6 +3378,27 @@ _asyncio__register_task_impl(PyObject *module, PyObject *task) Py_RETURN_NONE; } +/*[clinic input] +_asyncio._register_eager_task + + task: object + +Register a new task in asyncio as executed by loop. + +Returns None. +[clinic start generated code]*/ + +static PyObject * +_asyncio__register_eager_task_impl(PyObject *module, PyObject *task) +/*[clinic end generated code: output=dfe1d45367c73f1a input=237f684683398c51]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (register_eager_task(state, task) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + /*[clinic input] _asyncio._unregister_task @@ -3243,6 +3421,27 @@ _asyncio__unregister_task_impl(PyObject *module, PyObject *task) Py_RETURN_NONE; } +/*[clinic input] +_asyncio._unregister_eager_task + + task: object + +Unregister a task. + +Returns None. +[clinic start generated code]*/ + +static PyObject * +_asyncio__unregister_eager_task_impl(PyObject *module, PyObject *task) +/*[clinic end generated code: output=a426922bd07f23d1 input=9d07401ef14ee048]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (unregister_eager_task(state, task) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + /*[clinic input] _asyncio._enter_task @@ -3294,6 +3493,27 @@ _asyncio__leave_task_impl(PyObject *module, PyObject *loop, PyObject *task) } +/*[clinic input] +_asyncio._swap_current_task + + loop: object + task: object + +Temporarily swap in the supplied task and return the original one (or None). + +This is intended for use during eager coroutine execution. 
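
`task_eager_start` above is the core of eager task execution: the new task swaps itself in as the loop's current task, runs one step of its coroutine synchronously inside its own context, and only registers with the scheduled-task set if it is still pending afterwards (otherwise the coroutine reference is dropped). A simplified Python sketch of that control flow, where the helper names are hypothetical stand-ins for the C-level functions in this hunk:

    def eager_start(task):
        # swap_current_task/register_*/unregister_* below are hypothetical
        # stand-ins for the C helpers defined in this hunk.
        prev = swap_current_task(task.loop, task)    # make `task` the current task
        register_eager_task(task)
        try:
            task.context.run(task.step)              # run one step synchronously
        finally:
            swap_current_task(task.loop, prev)       # restore the previous current task
            unregister_eager_task(task)
        if task.is_pending():
            register_task(task)                      # still pending: hand over to the event loop
        else:
            task.coro = None                         # finished eagerly; drop the coroutine
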
+ +[clinic start generated code]*/ + +static PyObject * +_asyncio__swap_current_task_impl(PyObject *module, PyObject *loop, + PyObject *task) +/*[clinic end generated code: output=9f88de958df74c7e input=c9c72208d3d38b6c]*/ +{ + return swap_current_task(get_asyncio_state(module), loop, task); +} + + /*[clinic input] _asyncio.current_task @@ -3375,7 +3595,8 @@ module_traverse(PyObject *mod, visitproc visit, void *arg) Py_VISIT(state->asyncio_InvalidStateError); Py_VISIT(state->asyncio_CancelledError); - Py_VISIT(state->all_tasks); + Py_VISIT(state->scheduled_tasks); + Py_VISIT(state->eager_tasks); Py_VISIT(state->current_tasks); Py_VISIT(state->iscoroutine_typecache); @@ -3412,7 +3633,8 @@ module_clear(PyObject *mod) Py_CLEAR(state->asyncio_InvalidStateError); Py_CLEAR(state->asyncio_CancelledError); - Py_CLEAR(state->all_tasks); + Py_CLEAR(state->scheduled_tasks); + Py_CLEAR(state->eager_tasks); Py_CLEAR(state->current_tasks); Py_CLEAR(state->iscoroutine_typecache); @@ -3492,9 +3714,14 @@ module_init(asyncio_state *state) PyObject *weak_set; WITH_MOD("weakref") GET_MOD_ATTR(weak_set, "WeakSet"); - state->all_tasks = PyObject_CallNoArgs(weak_set); + state->scheduled_tasks = PyObject_CallNoArgs(weak_set); Py_CLEAR(weak_set); - if (state->all_tasks == NULL) { + if (state->scheduled_tasks == NULL) { + goto fail; + } + + state->eager_tasks = PySet_New(NULL); + if (state->eager_tasks == NULL) { goto fail; } @@ -3518,9 +3745,12 @@ static PyMethodDef asyncio_methods[] = { _ASYNCIO__GET_RUNNING_LOOP_METHODDEF _ASYNCIO__SET_RUNNING_LOOP_METHODDEF _ASYNCIO__REGISTER_TASK_METHODDEF + _ASYNCIO__REGISTER_EAGER_TASK_METHODDEF _ASYNCIO__UNREGISTER_TASK_METHODDEF + _ASYNCIO__UNREGISTER_EAGER_TASK_METHODDEF _ASYNCIO__ENTER_TASK_METHODDEF _ASYNCIO__LEAVE_TASK_METHODDEF + _ASYNCIO__SWAP_CURRENT_TASK_METHODDEF {NULL, NULL} }; @@ -3557,7 +3787,11 @@ module_exec(PyObject *mod) return -1; } - if (PyModule_AddObjectRef(mod, "_all_tasks", state->all_tasks) < 0) { + if (PyModule_AddObjectRef(mod, "_scheduled_tasks", state->scheduled_tasks) < 0) { + return -1; + } + + if (PyModule_AddObjectRef(mod, "_eager_tasks", state->eager_tasks) < 0) { return -1; } diff --git a/Modules/_bisectmodule.c b/Modules/_bisectmodule.c index d3bec535ee512d..30801c2f87eee7 100644 --- a/Modules/_bisectmodule.c +++ b/Modules/_bisectmodule.c @@ -162,12 +162,14 @@ insert just after the rightmost x already there. Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + +A custom key function can be supplied to customize the sort order. [clinic start generated code]*/ static Py_ssize_t _bisect_bisect_right_impl(PyObject *module, PyObject *a, PyObject *x, Py_ssize_t lo, Py_ssize_t hi, PyObject *key) -/*[clinic end generated code: output=3a4bc09cc7c8a73d input=40fcc5afa06ae593]*/ +/*[clinic end generated code: output=3a4bc09cc7c8a73d input=43071869772dd53a]*/ { return internal_bisect_right(a, x, lo, hi, key); } @@ -188,12 +190,14 @@ If x is already in a, insert it to the right of the rightmost x. Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + +A custom key function can be supplied to customize the sort order. 
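
The docstring additions above call out the `key` parameter; for illustration, a short example of the kind of call they describe, keeping a list sorted by a derived field via the public bisect module:

    import bisect

    rows = [("banana", 2), ("apple", 5), ("cherry", 9)]   # already sorted by the count field
    bisect.insort_right(rows, ("date", 4), key=lambda r: r[1])
    assert rows == [("banana", 2), ("date", 4), ("apple", 5), ("cherry", 9)]
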
[clinic start generated code]*/ static PyObject * _bisect_insort_right_impl(PyObject *module, PyObject *a, PyObject *x, Py_ssize_t lo, Py_ssize_t hi, PyObject *key) -/*[clinic end generated code: output=ac3bf26d07aedda2 input=44e1708e26b7b802]*/ +/*[clinic end generated code: output=ac3bf26d07aedda2 input=f60777d2b6ddb239]*/ { PyObject *result, *key_x; Py_ssize_t index; @@ -343,12 +347,14 @@ insert just before the leftmost x already there. Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + +A custom key function can be supplied to customize the sort order. [clinic start generated code]*/ static Py_ssize_t _bisect_bisect_left_impl(PyObject *module, PyObject *a, PyObject *x, Py_ssize_t lo, Py_ssize_t hi, PyObject *key) -/*[clinic end generated code: output=70749d6e5cae9284 input=90dd35b50ceb05e3]*/ +/*[clinic end generated code: output=70749d6e5cae9284 input=f29c4fe7f9b797c7]*/ { return internal_bisect_left(a, x, lo, hi, key); } @@ -370,12 +376,14 @@ If x is already in a, insert it to the left of the leftmost x. Optional args lo (default 0) and hi (default len(a)) bound the slice of a to be searched. + +A custom key function can be supplied to customize the sort order. [clinic start generated code]*/ static PyObject * _bisect_insort_left_impl(PyObject *module, PyObject *a, PyObject *x, Py_ssize_t lo, Py_ssize_t hi, PyObject *key) -/*[clinic end generated code: output=b1d33e5e7ffff11e input=3ab65d8784f585b1]*/ +/*[clinic end generated code: output=b1d33e5e7ffff11e input=0a700a82edbd472c]*/ { PyObject *result, *key_x; Py_ssize_t index; diff --git a/Modules/_blake2/impl/blake2-config.h b/Modules/_blake2/impl/blake2-config.h index f5dd6faa9e6867..c09cb4bcf06723 100644 --- a/Modules/_blake2/impl/blake2-config.h +++ b/Modules/_blake2/impl/blake2-config.h @@ -53,7 +53,7 @@ #endif #endif -#ifdef HAVE_SSE41 +#ifdef HAVE_SSE4_1 #ifndef HAVE_SSSE3 #define HAVE_SSSE3 #endif diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 68131f3b54d2ea..a9b1425177c3d7 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -1,17 +1,56 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_long.h" // _PyLong_GetZero() +#include "pycore_moduleobject.h" // _PyModule_GetState() +#include "pycore_typeobject.h" // _PyType_GetModuleState() #include "structmember.h" // PyMemberDef #include <stddef.h> +typedef struct { + PyTypeObject *deque_type; + PyTypeObject *defdict_type; + PyTypeObject *dequeiter_type; + PyTypeObject *dequereviter_type; + PyTypeObject *tuplegetter_type; +} collections_state; + +static inline collections_state * +get_module_state(PyObject *mod) +{ + void *state = _PyModule_GetState(mod); + assert(state != NULL); + return (collections_state *)state; +} + +static inline collections_state * +get_module_state_by_cls(PyTypeObject *cls) +{ + void *state = _PyType_GetModuleState(cls); + assert(state != NULL); + return (collections_state *)state; +} + +static struct PyModuleDef _collectionsmodule; + +static inline collections_state * +find_module_state_by_def(PyTypeObject *type) +{ + PyObject *mod = PyType_GetModuleByDef(type, &_collectionsmodule); + assert(mod != NULL); + return get_module_state(mod); +} + /*[clinic input] module _collections -class _tuplegetter "_tuplegetterobject *" "&tuplegetter_type" +class _tuplegetter "_tuplegetterobject *" "clinic_state()->tuplegetter_type" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d 
input=a8ece4ccad7e30ac]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=7356042a89862e0e]*/ -static PyTypeObject tuplegetter_type; +/* We can safely assume type to be the defining class, + * since tuplegetter is not a base type */ +#define clinic_state() (get_module_state_by_cls(type)) #include "clinic/_collectionsmodule.c.h" +#undef clinic_state /* collections module implementation of a deque() datatype Written and maintained by Raymond D. Hettinger <python@rcn.com> @@ -94,8 +133,6 @@ typedef struct { PyObject *weakreflist; } dequeobject; -static PyTypeObject deque_type; - /* For debug builds, add error checking to track the endpoints * in the chain of links. The goal is to make sure that link * assignments only take place at endpoints so that links already @@ -484,11 +521,13 @@ deque_copy(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *result; dequeobject *old_deque = (dequeobject *)deque; - if (Py_IS_TYPE(deque, &deque_type)) { + collections_state *state = find_module_state_by_def(Py_TYPE(deque)); + if (Py_IS_TYPE(deque, state->deque_type)) { dequeobject *new_deque; PyObject *rv; - new_deque = (dequeobject *)deque_new(&deque_type, (PyObject *)NULL, (PyObject *)NULL); + new_deque = (dequeobject *)deque_new(state->deque_type, + (PyObject *)NULL, (PyObject *)NULL); if (new_deque == NULL) return NULL; new_deque->maxlen = old_deque->maxlen; @@ -511,7 +550,7 @@ deque_copy(PyObject *deque, PyObject *Py_UNUSED(ignored)) else result = PyObject_CallFunction((PyObject *)(Py_TYPE(deque)), "Oi", deque, old_deque->maxlen, NULL); - if (result != NULL && !PyObject_TypeCheck(result, &deque_type)) { + if (result != NULL && !PyObject_TypeCheck(result, state->deque_type)) { PyErr_Format(PyExc_TypeError, "%.200s() must return a deque, not %.200s", Py_TYPE(deque)->tp_name, Py_TYPE(result)->tp_name); @@ -529,7 +568,8 @@ deque_concat(dequeobject *deque, PyObject *other) PyObject *new_deque, *result; int rv; - rv = PyObject_IsInstance(other, (PyObject *)&deque_type); + collections_state *state = find_module_state_by_def(Py_TYPE(deque)); + rv = PyObject_IsInstance(other, (PyObject *)state->deque_type); if (rv <= 0) { if (rv == 0) { PyErr_Format(PyExc_TypeError, @@ -990,7 +1030,7 @@ deque_count(dequeobject *deque, PyObject *v) } PyDoc_STRVAR(count_doc, -"D.count(value) -> integer -- return number of occurrences of value"); +"D.count(value) -- return number of occurrences of value"); static int deque_contains(dequeobject *deque, PyObject *v) @@ -1098,7 +1138,7 @@ deque_index(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) } PyDoc_STRVAR(index_doc, -"D.index(value, [start, [stop]]) -> integer -- return first index of value.\n" +"D.index(value, [start, [stop]]) -- return first index of value.\n" "Raises ValueError if the value is not present."); /* insert(), remove(), and delitem() are implemented in terms of @@ -1288,6 +1328,7 @@ deque_ass_item(dequeobject *deque, Py_ssize_t i, PyObject *v) static void deque_dealloc(dequeobject *deque) { + PyTypeObject *tp = Py_TYPE(deque); Py_ssize_t i; PyObject_GC_UnTrack(deque); @@ -1303,12 +1344,15 @@ deque_dealloc(dequeobject *deque) for (i=0 ; i < deque->numfreeblocks ; i++) { PyMem_Free(deque->freeblocks[i]); } - Py_TYPE(deque)->tp_free(deque); + tp->tp_free(deque); + Py_DECREF(tp); } static int deque_traverse(dequeobject *deque, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(deque)); + block *b; PyObject *item; Py_ssize_t index; @@ -1393,8 +1437,9 @@ deque_richcompare(PyObject *v, PyObject *w, int op) Py_ssize_t vs, ws; int b, 
cmp=-1; - if (!PyObject_TypeCheck(v, &deque_type) || - !PyObject_TypeCheck(w, &deque_type)) { + collections_state *state = find_module_state_by_def(Py_TYPE(v)); + if (!PyObject_TypeCheck(v, state->deque_type) || + !PyObject_TypeCheck(w, state->deque_type)) { Py_RETURN_NOTIMPLEMENTED; } @@ -1537,19 +1582,6 @@ static PyGetSetDef deque_getset[] = { {0} }; -static PySequenceMethods deque_as_sequence = { - (lenfunc)deque_len, /* sq_length */ - (binaryfunc)deque_concat, /* sq_concat */ - (ssizeargfunc)deque_repeat, /* sq_repeat */ - (ssizeargfunc)deque_item, /* sq_item */ - 0, /* sq_slice */ - (ssizeobjargproc)deque_ass_item, /* sq_ass_item */ - 0, /* sq_ass_slice */ - (objobjproc)deque_contains, /* sq_contains */ - (binaryfunc)deque_inplace_concat, /* sq_inplace_concat */ - (ssizeargfunc)deque_inplace_repeat, /* sq_inplace_repeat */ -}; - static PyObject *deque_iter(dequeobject *deque); static PyObject *deque_reviter(dequeobject *deque, PyObject *Py_UNUSED(ignored)); PyDoc_STRVAR(reversed_doc, @@ -1597,54 +1629,53 @@ static PyMethodDef deque_methods[] = { {NULL, NULL} /* sentinel */ }; +static PyMemberDef deque_members[] = { + {"__weaklistoffset__", T_PYSSIZET, offsetof(dequeobject, weakreflist), READONLY}, + {NULL}, +}; + PyDoc_STRVAR(deque_doc, "deque([iterable[, maxlen]]) --> deque object\n\ \n\ A list-like sequence optimized for data accesses near its endpoints."); -static PyTypeObject deque_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "collections.deque", /* tp_name */ - sizeof(dequeobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor)deque_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - deque_repr, /* tp_repr */ - 0, /* tp_as_number */ - &deque_as_sequence, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - PyObject_HashNotImplemented, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | - Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_SEQUENCE, - /* tp_flags */ - deque_doc, /* tp_doc */ - (traverseproc)deque_traverse, /* tp_traverse */ - (inquiry)deque_clear, /* tp_clear */ - (richcmpfunc)deque_richcompare, /* tp_richcompare */ - offsetof(dequeobject, weakreflist), /* tp_weaklistoffset*/ - (getiterfunc)deque_iter, /* tp_iter */ - 0, /* tp_iternext */ - deque_methods, /* tp_methods */ - 0, /* tp_members */ - deque_getset, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)deque_init, /* tp_init */ - PyType_GenericAlloc, /* tp_alloc */ - deque_new, /* tp_new */ - PyObject_GC_Del, /* tp_free */ +static PyType_Slot deque_slots[] = { + {Py_tp_dealloc, deque_dealloc}, + {Py_tp_repr, deque_repr}, + {Py_tp_hash, PyObject_HashNotImplemented}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_doc, (void *)deque_doc}, + {Py_tp_traverse, deque_traverse}, + {Py_tp_clear, deque_clear}, + {Py_tp_richcompare, deque_richcompare}, + {Py_tp_iter, deque_iter}, + {Py_tp_getset, deque_getset}, + {Py_tp_init, deque_init}, + {Py_tp_alloc, PyType_GenericAlloc}, + {Py_tp_new, deque_new}, + {Py_tp_free, PyObject_GC_Del}, + {Py_tp_methods, deque_methods}, + {Py_tp_members, deque_members}, + + // Sequence protocol + {Py_sq_length, deque_len}, + {Py_sq_concat, deque_concat}, + {Py_sq_repeat, deque_repeat}, + {Py_sq_item, deque_item}, + {Py_sq_ass_item, deque_ass_item}, + {Py_sq_contains, deque_contains}, + 
{Py_sq_inplace_concat, deque_inplace_concat}, + {Py_sq_inplace_repeat, deque_inplace_repeat}, + {0, NULL}, +}; + +static PyType_Spec deque_spec = { + .name = "collections.deque", + .basicsize = sizeof(dequeobject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_SEQUENCE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = deque_slots, }; /*********************** Deque Iterator **************************/ @@ -1658,14 +1689,13 @@ typedef struct { Py_ssize_t counter; /* number of items remaining for iteration */ } dequeiterobject; -static PyTypeObject dequeiter_type; - static PyObject * deque_iter(dequeobject *deque) { dequeiterobject *it; - it = PyObject_GC_New(dequeiterobject, &dequeiter_type); + collections_state *state = find_module_state_by_def(Py_TYPE(deque)); + it = PyObject_GC_New(dequeiterobject, state->dequeiter_type); if (it == NULL) return NULL; it->b = deque->leftblock; @@ -1680,17 +1710,27 @@ deque_iter(dequeobject *deque) static int dequeiter_traverse(dequeiterobject *dio, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(dio)); Py_VISIT(dio->deque); return 0; } +static int +dequeiter_clear(dequeiterobject *dio) +{ + Py_CLEAR(dio->deque); + return 0; +} + static void dequeiter_dealloc(dequeiterobject *dio) { /* bpo-31095: UnTrack is needed before calling any callbacks */ + PyTypeObject *tp = Py_TYPE(dio); PyObject_GC_UnTrack(dio); - Py_XDECREF(dio->deque); + (void)dequeiter_clear(dio); PyObject_GC_Del(dio); + Py_DECREF(tp); } static PyObject * @@ -1726,9 +1766,10 @@ dequeiter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_ssize_t i, index=0; PyObject *deque; dequeiterobject *it; - if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index)) + collections_state *state = get_module_state_by_cls(type); + if (!PyArg_ParseTuple(args, "O!|n", state->deque_type, &deque, &index)) return NULL; - assert(type == &dequeiter_type); + assert(type == state->dequeiter_type); it = (dequeiterobject*)deque_iter((dequeobject *)deque); if (!it) @@ -1769,59 +1810,35 @@ static PyMethodDef dequeiter_methods[] = { {NULL, NULL} /* sentinel */ }; -static PyTypeObject dequeiter_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_collections._deque_iterator", /* tp_name */ - sizeof(dequeiterobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor)dequeiter_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - 0, /* tp_doc */ - (traverseproc)dequeiter_traverse, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - PyObject_SelfIter, /* tp_iter */ - (iternextfunc)dequeiter_next, /* tp_iternext */ - dequeiter_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - dequeiter_new, /* tp_new */ - 0, +static PyType_Slot dequeiter_slots[] = { + {Py_tp_dealloc, dequeiter_dealloc}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, dequeiter_traverse}, + {Py_tp_clear, dequeiter_clear}, + {Py_tp_iter, PyObject_SelfIter}, + {Py_tp_iternext, dequeiter_next}, + 
{Py_tp_methods, dequeiter_methods}, + {Py_tp_new, dequeiter_new}, + {0, NULL}, }; -/*********************** Deque Reverse Iterator **************************/ +static PyType_Spec dequeiter_spec = { + .name = "collections._deque_iterator", + .basicsize = sizeof(dequeiterobject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = dequeiter_slots, +}; -static PyTypeObject dequereviter_type; +/*********************** Deque Reverse Iterator **************************/ static PyObject * deque_reviter(dequeobject *deque, PyObject *Py_UNUSED(ignored)) { dequeiterobject *it; + collections_state *state = find_module_state_by_def(Py_TYPE(deque)); - it = PyObject_GC_New(dequeiterobject, &dequereviter_type); + it = PyObject_GC_New(dequeiterobject, state->dequereviter_type); if (it == NULL) return NULL; it->b = deque->rightblock; @@ -1866,9 +1883,10 @@ dequereviter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_ssize_t i, index=0; PyObject *deque; dequeiterobject *it; - if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index)) + collections_state *state = get_module_state_by_cls(type); + if (!PyArg_ParseTuple(args, "O!|n", state->deque_type, &deque, &index)) return NULL; - assert(type == &dequereviter_type); + assert(type == state->dequereviter_type); it = (dequeiterobject*)deque_reviter((dequeobject *)deque, NULL); if (!it) @@ -1889,47 +1907,24 @@ dequereviter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return (PyObject*)it; } -static PyTypeObject dequereviter_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_collections._deque_reverse_iterator", /* tp_name */ - sizeof(dequeiterobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor)dequeiter_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - 0, /* tp_doc */ - (traverseproc)dequeiter_traverse, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - PyObject_SelfIter, /* tp_iter */ - (iternextfunc)dequereviter_next, /* tp_iternext */ - dequeiter_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - dequereviter_new, /* tp_new */ - 0, +static PyType_Slot dequereviter_slots[] = { + {Py_tp_dealloc, dequeiter_dealloc}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_traverse, dequeiter_traverse}, + {Py_tp_clear, dequeiter_clear}, + {Py_tp_iter, PyObject_SelfIter}, + {Py_tp_iternext, dequereviter_next}, + {Py_tp_methods, dequeiter_methods}, + {Py_tp_new, dequereviter_new}, + {0, NULL}, +}; + +static PyType_Spec dequereviter_spec = { + .name = "collections._deque_reverse_iterator", + .basicsize = sizeof(dequeiterobject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = dequereviter_slots, }; /* defaultdict type *********************************************************/ @@ -1939,8 +1934,6 @@ typedef struct { PyObject *default_factory; } defdictobject; -static PyTypeObject defdict_type; /* Forward */ - PyDoc_STRVAR(defdict_missing_doc, 
"__missing__(key) # Called by __getitem__ for missing key; pseudo-code:\n\ if self.default_factory is None: raise KeyError((key,))\n\ @@ -2071,9 +2064,11 @@ static void defdict_dealloc(defdictobject *dd) { /* bpo-31095: UnTrack is needed before calling any callbacks */ + PyTypeObject *tp = Py_TYPE(dd); PyObject_GC_UnTrack(dd); Py_CLEAR(dd->default_factory); PyDict_Type.tp_dealloc((PyObject *)dd); + Py_DECREF(tp); } static PyObject * @@ -2117,11 +2112,24 @@ static PyObject* defdict_or(PyObject* left, PyObject* right) { PyObject *self, *other; - if (PyObject_TypeCheck(left, &defdict_type)) { + + // Find module state + PyTypeObject *tp = Py_TYPE(left); + PyObject *mod = PyType_GetModuleByDef(tp, &_collectionsmodule); + if (mod == NULL) { + PyErr_Clear(); + tp = Py_TYPE(right); + mod = PyType_GetModuleByDef(tp, &_collectionsmodule); + } + assert(mod != NULL); + collections_state *state = get_module_state(mod); + + if (PyObject_TypeCheck(left, state->defdict_type)) { self = left; other = right; } else { + assert(PyObject_TypeCheck(right, state->defdict_type)); self = right; other = left; } @@ -2141,13 +2149,10 @@ defdict_or(PyObject* left, PyObject* right) return new; } -static PyNumberMethods defdict_as_number = { - .nb_or = defdict_or, -}; - static int defdict_traverse(PyObject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(((defdictobject *)self)->default_factory); return PyDict_Type.tp_traverse(self, visit, arg); } @@ -2203,48 +2208,28 @@ passed to the dict constructor, including keyword arguments.\n\ /* See comment in xxsubtype.c */ #define DEFERRED_ADDRESS(ADDR) 0 -static PyTypeObject defdict_type = { - PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) - "collections.defaultdict", /* tp_name */ - sizeof(defdictobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor)defdict_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)defdict_repr, /* tp_repr */ - &defdict_as_number, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - /* tp_flags */ - defdict_doc, /* tp_doc */ - defdict_traverse, /* tp_traverse */ - (inquiry)defdict_tp_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset*/ - 0, /* tp_iter */ - 0, /* tp_iternext */ - defdict_methods, /* tp_methods */ - defdict_members, /* tp_members */ - 0, /* tp_getset */ - DEFERRED_ADDRESS(&PyDict_Type), /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - defdict_init, /* tp_init */ - PyType_GenericAlloc, /* tp_alloc */ - 0, /* tp_new */ - PyObject_GC_Del, /* tp_free */ +static PyType_Slot defdict_slots[] = { + {Py_tp_dealloc, defdict_dealloc}, + {Py_tp_repr, defdict_repr}, + {Py_nb_or, defdict_or}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_doc, (void *)defdict_doc}, + {Py_tp_traverse, defdict_traverse}, + {Py_tp_clear, defdict_tp_clear}, + {Py_tp_methods, defdict_methods}, + {Py_tp_members, defdict_members}, + {Py_tp_init, defdict_init}, + {Py_tp_alloc, PyType_GenericAlloc}, + {Py_tp_free, PyObject_GC_Del}, + {0, NULL}, +}; + +static PyType_Spec defdict_spec = { + .name = "collections.defaultdict", + .basicsize = sizeof(defdictobject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | 
Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = defdict_slots, }; /* helper function for Counter *********************************************/ @@ -2442,6 +2427,7 @@ static int tuplegetter_traverse(PyObject *self, visitproc visit, void *arg) { _tuplegetterobject *tuplegetter = (_tuplegetterobject *)self; + Py_VISIT(Py_TYPE(tuplegetter)); Py_VISIT(tuplegetter->doc); return 0; } @@ -2457,9 +2443,11 @@ tuplegetter_clear(PyObject *self) static void tuplegetter_dealloc(_tuplegetterobject *self) { + PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); tuplegetter_clear((PyObject*)self); - Py_TYPE(self)->tp_free((PyObject*)self); + tp->tp_free((PyObject*)self); + Py_DECREF(tp); } static PyObject* @@ -2487,52 +2475,60 @@ static PyMethodDef tuplegetter_methods[] = { {NULL}, }; -static PyTypeObject tuplegetter_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_collections._tuplegetter", /* tp_name */ - sizeof(_tuplegetterobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor)tuplegetter_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)tuplegetter_repr, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - 0, /* tp_doc */ - (traverseproc)tuplegetter_traverse, /* tp_traverse */ - (inquiry)tuplegetter_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - tuplegetter_methods, /* tp_methods */ - tuplegetter_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - tuplegetter_descr_get, /* tp_descr_get */ - tuplegetter_descr_set, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - tuplegetter_new, /* tp_new */ - 0, +static PyType_Slot tuplegetter_slots[] = { + {Py_tp_dealloc, tuplegetter_dealloc}, + {Py_tp_repr, tuplegetter_repr}, + {Py_tp_traverse, tuplegetter_traverse}, + {Py_tp_clear, tuplegetter_clear}, + {Py_tp_methods, tuplegetter_methods}, + {Py_tp_members, tuplegetter_members}, + {Py_tp_descr_get, tuplegetter_descr_get}, + {Py_tp_descr_set, tuplegetter_descr_set}, + {Py_tp_new, tuplegetter_new}, + {0, NULL}, +}; + +static PyType_Spec tuplegetter_spec = { + .name = "collections._tuplegetter", + .basicsize = sizeof(_tuplegetterobject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = tuplegetter_slots, }; /* module level code ********************************************************/ +static int +collections_traverse(PyObject *mod, visitproc visit, void *arg) +{ + collections_state *state = get_module_state(mod); + Py_VISIT(state->deque_type); + Py_VISIT(state->defdict_type); + Py_VISIT(state->dequeiter_type); + Py_VISIT(state->dequereviter_type); + Py_VISIT(state->tuplegetter_type); + return 0; +} + +static int +collections_clear(PyObject *mod) +{ + collections_state *state = get_module_state(mod); + Py_CLEAR(state->deque_type); + Py_CLEAR(state->defdict_type); + Py_CLEAR(state->dequeiter_type); + Py_CLEAR(state->dequereviter_type); + Py_CLEAR(state->tuplegetter_type); + return 0; +} + +static void +collections_free(void *module) +{ + collections_clear((PyObject *)module); +} + PyDoc_STRVAR(collections_doc, "High performance data structures.\n\ - deque: ordered collection accessible 
from endpoints only\n\ @@ -2544,43 +2540,50 @@ static struct PyMethodDef collections_methods[] = { {NULL, NULL} /* sentinel */ }; +#define ADD_TYPE(MOD, SPEC, TYPE, BASE) do { \ + TYPE = (PyTypeObject *)PyType_FromMetaclass(NULL, MOD, SPEC, \ + (PyObject *)BASE); \ + if (TYPE == NULL) { \ + return -1; \ + } \ + if (PyModule_AddType(MOD, TYPE) < 0) { \ + return -1; \ + } \ +} while (0) + static int collections_exec(PyObject *module) { - PyTypeObject *typelist[] = { - &deque_type, - &defdict_type, - &PyODict_Type, - &dequeiter_type, - &dequereviter_type, - &tuplegetter_type - }; - - defdict_type.tp_base = &PyDict_Type; - - for (size_t i = 0; i < Py_ARRAY_LENGTH(typelist); i++) { - if (PyModule_AddType(module, typelist[i]) < 0) { - return -1; - } + collections_state *state = get_module_state(module); + ADD_TYPE(module, &deque_spec, state->deque_type, NULL); + ADD_TYPE(module, &defdict_spec, state->defdict_type, &PyDict_Type); + ADD_TYPE(module, &dequeiter_spec, state->dequeiter_type, NULL); + ADD_TYPE(module, &dequereviter_spec, state->dequereviter_type, NULL); + ADD_TYPE(module, &tuplegetter_spec, state->tuplegetter_type, NULL); + + if (PyModule_AddType(module, &PyODict_Type) < 0) { + return -1; } return 0; } +#undef ADD_TYPE + static struct PyModuleDef_Slot collections_slots[] = { {Py_mod_exec, collections_exec}, {0, NULL} }; static struct PyModuleDef _collectionsmodule = { - PyModuleDef_HEAD_INIT, - "_collections", - collections_doc, - 0, - collections_methods, - collections_slots, - NULL, - NULL, - NULL + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "_collections", + .m_doc = collections_doc, + .m_size = sizeof(collections_state), + .m_methods = collections_methods, + .m_slots = collections_slots, + .m_traverse = collections_traverse, + .m_clear = collections_clear, + .m_free = collections_free, }; PyMODINIT_FUNC diff --git a/Modules/_csv.c b/Modules/_csv.c index bd337084dbff81..2217cc2ca7a775 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -82,7 +82,8 @@ typedef enum { } ParserState; typedef enum { - QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE + QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, + QUOTE_STRINGS, QUOTE_NOTNULL } QuoteStyle; typedef struct { @@ -95,6 +96,8 @@ static const StyleDesc quote_styles[] = { { QUOTE_ALL, "QUOTE_ALL" }, { QUOTE_NONNUMERIC, "QUOTE_NONNUMERIC" }, { QUOTE_NONE, "QUOTE_NONE" }, + { QUOTE_STRINGS, "QUOTE_STRINGS" }, + { QUOTE_NOTNULL, "QUOTE_NOTNULL" }, { 0 } }; @@ -1264,6 +1267,12 @@ csv_writerow(WriterObj *self, PyObject *seq) case QUOTE_ALL: quoted = 1; break; + case QUOTE_STRINGS: + quoted = PyUnicode_Check(field); + break; + case QUOTE_NOTNULL: + quoted = field != Py_None; + break; default: quoted = 0; break; @@ -1659,6 +1668,11 @@ PyDoc_STRVAR(csv_module_doc, " csv.QUOTE_NONNUMERIC means that quotes are always placed around\n" " fields which do not parse as integers or floating point\n" " numbers.\n" +" csv.QUOTE_STRINGS means that quotes are always placed around\n" +" fields which are strings. 
Note that the Python value None\n" +" is not a string.\n" +" csv.QUOTE_NOTNULL means that quotes are only placed around fields\n" +" that are not the Python value None.\n" " csv.QUOTE_NONE means that quotes are never placed around fields.\n" " * escapechar - specifies a one-character string used to escape\n" " the delimiter when quoting is set to QUOTE_NONE.\n" diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 6f92ca08dd537b..c7ed6bd2229c79 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -126,6 +126,8 @@ bytes(cdata) #include "pycore_long.h" // _PyLong_GetZero() +ctypes_state global_state; + PyObject *PyExc_ArgError = NULL; /* This dict maps ctypes types to POINTER types */ @@ -150,13 +152,32 @@ typedef struct { PyObject *dict; } DictRemoverObject; +static int +_DictRemover_traverse(DictRemoverObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->key); + Py_VISIT(self->dict); + return 0; +} + +static int +_DictRemover_clear(DictRemoverObject *self) +{ + Py_CLEAR(self->key); + Py_CLEAR(self->dict); + return 0; +} + static void _DictRemover_dealloc(PyObject *myself) { + PyTypeObject *tp = Py_TYPE(myself); DictRemoverObject *self = (DictRemoverObject *)myself; - Py_XDECREF(self->key); - Py_XDECREF(self->dict); - Py_TYPE(self)->tp_free(myself); + PyObject_GC_UnTrack(myself); + (void)_DictRemover_clear(self); + tp->tp_free(myself); + Py_DECREF(tp); } static PyObject * @@ -173,47 +194,23 @@ _DictRemover_call(PyObject *myself, PyObject *args, PyObject *kw) Py_RETURN_NONE; } -static PyTypeObject DictRemover_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.DictRemover", /* tp_name */ - sizeof(DictRemoverObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - _DictRemover_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - _DictRemover_call, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ -/* XXX should participate in GC? 
*/ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - PyDoc_STR("deletes a key from a dictionary"), /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ - 0, /* tp_free */ +PyDoc_STRVAR(dictremover_doc, "deletes a key from a dictionary"); + +static PyType_Slot dictremover_slots[] = { + {Py_tp_dealloc, _DictRemover_dealloc}, + {Py_tp_traverse, _DictRemover_traverse}, + {Py_tp_clear, _DictRemover_clear}, + {Py_tp_call, _DictRemover_call}, + {Py_tp_doc, (void *)dictremover_doc}, + {0, NULL}, +}; + +static PyType_Spec dictremover_spec = { + .name = "_ctypes.DictRemover", + .basicsize = sizeof(DictRemoverObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = dictremover_slots, }; int @@ -224,7 +221,8 @@ PyDict_SetItemProxy(PyObject *dict, PyObject *key, PyObject *item) PyObject *proxy; int result; - obj = _PyObject_CallNoArgs((PyObject *)&DictRemover_Type); + ctypes_state *st = GLOBAL_STATE(); + obj = _PyObject_CallNoArgs((PyObject *)st->DictRemover_Type); if (obj == NULL) return -1; @@ -415,23 +413,45 @@ typedef struct { PyObject *keep; // If set, a reference to the original CDataObject. } StructParamObject; +static int +StructParam_traverse(StructParamObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + +static int +StructParam_clear(StructParamObject *self) +{ + Py_CLEAR(self->keep); + return 0; +} static void StructParam_dealloc(PyObject *myself) { StructParamObject *self = (StructParamObject *)myself; - Py_XDECREF(self->keep); + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(myself); + (void)StructParam_clear(self); PyMem_Free(self->ptr); - Py_TYPE(self)->tp_free(myself); + tp->tp_free(myself); + Py_DECREF(tp); } +static PyType_Slot structparam_slots[] = { + {Py_tp_traverse, StructParam_traverse}, + {Py_tp_clear, StructParam_clear}, + {Py_tp_dealloc, StructParam_dealloc}, + {0, NULL}, +}; -static PyTypeObject StructParam_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_name = "_ctypes.StructParam_Type", - .tp_basicsize = sizeof(StructParamObject), - .tp_dealloc = StructParam_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT, +static PyType_Spec structparam_spec = { + .name = "_ctypes.StructParam_Type", + .basicsize = sizeof(StructParamObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = structparam_slots, }; @@ -460,7 +480,9 @@ StructUnionType_paramfunc(CDataObject *self) /* Create a Python object which calls PyMem_Free(ptr) in its deallocator. The object will be destroyed at _ctypes_callproc() cleanup. 
*/ - obj = (&StructParam_Type)->tp_alloc(&StructParam_Type, 0); + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *tp = st->StructParam_Type; + obj = tp->tp_alloc(tp, 0); if (obj == NULL) { PyMem_Free(ptr); return NULL; @@ -800,7 +822,8 @@ CDataType_from_param(PyObject *type, PyObject *value) if (res) { return Py_NewRef(value); } - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { PyCArgObject *p = (PyCArgObject *)value; PyObject *ob = p->obj; const char *ob_name; @@ -1683,7 +1706,8 @@ c_wchar_p_from_param(PyObject *type, PyObject *value) return Py_NewRef(value); } } - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { /* byref(c_char(...)) */ PyCArgObject *a = (PyCArgObject *)value; StgDictObject *dict = PyObject_stgdict(a->obj); @@ -1746,7 +1770,8 @@ c_char_p_from_param(PyObject *type, PyObject *value) return Py_NewRef(value); } } - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { /* byref(c_char(...)) */ PyCArgObject *a = (PyCArgObject *)value; StgDictObject *dict = PyObject_stgdict(a->obj); @@ -1847,7 +1872,8 @@ c_void_p_from_param(PyObject *type, PyObject *value) return Py_NewRef(value); } /* byref(...) */ - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { /* byref(c_xxx()) */ PyCArgObject *a = (PyCArgObject *)value; if (a->tag == 'P') { @@ -5635,12 +5661,22 @@ _ctypes_add_types(PyObject *mod) } \ } while (0) +#define CREATE_TYPE(MOD, TP, SPEC) do { \ + PyObject *type = PyType_FromMetaclass(NULL, MOD, SPEC, NULL); \ + if (type == NULL) { \ + return -1; \ + } \ + TP = (PyTypeObject *)type; \ +} while (0) + + ctypes_state *st = GLOBAL_STATE(); + /* Note: ob_type is the metatype (the 'type'), defaults to PyType_Type, tp_base is the base type, defaults to 'object' aka PyBaseObject_Type. 
*/ - TYPE_READY(&PyCArg_Type); - TYPE_READY(&PyCThunk_Type); + CREATE_TYPE(mod, st->PyCArg_Type, &carg_spec); + CREATE_TYPE(mod, st->PyCThunk_Type, &cthunk_spec); TYPE_READY(&PyCData_Type); /* StgDict is derived from PyDict_Type */ TYPE_READY_BASE(&PyCStgDict_Type, &PyDict_Type); @@ -5673,17 +5709,15 @@ _ctypes_add_types(PyObject *mod) * Simple classes */ - /* PyCField_Type is derived from PyBaseObject_Type */ - TYPE_READY(&PyCField_Type); + CREATE_TYPE(mod, st->PyCField_Type, &cfield_spec); /************************************************* * * Other stuff */ - DictRemover_Type.tp_new = PyType_GenericNew; - TYPE_READY(&DictRemover_Type); - TYPE_READY(&StructParam_Type); + CREATE_TYPE(mod, st->DictRemover_Type, &dictremover_spec); + CREATE_TYPE(mod, st->StructParam_Type, &structparam_spec); #ifdef MS_WIN32 TYPE_READY_BASE(&PyComError_Type, (PyTypeObject*)PyExc_Exception); @@ -5692,6 +5726,7 @@ _ctypes_add_types(PyObject *mod) #undef TYPE_READY #undef TYPE_READY_BASE #undef MOD_ADD_TYPE +#undef CREATE_TYPE return 0; } diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index bc8750091f65f3..8e694ba852c1d4 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -28,23 +28,11 @@ /**************************************************************/ -static void -CThunkObject_dealloc(PyObject *myself) -{ - CThunkObject *self = (CThunkObject *)myself; - PyObject_GC_UnTrack(self); - Py_XDECREF(self->converters); - Py_XDECREF(self->callable); - Py_XDECREF(self->restype); - if (self->pcl_write) - Py_ffi_closure_free(self->pcl_write); - PyObject_GC_Del(self); -} - static int CThunkObject_traverse(PyObject *myself, visitproc visit, void *arg) { CThunkObject *self = (CThunkObject *)myself; + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->converters); Py_VISIT(self->callable); Py_VISIT(self->restype); @@ -61,36 +49,35 @@ CThunkObject_clear(PyObject *myself) return 0; } -PyTypeObject PyCThunk_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.CThunkObject", - sizeof(CThunkObject), /* tp_basicsize */ - sizeof(ffi_type), /* tp_itemsize */ - CThunkObject_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("CThunkObject"), /* tp_doc */ - CThunkObject_traverse, /* tp_traverse */ - CThunkObject_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ +static void +CThunkObject_dealloc(PyObject *myself) +{ + CThunkObject *self = (CThunkObject *)myself; + PyTypeObject *tp = Py_TYPE(myself); + PyObject_GC_UnTrack(self); + (void)CThunkObject_clear(myself); + if (self->pcl_write) { + Py_ffi_closure_free(self->pcl_write); + } + PyObject_GC_Del(self); + Py_DECREF(tp); +} + +static PyType_Slot cthunk_slots[] = { + {Py_tp_doc, (void *)PyDoc_STR("CThunkObject")}, + {Py_tp_dealloc, CThunkObject_dealloc}, + {Py_tp_traverse, CThunkObject_traverse}, + {Py_tp_clear, CThunkObject_clear}, + {0, NULL}, +}; + +PyType_Spec cthunk_spec = { + .name = "_ctypes.CThunkObject", + .basicsize = sizeof(CThunkObject), + .itemsize = sizeof(ffi_type), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | 
Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = cthunk_slots, }; /**************************************************************/ @@ -320,7 +307,8 @@ static CThunkObject* CThunkObject_new(Py_ssize_t nargs) CThunkObject *p; Py_ssize_t i; - p = PyObject_GC_NewVar(CThunkObject, &PyCThunk_Type, nargs); + ctypes_state *st = GLOBAL_STATE(); + p = PyObject_GC_NewVar(CThunkObject, st->PyCThunk_Type, nargs); if (p == NULL) { return NULL; } @@ -357,7 +345,10 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, if (p == NULL) return NULL; - assert(CThunk_CheckExact((PyObject *)p)); +#ifdef Py_DEBUG + ctypes_state *st = GLOBAL_STATE(); + assert(CThunk_CheckExact(st, (PyObject *)p)); +#endif p->pcl_write = Py_ffi_closure_alloc(sizeof(ffi_closure), &p->pcl_exec); if (p->pcl_write == NULL) { diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 4438727332bc11..f10cf58216acf7 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -469,21 +469,41 @@ PyCArgObject * PyCArgObject_new(void) { PyCArgObject *p; - p = PyObject_New(PyCArgObject, &PyCArg_Type); + ctypes_state *st = GLOBAL_STATE(); + p = PyObject_GC_New(PyCArgObject, st->PyCArg_Type); if (p == NULL) return NULL; p->pffi_type = NULL; p->tag = '\0'; p->obj = NULL; memset(&p->value, 0, sizeof(p->value)); + PyObject_GC_Track(p); return p; } +static int +PyCArg_traverse(PyCArgObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->obj); + return 0; +} + +static int +PyCArg_clear(PyCArgObject *self) +{ + Py_CLEAR(self->obj); + return 0; +} + static void PyCArg_dealloc(PyCArgObject *self) { - Py_XDECREF(self->obj); - PyObject_Free(self); + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(self); + (void)PyCArg_clear(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); } static int @@ -567,36 +587,21 @@ static PyMemberDef PyCArgType_members[] = { { NULL }, }; -PyTypeObject PyCArg_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "CArgObject", - sizeof(PyCArgObject), - 0, - (destructor)PyCArg_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)PyCArg_repr, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - 0, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - PyCArgType_members, /* tp_members */ +static PyType_Slot carg_slots[] = { + {Py_tp_dealloc, PyCArg_dealloc}, + {Py_tp_traverse, PyCArg_traverse}, + {Py_tp_clear, PyCArg_clear}, + {Py_tp_repr, PyCArg_repr}, + {Py_tp_members, PyCArgType_members}, + {0, NULL}, +}; + +PyType_Spec carg_spec = { + .name = "_ctypes.CArgObject", + .basicsize = sizeof(PyCArgObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = carg_slots, }; /****************************************************************/ @@ -669,7 +674,8 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) return 0; } - if (PyCArg_CheckExact(obj)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, obj)) { PyCArgObject *carg = (PyCArgObject *)obj; pa->ffi_type = carg->pffi_type; pa->keep = Py_NewRef(obj); @@ -1817,7 +1823,7 @@ resize(PyObject 
*self, PyObject *args) dict = PyObject_stgdict((PyObject *)obj); if (dict == NULL) { PyErr_SetString(PyExc_TypeError, - "excepted ctypes instance"); + "expected ctypes instance"); return NULL; } if (size < dict->size) { diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index 796a1bec966de1..128506a9eed920 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -61,7 +61,9 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, #define CONT_BITFIELD 2 #define EXPAND_BITFIELD 3 - self = (CFieldObject *)PyCField_Type.tp_alloc((PyTypeObject *)&PyCField_Type, 0); + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *tp = st->PyCField_Type; + self = (CFieldObject *)tp->tp_alloc(tp, 0); if (self == NULL) return NULL; dict = PyType_stgdict(desc); @@ -256,6 +258,7 @@ static PyGetSetDef PyCField_getset[] = { static int PyCField_traverse(CFieldObject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->proto); return 0; } @@ -270,9 +273,11 @@ PyCField_clear(CFieldObject *self) static void PyCField_dealloc(PyObject *self) { + PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - PyCField_clear((CFieldObject *)self); + (void)PyCField_clear((CFieldObject *)self); Py_TYPE(self)->tp_free((PyObject *)self); + Py_DECREF(tp); } static PyObject * @@ -296,46 +301,24 @@ PyCField_repr(CFieldObject *self) return result; } -PyTypeObject PyCField_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.CField", /* tp_name */ - sizeof(CFieldObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - PyCField_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)PyCField_repr, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("Structure/Union member"), /* tp_doc */ - (traverseproc)PyCField_traverse, /* tp_traverse */ - (inquiry)PyCField_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - PyCField_getset, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - (descrgetfunc)PyCField_get, /* tp_descr_get */ - (descrsetfunc)PyCField_set, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot cfield_slots[] = { + {Py_tp_dealloc, PyCField_dealloc}, + {Py_tp_repr, PyCField_repr}, + {Py_tp_doc, (void *)PyDoc_STR("Structure/Union member")}, + {Py_tp_traverse, PyCField_traverse}, + {Py_tp_clear, PyCField_clear}, + {Py_tp_getset, PyCField_getset}, + {Py_tp_descr_get, PyCField_get}, + {Py_tp_descr_set, PyCField_set}, + {0, NULL}, +}; + +PyType_Spec cfield_spec = { + .name = "_ctypes.CField", + .basicsize = sizeof(CFieldObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = cfield_slots, }; diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index a7029b6e6da2b8..252d9da7dbb56d 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -32,6 +32,22 @@ #endif #endif +typedef struct { + PyTypeObject *DictRemover_Type; + PyTypeObject *PyCArg_Type; + PyTypeObject *PyCField_Type; + PyTypeObject *PyCThunk_Type; + PyTypeObject *StructParam_Type; +} ctypes_state; + 
+extern ctypes_state global_state; + +#define GLOBAL_STATE() (&global_state) + +extern PyType_Spec carg_spec; +extern PyType_Spec cfield_spec; +extern PyType_Spec cthunk_spec; + typedef struct tagPyCArgObject PyCArgObject; typedef struct tagCDataObject CDataObject; typedef PyObject *(* GETFUNC)(void *, Py_ssize_t size); @@ -88,8 +104,7 @@ typedef struct { ffi_type *ffi_restype; ffi_type *atypes[1]; } CThunkObject; -extern PyTypeObject PyCThunk_Type; -#define CThunk_CheckExact(v) Py_IS_TYPE(v, &PyCThunk_Type) +#define CThunk_CheckExact(st, v) Py_IS_TYPE(v, st->PyCThunk_Type) typedef struct { /* First part identical to tagCDataObject */ @@ -141,7 +156,6 @@ extern PyTypeObject PyCSimpleType_Type; #define PyCSimpleTypeObject_CheckExact(v) Py_IS_TYPE(v, &PyCSimpleType_Type) #define PyCSimpleTypeObject_Check(v) PyObject_TypeCheck(v, &PyCSimpleType_Type) -extern PyTypeObject PyCField_Type; extern struct fielddesc *_ctypes_get_fielddesc(const char *fmt); @@ -334,8 +348,7 @@ struct tagPyCArgObject { Py_ssize_t size; /* for the 'V' tag */ }; -extern PyTypeObject PyCArg_Type; -#define PyCArg_CheckExact(v) Py_IS_TYPE(v, &PyCArg_Type) +#define PyCArg_CheckExact(st, v) Py_IS_TYPE(v, st->PyCArg_Type) extern PyCArgObject *PyCArgObject_new(void); extern PyObject * diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index 83a52757d60979..b1b2bac1455e67 100644 --- a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -225,6 +225,8 @@ MakeFields(PyObject *type, CFieldObject *descr, if (fieldlist == NULL) return -1; + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *cfield_tp = st->PyCField_Type; for (i = 0; i < PySequence_Fast_GET_SIZE(fieldlist); ++i) { PyObject *pair = PySequence_Fast_GET_ITEM(fieldlist, i); /* borrowed */ PyObject *fname, *ftype, *bits; @@ -240,7 +242,7 @@ MakeFields(PyObject *type, CFieldObject *descr, Py_DECREF(fieldlist); return -1; } - if (!Py_IS_TYPE(fdescr, &PyCField_Type)) { + if (!Py_IS_TYPE(fdescr, cfield_tp)) { PyErr_SetString(PyExc_TypeError, "unexpected type"); Py_DECREF(fdescr); Py_DECREF(fieldlist); @@ -257,13 +259,13 @@ MakeFields(PyObject *type, CFieldObject *descr, } continue; } - new_descr = (CFieldObject *)PyCField_Type.tp_alloc((PyTypeObject *)&PyCField_Type, 0); + new_descr = (CFieldObject *)cfield_tp->tp_alloc(cfield_tp, 0); if (new_descr == NULL) { Py_DECREF(fdescr); Py_DECREF(fieldlist); return -1; } - assert(Py_IS_TYPE(new_descr, &PyCField_Type)); + assert(Py_IS_TYPE(new_descr, cfield_tp)); new_descr->size = fdescr->size; new_descr->offset = fdescr->offset + offset; new_descr->index = fdescr->index + index; @@ -304,6 +306,8 @@ MakeAnonFields(PyObject *type) if (anon_names == NULL) return -1; + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *cfield_tp = st->PyCField_Type; for (i = 0; i < PySequence_Fast_GET_SIZE(anon_names); ++i) { PyObject *fname = PySequence_Fast_GET_ITEM(anon_names, i); /* borrowed */ CFieldObject *descr = (CFieldObject *)PyObject_GetAttr(type, fname); @@ -311,7 +315,7 @@ MakeAnonFields(PyObject *type) Py_DECREF(anon_names); return -1; } - if (!Py_IS_TYPE(descr, &PyCField_Type)) { + if (!Py_IS_TYPE(descr, cfield_tp)) { PyErr_Format(PyExc_AttributeError, "'%U' is specified in _anonymous_ but not in " "_fields_", diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index eda8c5610ba659..8f86fc91966205 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -5144,6 +5144,13 @@ datetime_datetime_now_impl(PyTypeObject *type, PyObject *tz) static PyObject * datetime_utcnow(PyObject *cls, 
PyObject *dummy) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "datetime.utcnow() is deprecated and scheduled for removal in a " + "future version. Use timezone-aware objects to represent datetimes " + "in UTC: datetime.now(datetime.UTC).", 2)) + { + return NULL; + } return datetime_best_possible(cls, _PyTime_gmtime, Py_None); } @@ -5180,6 +5187,13 @@ datetime_fromtimestamp(PyObject *cls, PyObject *args, PyObject *kw) static PyObject * datetime_utcfromtimestamp(PyObject *cls, PyObject *args) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "datetime.utcfromtimestamp() is deprecated and scheduled for removal " + "in a future version. Use timezone-aware objects to represent " + "datetimes in UTC: datetime.now(datetime.UTC).", 2)) + { + return NULL; + } PyObject *timestamp; PyObject *result = NULL; @@ -6153,17 +6167,31 @@ local_to_seconds(int year, int month, int day, static PyObject * local_timezone_from_local(PyDateTime_DateTime *local_dt) { - long long seconds; + long long seconds, seconds2; time_t timestamp; + int fold = DATE_GET_FOLD(local_dt); seconds = local_to_seconds(GET_YEAR(local_dt), GET_MONTH(local_dt), GET_DAY(local_dt), DATE_GET_HOUR(local_dt), DATE_GET_MINUTE(local_dt), DATE_GET_SECOND(local_dt), - DATE_GET_FOLD(local_dt)); + fold); if (seconds == -1) return NULL; + seconds2 = local_to_seconds(GET_YEAR(local_dt), + GET_MONTH(local_dt), + GET_DAY(local_dt), + DATE_GET_HOUR(local_dt), + DATE_GET_MINUTE(local_dt), + DATE_GET_SECOND(local_dt), + !fold); + if (seconds2 == -1) + return NULL; + /* Detect gap */ + if (seconds2 != seconds && (seconds2 > seconds) == fold) + seconds = seconds2; + /* XXX: add bounds check */ timestamp = seconds - epoch; return local_timezone_from_timestamp(timestamp); diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 5936fbaaf35eb0..0e11c879732ab6 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -30,6 +30,7 @@ #endif #include <Python.h> +#include "pycore_long.h" // _PyLong_IsZero() #include "pycore_pystate.h" // _PyThreadState_GET() #include "complexobject.h" #include "mpdecimal.h" @@ -2146,35 +2147,25 @@ dec_from_long(PyTypeObject *type, PyObject *v, { PyObject *dec; PyLongObject *l = (PyLongObject *)v; - Py_ssize_t ob_size; - size_t len; - uint8_t sign; dec = PyDecType_New(type); if (dec == NULL) { return NULL; } - ob_size = Py_SIZE(l); - if (ob_size == 0) { + if (_PyLong_IsZero(l)) { _dec_settriple(dec, MPD_POS, 0, 0); return dec; } - if (ob_size < 0) { - len = -ob_size; - sign = MPD_NEG; - } - else { - len = ob_size; - sign = MPD_POS; - } + uint8_t sign = _PyLong_IsNegative(l) ? 
MPD_NEG : MPD_POS; - if (len == 1) { - _dec_settriple(dec, sign, *l->long_value.ob_digit, 0); + if (_PyLong_IsCompact(l)) { + _dec_settriple(dec, sign, l->long_value.ob_digit[0], 0); mpd_qfinalize(MPD(dec), ctx, status); return dec; } + size_t len = _PyLong_DigitCount(l); #if PYLONG_BITS_IN_DIGIT == 30 mpd_qimport_u32(MPD(dec), l->long_value.ob_digit, len, sign, PyLong_BASE, @@ -3482,7 +3473,6 @@ dec_as_long(PyObject *dec, PyObject *context, int round) PyLongObject *pylong; digit *ob_digit; size_t n; - Py_ssize_t i; mpd_t *x; mpd_context_t workctx; uint32_t status = 0; @@ -3536,26 +3526,9 @@ dec_as_long(PyObject *dec, PyObject *context, int round) } assert(n > 0); - pylong = _PyLong_New(n); - if (pylong == NULL) { - mpd_free(ob_digit); - mpd_del(x); - return NULL; - } - - memcpy(pylong->long_value.ob_digit, ob_digit, n * sizeof(digit)); + assert(!mpd_iszero(x)); + pylong = _PyLong_FromDigits(mpd_isnegative(x), n, ob_digit); mpd_free(ob_digit); - - i = n; - while ((i > 0) && (pylong->long_value.ob_digit[i-1] == 0)) { - i--; - } - - Py_SET_SIZE(pylong, i); - if (mpd_isnegative(x) && !mpd_iszero(x)) { - Py_SET_SIZE(pylong, -i); - } - mpd_del(x); return (PyObject *) pylong; } diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index ee8c588020118c..7476e5dc7dd61e 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -355,7 +355,7 @@ py_digest_by_name(PyObject *module, const char *name, enum Py_hash_type py_ht) } } if (digest == NULL) { - _setException(PyExc_ValueError, "unsupported hash type %s", name); + _setException(state->unsupported_digestmod_error, "unsupported hash type %s", name); return NULL; } return digest; diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c index 1506755427fc0d..a3bfbc9ac5a1b1 100644 --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -11,6 +11,7 @@ #include "Python.h" #include "_iomodule.h" #include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_initconfig.h" // _PyStatus_OK() #ifdef HAVE_SYS_TYPES_H #include <sys/types.h> @@ -615,8 +616,9 @@ iomodule_clear(PyObject *mod) { } static void -iomodule_free(PyObject *mod) { - iomodule_clear(mod); +iomodule_free(void *mod) +{ + (void)iomodule_clear((PyObject *)mod); } @@ -666,12 +668,38 @@ static PyTypeObject* static_types[] = { }; +PyStatus +_PyIO_InitTypes(PyInterpreterState *interp) +{ +#ifdef HAVE_WINDOWS_CONSOLE_IO + if (_Py_IsMainInterpreter(interp)) { + // Set type base classes + PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type; + } +#endif + + for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) { + PyTypeObject *type = static_types[i]; + if (_PyStaticType_InitBuiltin(type) < 0) { + return _PyStatus_ERR("Can't initialize builtin type"); + } + } + + return _PyStatus_OK(); +} + void -_PyIO_Fini(void) +_PyIO_FiniTypes(PyInterpreterState *interp) { + if (!_Py_IsMainInterpreter(interp)) { + return; + } + + // Deallocate types in the reverse order to deallocate subclasses before + // their base classes. 
for (Py_ssize_t i=Py_ARRAY_LENGTH(static_types) - 1; i >= 0; i--) { - PyTypeObject *exc = static_types[i]; - _PyStaticType_Dealloc(exc); + PyTypeObject *type = static_types[i]; + _PyStaticType_Dealloc(type); } } @@ -717,11 +745,6 @@ PyInit__io(void) goto fail; } - // Set type base classes -#ifdef HAVE_WINDOWS_CONSOLE_IO - PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type; -#endif - // Add types for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) { PyTypeObject *type = static_types[i]; diff --git a/Modules/_pickle.c b/Modules/_pickle.c index a26732af8ba2a1..360c7910f67187 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -22,12 +22,12 @@ PyDoc_STRVAR(pickle_module_doc, /*[clinic input] module _pickle -class _pickle.Pickler "PicklerObject *" "&Pickler_Type" -class _pickle.PicklerMemoProxy "PicklerMemoProxyObject *" "&PicklerMemoProxyType" -class _pickle.Unpickler "UnpicklerObject *" "&Unpickler_Type" -class _pickle.UnpicklerMemoProxy "UnpicklerMemoProxyObject *" "&UnpicklerMemoProxyType" +class _pickle.Pickler "PicklerObject *" "" +class _pickle.PicklerMemoProxy "PicklerMemoProxyObject *" "" +class _pickle.Unpickler "UnpicklerObject *" "" +class _pickle.UnpicklerMemoProxy "UnpicklerMemoProxyObject *" "" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=4b3e113468a58e6c]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=b6d7191ab6466cda]*/ /* Bump HIGHEST_PROTOCOL when new opcodes are added to the pickle protocol. Bump DEFAULT_PROTOCOL only when the oldest still supported version of Python @@ -192,24 +192,41 @@ typedef struct { /* functools.partial, used for implementing __newobj_ex__ with protocols 2 and 3 */ PyObject *partial; + + /* Types */ + PyTypeObject *Pickler_Type; + PyTypeObject *Unpickler_Type; + PyTypeObject *Pdata_Type; + PyTypeObject *PicklerMemoProxyType; + PyTypeObject *UnpicklerMemoProxyType; } PickleState; /* Forward declaration of the _pickle module definition. */ static struct PyModuleDef _picklemodule; /* Given a module object, get its per-module state. */ -static PickleState * +static inline PickleState * _Pickle_GetState(PyObject *module) { - return (PickleState *)_PyModule_GetState(module); + void *state = _PyModule_GetState(module); + assert(state != NULL); + return (PickleState *)state; +} + +static inline PickleState * +_Pickle_GetStateByClass(PyTypeObject *cls) +{ + void *state = _PyType_GetModuleState(cls); + assert(state != NULL); + return (PickleState *)state; } -/* Find the module instance imported in the currently running sub-interpreter - and get its state. */ -static PickleState * -_Pickle_GetGlobalState(void) +static inline PickleState * +_Pickle_FindStateByType(PyTypeObject *tp) { - return _Pickle_GetState(PyState_FindModule(&_picklemodule)); + PyObject *module = PyType_GetModuleByDef(tp, &_picklemodule); + assert(module != NULL); + return _Pickle_GetState(module); } /* Clear the given pickle module state. */ @@ -230,6 +247,11 @@ _Pickle_ClearState(PickleState *st) Py_CLEAR(st->codecs_encode); Py_CLEAR(st->getattr); Py_CLEAR(st->partial); + Py_CLEAR(st->Pickler_Type); + Py_CLEAR(st->Unpickler_Type); + Py_CLEAR(st->Pdata_Type); + Py_CLEAR(st->PicklerMemoProxyType); + Py_CLEAR(st->UnpicklerMemoProxyType); } /* Initialize the given pickle module state. 
*/ @@ -439,39 +461,58 @@ typedef struct { Py_ssize_t allocated; /* number of slots in data allocated */ } Pdata; +static int +Pdata_traverse(Pdata *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + static void Pdata_dealloc(Pdata *self) { + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(self); Py_ssize_t i = Py_SIZE(self); while (--i >= 0) { Py_DECREF(self->data[i]); } PyMem_Free(self->data); - PyObject_Free(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); } -static PyTypeObject Pdata_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_pickle.Pdata", /*tp_name*/ - sizeof(Pdata), /*tp_basicsize*/ - sizeof(PyObject *), /*tp_itemsize*/ - (destructor)Pdata_dealloc, /*tp_dealloc*/ +static PyType_Slot pdata_slots[] = { + {Py_tp_dealloc, Pdata_dealloc}, + {Py_tp_traverse, Pdata_traverse}, + {0, NULL}, +}; + +static PyType_Spec pdata_spec = { + .name = "_pickle.Pdata", + .basicsize = sizeof(Pdata), + .itemsize = sizeof(PyObject *), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pdata_slots, }; static PyObject * -Pdata_New(void) +Pdata_New(PickleState *state) { Pdata *self; - if (!(self = PyObject_New(Pdata, &Pdata_Type))) + if (!(self = PyObject_GC_New(Pdata, state->Pdata_Type))) return NULL; Py_SET_SIZE(self, 0); self->mark_set = 0; self->fence = 0; self->allocated = 8; self->data = PyMem_Malloc(self->allocated * sizeof(PyObject *)); - if (self->data) + if (self->data) { + PyObject_GC_Track(self); return (PyObject *)self; + } Py_DECREF(self); return PyErr_NoMemory(); } @@ -522,9 +563,8 @@ Pdata_grow(Pdata *self) } static int -Pdata_stack_underflow(Pdata *self) +Pdata_stack_underflow(PickleState *st, Pdata *self) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, self->mark_set ? "unexpected MARK found" : @@ -537,16 +577,16 @@ Pdata_stack_underflow(Pdata *self) * is raised and V is set to NULL. 
*/ static PyObject * -Pdata_pop(Pdata *self) +Pdata_pop(PickleState *state, Pdata *self) { if (Py_SIZE(self) <= self->fence) { - Pdata_stack_underflow(self); + Pdata_stack_underflow(state, self); return NULL; } Py_SET_SIZE(self, Py_SIZE(self) - 1); return self->data[Py_SIZE(self)]; } -#define PDATA_POP(D, V) do { (V) = Pdata_pop((D)); } while (0) +#define PDATA_POP(S, D, V) do { (V) = Pdata_pop(S, (D)); } while (0) static int Pdata_push(Pdata *self, PyObject *obj) @@ -569,13 +609,13 @@ Pdata_push(Pdata *self, PyObject *obj) if (Pdata_push((D), (O)) < 0) return (ER); } while(0) static PyObject * -Pdata_poptuple(Pdata *self, Py_ssize_t start) +Pdata_poptuple(PickleState *state, Pdata *self, Py_ssize_t start) { PyObject *tuple; Py_ssize_t len, i, j; if (start < self->fence) { - Pdata_stack_underflow(self); + Pdata_stack_underflow(state, self); return NULL; } len = Py_SIZE(self) - start; @@ -710,10 +750,8 @@ typedef struct { } UnpicklerMemoProxyObject; /* Forward declarations */ -static int save(PicklerObject *, PyObject *, int); -static int save_reduce(PicklerObject *, PyObject *, PyObject *); -static PyTypeObject Pickler_Type; -static PyTypeObject Unpickler_Type; +static int save(PickleState *state, PicklerObject *, PyObject *, int); +static int save_reduce(PickleState *, PicklerObject *, PyObject *, PyObject *); #include "clinic/_pickle.c.h" @@ -1105,11 +1143,11 @@ _Pickler_Write(PicklerObject *self, const char *s, Py_ssize_t data_len) } static PicklerObject * -_Pickler_New(void) +_Pickler_New(PickleState *st) { PicklerObject *self; - self = PyObject_GC_New(PicklerObject, &Pickler_Type); + self = PyObject_GC_New(PicklerObject, st->Pickler_Type); if (self == NULL) return NULL; @@ -1220,9 +1258,8 @@ _Unpickler_SetStringInput(UnpicklerObject *self, PyObject *input) } static int -bad_readline(void) +bad_readline(PickleState *st) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "pickle data was truncated"); return -1; } @@ -1317,13 +1354,12 @@ _Unpickler_ReadFromFile(UnpicklerObject *self, Py_ssize_t n) /* Don't call it directly: use _Unpickler_Read() */ static Py_ssize_t -_Unpickler_ReadImpl(UnpicklerObject *self, char **s, Py_ssize_t n) +_Unpickler_ReadImpl(UnpicklerObject *self, PickleState *st, char **s, Py_ssize_t n) { Py_ssize_t num_read; *s = NULL; if (self->next_read_idx > PY_SSIZE_T_MAX - n) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "read would overflow (invalid bytecode)"); return -1; @@ -1333,14 +1369,14 @@ _Unpickler_ReadImpl(UnpicklerObject *self, char **s, Py_ssize_t n) assert(self->next_read_idx + n > self->input_len); if (!self->read) - return bad_readline(); + return bad_readline(st); /* Extend the buffer to satisfy desired size */ num_read = _Unpickler_ReadFromFile(self, n); if (num_read < 0) return -1; if (num_read < n) - return bad_readline(); + return bad_readline(st); *s = self->input_buffer; self->next_read_idx = n; return n; @@ -1355,7 +1391,8 @@ _Unpickler_ReadImpl(UnpicklerObject *self, char **s, Py_ssize_t n) * _Unpickler_Read() is recommended in most cases. 
*/ static Py_ssize_t -_Unpickler_ReadInto(UnpicklerObject *self, char *buf, Py_ssize_t n) +_Unpickler_ReadInto(PickleState *state, UnpicklerObject *self, char *buf, + Py_ssize_t n) { assert(n != READ_WHOLE_LINE); @@ -1376,7 +1413,7 @@ _Unpickler_ReadInto(UnpicklerObject *self, char *buf, Py_ssize_t n) /* Read from file */ if (!self->read) { /* We're unpickling memory, this means the input is truncated */ - return bad_readline(); + return bad_readline(state); } if (_Unpickler_SkipConsumed(self) < 0) { return -1; @@ -1403,7 +1440,7 @@ _Unpickler_ReadInto(UnpicklerObject *self, char *buf, Py_ssize_t n) Py_ssize_t read_size = PyBytes_GET_SIZE(data); if (read_size < n) { Py_DECREF(data); - return bad_readline(); + return bad_readline(state); } memcpy(buf, PyBytes_AS_STRING(data), n); Py_DECREF(data); @@ -1430,7 +1467,7 @@ _Unpickler_ReadInto(UnpicklerObject *self, char *buf, Py_ssize_t n) return -1; } if (read_size < n) { - return bad_readline(); + return bad_readline(state); } return n; } @@ -1448,12 +1485,12 @@ _Unpickler_ReadInto(UnpicklerObject *self, char *buf, Py_ssize_t n) Returns -1 (with an exception set) on failure. On success, return the number of chars read. */ -#define _Unpickler_Read(self, s, n) \ +#define _Unpickler_Read(self, state, s, n) \ (((n) <= (self)->input_len - (self)->next_read_idx) \ ? (*(s) = (self)->input_buffer + (self)->next_read_idx, \ (self)->next_read_idx += (n), \ (n)) \ - : _Unpickler_ReadImpl(self, (s), (n))) + : _Unpickler_ReadImpl(self, state, (s), (n))) static Py_ssize_t _Unpickler_CopyLine(UnpicklerObject *self, char *line, Py_ssize_t len, @@ -1477,7 +1514,7 @@ _Unpickler_CopyLine(UnpicklerObject *self, char *line, Py_ssize_t len, Returns the number of chars read, or -1 on failure. */ static Py_ssize_t -_Unpickler_Readline(UnpicklerObject *self, char **result) +_Unpickler_Readline(PickleState *state, UnpicklerObject *self, char **result) { Py_ssize_t i, num_read; @@ -1490,13 +1527,13 @@ _Unpickler_Readline(UnpicklerObject *self, char **result) } } if (!self->read) - return bad_readline(); + return bad_readline(state); num_read = _Unpickler_ReadFromFile(self, READ_WHOLE_LINE); if (num_read < 0) return -1; if (num_read == 0 || self->input_buffer[num_read - 1] != '\n') - return bad_readline(); + return bad_readline(state); self->next_read_idx = num_read; return _Unpickler_CopyLine(self, self->input_buffer, num_read, result); } @@ -1586,11 +1623,12 @@ _Unpickler_MemoCleanup(UnpicklerObject *self) } static UnpicklerObject * -_Unpickler_New(void) +_Unpickler_New(PyObject *module) { UnpicklerObject *self; + PickleState *st = _Pickle_GetState(module); - self = PyObject_GC_New(UnpicklerObject, &Unpickler_Type); + self = PyObject_GC_New(UnpicklerObject, st->Unpickler_Type); if (self == NULL) return NULL; @@ -1616,7 +1654,7 @@ _Unpickler_New(void) self->memo_size = 32; self->memo_len = 0; self->memo = _Unpickler_NewMemo(self->memo_size); - self->stack = (Pdata *)Pdata_New(); + self->stack = (Pdata *)Pdata_New(st); if (self->memo == NULL || self->stack == NULL) { Py_DECREF(self); @@ -1695,7 +1733,7 @@ _Unpickler_SetBuffers(UnpicklerObject *self, PyObject *buffers) /* Generate a GET opcode for an object stored in the memo. 
*/ static int -memo_get(PicklerObject *self, PyObject *key) +memo_get(PickleState *st, PicklerObject *self, PyObject *key) { Py_ssize_t *value; char pdata[30]; @@ -1728,7 +1766,6 @@ memo_get(PicklerObject *self, PyObject *key) len = 5; } else { /* unlikely */ - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->PicklingError, "memo id too large for LONG_BINGET"); return -1; @@ -1744,7 +1781,7 @@ memo_get(PicklerObject *self, PyObject *key) /* Store an object in the memo, assign it a new unique ID based on the number of objects currently stored in the memo and generate a PUT opcode. */ static int -memo_put(PicklerObject *self, PyObject *obj) +memo_put(PickleState *st, PicklerObject *self, PyObject *obj) { char pdata[30]; Py_ssize_t len; @@ -1785,7 +1822,6 @@ memo_put(PicklerObject *self, PyObject *obj) len = 5; } else { /* unlikely */ - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->PicklingError, "memo id too large for LONG_BINPUT"); return -1; @@ -2338,8 +2374,8 @@ _Pickler_write_bytes(PicklerObject *self, } static int -_save_bytes_data(PicklerObject *self, PyObject *obj, const char *data, - Py_ssize_t size) +_save_bytes_data(PickleState *st, PicklerObject *self, PyObject *obj, + const char *data, Py_ssize_t size) { assert(self->proto >= 3); @@ -2378,7 +2414,7 @@ _save_bytes_data(PicklerObject *self, PyObject *obj, const char *data, return -1; } - if (memo_put(self, obj) < 0) { + if (memo_put(st, self, obj) < 0) { return -1; } @@ -2386,7 +2422,7 @@ _save_bytes_data(PicklerObject *self, PyObject *obj, const char *data, } static int -save_bytes(PicklerObject *self, PyObject *obj) +save_bytes(PickleState *st, PicklerObject *self, PyObject *obj) { if (self->proto < 3) { /* Older pickle protocols do not have an opcode for pickling bytes @@ -2407,7 +2443,6 @@ save_bytes(PicklerObject *self, PyObject *obj) reduce_value = Py_BuildValue("(O())", (PyObject*)&PyBytes_Type); } else { - PickleState *st = _Pickle_GetGlobalState(); PyObject *unicode_str = PyUnicode_DecodeLatin1(PyBytes_AS_STRING(obj), PyBytes_GET_SIZE(obj), @@ -2425,19 +2460,19 @@ save_bytes(PicklerObject *self, PyObject *obj) return -1; /* save_reduce() will memoize the object automatically. */ - status = save_reduce(self, reduce_value, obj); + status = save_reduce(st, self, reduce_value, obj); Py_DECREF(reduce_value); return status; } else { - return _save_bytes_data(self, obj, PyBytes_AS_STRING(obj), + return _save_bytes_data(st, self, obj, PyBytes_AS_STRING(obj), PyBytes_GET_SIZE(obj)); } } static int -_save_bytearray_data(PicklerObject *self, PyObject *obj, const char *data, - Py_ssize_t size) +_save_bytearray_data(PickleState *state, PicklerObject *self, PyObject *obj, + const char *data, Py_ssize_t size) { assert(self->proto >= 5); @@ -2455,7 +2490,7 @@ _save_bytearray_data(PicklerObject *self, PyObject *obj, const char *data, return -1; } - if (memo_put(self, obj) < 0) { + if (memo_put(state, self, obj) < 0) { return -1; } @@ -2463,7 +2498,7 @@ _save_bytearray_data(PicklerObject *self, PyObject *obj, const char *data, } static int -save_bytearray(PicklerObject *self, PyObject *obj) +save_bytearray(PickleState *state, PicklerObject *self, PyObject *obj) { if (self->proto < 5) { /* Older pickle protocols do not have an opcode for pickling @@ -2488,21 +2523,21 @@ save_bytearray(PicklerObject *self, PyObject *obj) return -1; /* save_reduce() will memoize the object automatically. 
*/ - status = save_reduce(self, reduce_value, obj); + status = save_reduce(state, self, reduce_value, obj); Py_DECREF(reduce_value); return status; } else { - return _save_bytearray_data(self, obj, PyByteArray_AS_STRING(obj), + return _save_bytearray_data(state, self, obj, + PyByteArray_AS_STRING(obj), PyByteArray_GET_SIZE(obj)); } } static int -save_picklebuffer(PicklerObject *self, PyObject *obj) +save_picklebuffer(PickleState *st, PicklerObject *self, PyObject *obj) { if (self->proto < 5) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->PicklingError, "PickleBuffer can only pickled with protocol >= 5"); return -1; @@ -2512,7 +2547,6 @@ save_picklebuffer(PicklerObject *self, PyObject *obj) return -1; } if (view->suboffsets != NULL || !PyBuffer_IsContiguous(view, 'A')) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->PicklingError, "PickleBuffer can not be pickled when " "pointing to a non-contiguous buffer"); @@ -2533,11 +2567,11 @@ save_picklebuffer(PicklerObject *self, PyObject *obj) if (in_band) { /* Write data in-band */ if (view->readonly) { - return _save_bytes_data(self, obj, (const char*) view->buf, + return _save_bytes_data(st, self, obj, (const char *)view->buf, view->len); } else { - return _save_bytearray_data(self, obj, (const char*) view->buf, + return _save_bytearray_data(st, self, obj, (const char *)view->buf, view->len); } } @@ -2692,7 +2726,7 @@ write_unicode_binary(PicklerObject *self, PyObject *obj) } static int -save_unicode(PicklerObject *self, PyObject *obj) +save_unicode(PickleState *state, PicklerObject *self, PyObject *obj) { if (self->bin) { if (write_unicode_binary(self, obj) < 0) @@ -2722,7 +2756,7 @@ save_unicode(PicklerObject *self, PyObject *obj) if (_Pickler_Write(self, "\n", 1) < 0) return -1; } - if (memo_put(self, obj) < 0) + if (memo_put(state, self, obj) < 0) return -1; return 0; @@ -2730,7 +2764,8 @@ save_unicode(PicklerObject *self, PyObject *obj) /* A helper for save_tuple. Push the len elements in tuple t on the stack. */ static int -store_tuple_elements(PicklerObject *self, PyObject *t, Py_ssize_t len) +store_tuple_elements(PickleState *state, PicklerObject *self, PyObject *t, + Py_ssize_t len) { Py_ssize_t i; @@ -2741,7 +2776,7 @@ store_tuple_elements(PicklerObject *self, PyObject *t, Py_ssize_t len) if (element == NULL) return -1; - if (save(self, element, 0) < 0) + if (save(state, self, element, 0) < 0) return -1; } @@ -2755,7 +2790,7 @@ store_tuple_elements(PicklerObject *self, PyObject *t, Py_ssize_t len) * magic so that it works in all cases. IOW, this is a long routine. */ static int -save_tuple(PicklerObject *self, PyObject *obj) +save_tuple(PickleState *state, PicklerObject *self, PyObject *obj) { Py_ssize_t len, i; @@ -2792,7 +2827,7 @@ save_tuple(PicklerObject *self, PyObject *obj) */ if (len <= 3 && self->proto >= 2) { /* Use TUPLE{1,2,3} opcodes. 
*/ - if (store_tuple_elements(self, obj, len) < 0) + if (store_tuple_elements(state, self, obj, len) < 0) return -1; if (PyMemoTable_Get(self->memo, obj)) { @@ -2801,7 +2836,7 @@ save_tuple(PicklerObject *self, PyObject *obj) if (_Pickler_Write(self, &pop_op, 1) < 0) return -1; /* fetch from memo */ - if (memo_get(self, obj) < 0) + if (memo_get(state, self, obj) < 0) return -1; return 0; @@ -2819,7 +2854,7 @@ save_tuple(PicklerObject *self, PyObject *obj) if (_Pickler_Write(self, &mark_op, 1) < 0) return -1; - if (store_tuple_elements(self, obj, len) < 0) + if (store_tuple_elements(state, self, obj, len) < 0) return -1; if (PyMemoTable_Get(self->memo, obj)) { @@ -2837,7 +2872,7 @@ save_tuple(PicklerObject *self, PyObject *obj) return -1; } /* fetch from memo */ - if (memo_get(self, obj) < 0) + if (memo_get(state, self, obj) < 0) return -1; return 0; @@ -2848,7 +2883,7 @@ save_tuple(PicklerObject *self, PyObject *obj) } memoize: - if (memo_put(self, obj) < 0) + if (memo_put(state, self, obj) < 0) return -1; return 0; @@ -2861,7 +2896,7 @@ save_tuple(PicklerObject *self, PyObject *obj) * Returns 0 on success, <0 on error. */ static int -batch_list(PicklerObject *self, PyObject *iter) +batch_list(PickleState *state, PicklerObject *self, PyObject *iter) { PyObject *obj = NULL; PyObject *firstitem = NULL; @@ -2887,7 +2922,7 @@ batch_list(PicklerObject *self, PyObject *iter) return -1; break; } - i = save(self, obj, 0); + i = save(state, self, obj, 0); Py_DECREF(obj); if (i < 0) return -1; @@ -2916,7 +2951,7 @@ batch_list(PicklerObject *self, PyObject *iter) goto error; /* Only one item to write */ - if (save(self, firstitem, 0) < 0) + if (save(state, self, firstitem, 0) < 0) goto error; if (_Pickler_Write(self, &append_op, 1) < 0) goto error; @@ -2930,14 +2965,14 @@ batch_list(PicklerObject *self, PyObject *iter) if (_Pickler_Write(self, &mark_op, 1) < 0) goto error; - if (save(self, firstitem, 0) < 0) + if (save(state, self, firstitem, 0) < 0) goto error; Py_CLEAR(firstitem); n = 1; /* Fetch and save up to BATCHSIZE items */ while (obj) { - if (save(self, obj, 0) < 0) + if (save(state, self, obj, 0) < 0) goto error; Py_CLEAR(obj); n += 1; @@ -2977,7 +3012,7 @@ batch_list(PicklerObject *self, PyObject *iter) * Note that this only works for protocols > 0. 
*/ static int -batch_list_exact(PicklerObject *self, PyObject *obj) +batch_list_exact(PickleState *state, PicklerObject *self, PyObject *obj) { PyObject *item = NULL; Py_ssize_t this_batch, total; @@ -2993,7 +3028,7 @@ batch_list_exact(PicklerObject *self, PyObject *obj) if (PyList_GET_SIZE(obj) == 1) { item = PyList_GET_ITEM(obj, 0); Py_INCREF(item); - int err = save(self, item, 0); + int err = save(state, self, item, 0); Py_DECREF(item); if (err < 0) return -1; @@ -3011,7 +3046,7 @@ batch_list_exact(PicklerObject *self, PyObject *obj) while (total < PyList_GET_SIZE(obj)) { item = PyList_GET_ITEM(obj, total); Py_INCREF(item); - int err = save(self, item, 0); + int err = save(state, self, item, 0); Py_DECREF(item); if (err < 0) return -1; @@ -3028,7 +3063,7 @@ batch_list_exact(PicklerObject *self, PyObject *obj) } static int -save_list(PicklerObject *self, PyObject *obj) +save_list(PickleState *state, PicklerObject *self, PyObject *obj) { char header[3]; Py_ssize_t len; @@ -3055,7 +3090,7 @@ save_list(PicklerObject *self, PyObject *obj) if ((len = PyList_Size(obj)) < 0) goto error; - if (memo_put(self, obj) < 0) + if (memo_put(state, self, obj) < 0) goto error; if (len != 0) { @@ -3063,7 +3098,7 @@ save_list(PicklerObject *self, PyObject *obj) if (PyList_CheckExact(obj) && self->proto > 0) { if (_Py_EnterRecursiveCall(" while pickling an object")) goto error; - status = batch_list_exact(self, obj); + status = batch_list_exact(state, self, obj); _Py_LeaveRecursiveCall(); } else { PyObject *iter = PyObject_GetIter(obj); @@ -3074,7 +3109,7 @@ save_list(PicklerObject *self, PyObject *obj) Py_DECREF(iter); goto error; } - status = batch_list(self, iter); + status = batch_list(state, self, iter); _Py_LeaveRecursiveCall(); Py_DECREF(iter); } @@ -3102,7 +3137,7 @@ save_list(PicklerObject *self, PyObject *obj) * ugly to bear. 
*/ static int -batch_dict(PicklerObject *self, PyObject *iter) +batch_dict(PickleState *state, PicklerObject *self, PyObject *iter) { PyObject *obj = NULL; PyObject *firstitem = NULL; @@ -3128,9 +3163,9 @@ batch_dict(PicklerObject *self, PyObject *iter) "iterator must return 2-tuples"); return -1; } - i = save(self, PyTuple_GET_ITEM(obj, 0), 0); + i = save(state, self, PyTuple_GET_ITEM(obj, 0), 0); if (i >= 0) - i = save(self, PyTuple_GET_ITEM(obj, 1), 0); + i = save(state, self, PyTuple_GET_ITEM(obj, 1), 0); Py_DECREF(obj); if (i < 0) return -1; @@ -3164,9 +3199,9 @@ batch_dict(PicklerObject *self, PyObject *iter) goto error; /* Only one item to write */ - if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0) + if (save(state, self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0) goto error; - if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0) + if (save(state, self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0) goto error; if (_Pickler_Write(self, &setitem_op, 1) < 0) goto error; @@ -3180,9 +3215,9 @@ batch_dict(PicklerObject *self, PyObject *iter) if (_Pickler_Write(self, &mark_op, 1) < 0) goto error; - if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0) + if (save(state, self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0) goto error; - if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0) + if (save(state, self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0) goto error; Py_CLEAR(firstitem); n = 1; @@ -3194,8 +3229,8 @@ batch_dict(PicklerObject *self, PyObject *iter) "iterator must return 2-tuples"); goto error; } - if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 || - save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0) + if (save(state, self, PyTuple_GET_ITEM(obj, 0), 0) < 0 || + save(state, self, PyTuple_GET_ITEM(obj, 1), 0) < 0) goto error; Py_CLEAR(obj); n += 1; @@ -3233,7 +3268,7 @@ batch_dict(PicklerObject *self, PyObject *iter) * Note that this currently doesn't work for protocol 0. */ static int -batch_dict_exact(PicklerObject *self, PyObject *obj) +batch_dict_exact(PickleState *state, PicklerObject *self, PyObject *obj) { PyObject *key = NULL, *value = NULL; int i; @@ -3253,10 +3288,10 @@ batch_dict_exact(PicklerObject *self, PyObject *obj) PyDict_Next(obj, &ppos, &key, &value); Py_INCREF(key); Py_INCREF(value); - if (save(self, key, 0) < 0) { + if (save(state, self, key, 0) < 0) { goto error; } - if (save(self, value, 0) < 0) { + if (save(state, self, value, 0) < 0) { goto error; } Py_CLEAR(key); @@ -3274,10 +3309,10 @@ batch_dict_exact(PicklerObject *self, PyObject *obj) while (PyDict_Next(obj, &ppos, &key, &value)) { Py_INCREF(key); Py_INCREF(value); - if (save(self, key, 0) < 0) { + if (save(state, self, key, 0) < 0) { goto error; } - if (save(self, value, 0) < 0) { + if (save(state, self, value, 0) < 0) { goto error; } Py_CLEAR(key); @@ -3303,7 +3338,7 @@ batch_dict_exact(PicklerObject *self, PyObject *obj) } static int -save_dict(PicklerObject *self, PyObject *obj) +save_dict(PickleState *state, PicklerObject *self, PyObject *obj) { PyObject *items, *iter; char header[3]; @@ -3328,7 +3363,7 @@ save_dict(PicklerObject *self, PyObject *obj) if (_Pickler_Write(self, header, len) < 0) goto error; - if (memo_put(self, obj) < 0) + if (memo_put(state, self, obj) < 0) goto error; if (PyDict_GET_SIZE(obj)) { @@ -3338,7 +3373,7 @@ save_dict(PicklerObject *self, PyObject *obj) not a dict subclass. 
*/ if (_Py_EnterRecursiveCall(" while pickling an object")) goto error; - status = batch_dict_exact(self, obj); + status = batch_dict_exact(state, self, obj); _Py_LeaveRecursiveCall(); } else { items = PyObject_CallMethodNoArgs(obj, &_Py_ID(items)); @@ -3352,7 +3387,7 @@ save_dict(PicklerObject *self, PyObject *obj) Py_DECREF(iter); goto error; } - status = batch_dict(self, iter); + status = batch_dict(state, self, iter); _Py_LeaveRecursiveCall(); Py_DECREF(iter); } @@ -3370,7 +3405,7 @@ save_dict(PicklerObject *self, PyObject *obj) } static int -save_set(PicklerObject *self, PyObject *obj) +save_set(PickleState *state, PicklerObject *self, PyObject *obj) { PyObject *item; int i; @@ -3396,7 +3431,7 @@ save_set(PicklerObject *self, PyObject *obj) return -1; } /* save_reduce() will memoize the object automatically. */ - status = save_reduce(self, reduce_value, obj); + status = save_reduce(state, self, reduce_value, obj); Py_DECREF(reduce_value); return status; } @@ -3404,7 +3439,7 @@ save_set(PicklerObject *self, PyObject *obj) if (_Pickler_Write(self, &empty_set_op, 1) < 0) return -1; - if (memo_put(self, obj) < 0) + if (memo_put(state, self, obj) < 0) return -1; set_size = PySet_GET_SIZE(obj); @@ -3418,7 +3453,7 @@ save_set(PicklerObject *self, PyObject *obj) return -1; while (_PySet_NextEntry(obj, &ppos, &item, &hash)) { Py_INCREF(item); - int err = save(self, item, 0); + int err = save(state, self, item, 0); Py_CLEAR(item); if (err < 0) return -1; @@ -3439,7 +3474,7 @@ save_set(PicklerObject *self, PyObject *obj) } static int -save_frozenset(PicklerObject *self, PyObject *obj) +save_frozenset(PickleState *state, PicklerObject *self, PyObject *obj) { PyObject *iter; @@ -3465,7 +3500,7 @@ save_frozenset(PicklerObject *self, PyObject *obj) return -1; } /* save_reduce() will memoize the object automatically. 
*/ - status = save_reduce(self, reduce_value, obj); + status = save_reduce(state, self, reduce_value, obj); Py_DECREF(reduce_value); return status; } @@ -3488,7 +3523,7 @@ save_frozenset(PicklerObject *self, PyObject *obj) } break; } - if (save(self, item, 0) < 0) { + if (save(state, self, item, 0) < 0) { Py_DECREF(item); Py_DECREF(iter); return -1; @@ -3505,25 +3540,24 @@ save_frozenset(PicklerObject *self, PyObject *obj) if (_Pickler_Write(self, &pop_mark_op, 1) < 0) return -1; - if (memo_get(self, obj) < 0) + if (memo_get(state, self, obj) < 0) return -1; return 0; } if (_Pickler_Write(self, &frozenset_op, 1) < 0) return -1; - if (memo_put(self, obj) < 0) + if (memo_put(state, self, obj) < 0) return -1; return 0; } static int -fix_imports(PyObject **module_name, PyObject **global_name) +fix_imports(PickleState *st, PyObject **module_name, PyObject **global_name) { PyObject *key; PyObject *item; - PickleState *st = _Pickle_GetGlobalState(); key = PyTuple_Pack(2, *module_name, *global_name); if (key == NULL) @@ -3582,7 +3616,8 @@ fix_imports(PyObject **module_name, PyObject **global_name) } static int -save_global(PicklerObject *self, PyObject *obj, PyObject *name) +save_global(PickleState *st, PicklerObject *self, PyObject *obj, + PyObject *name) { PyObject *global_name = NULL; PyObject *module_name = NULL; @@ -3591,7 +3626,6 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) PyObject *dotted_path = NULL; PyObject *lastname = NULL; PyObject *cls; - PickleState *st = _Pickle_GetGlobalState(); int status = 0; const char global_op = GLOBAL; @@ -3727,21 +3761,20 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) if (self->proto >= 4) { const char stack_global_op = STACK_GLOBAL; - if (save(self, module_name, 0) < 0) + if (save(st, self, module_name, 0) < 0) goto error; - if (save(self, global_name, 0) < 0) + if (save(st, self, global_name, 0) < 0) goto error; if (_Pickler_Write(self, &stack_global_op, 1) < 0) goto error; } else if (parent != module) { - PickleState *st = _Pickle_GetGlobalState(); PyObject *reduce_value = Py_BuildValue("(O(OO))", st->getattr, parent, lastname); if (reduce_value == NULL) goto error; - status = save_reduce(self, reduce_value, NULL); + status = save_reduce(st, self, reduce_value, NULL); Py_DECREF(reduce_value); if (status < 0) goto error; @@ -3759,7 +3792,7 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) /* For protocol < 3 and if the user didn't request against doing so, we convert module names to the old 2.x module names. */ if (self->proto < 3 && self->fix_imports) { - if (fix_imports(&module_name, &global_name) < 0) { + if (fix_imports(st, &module_name, &global_name) < 0) { goto error; } } @@ -3813,7 +3846,7 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) goto error; } /* Memoize the object. 
*/ - if (memo_put(self, obj) < 0) + if (memo_put(st, self, obj) < 0) goto error; } @@ -3832,7 +3865,8 @@ save_global(PicklerObject *self, PyObject *obj, PyObject *name) } static int -save_singleton_type(PicklerObject *self, PyObject *obj, PyObject *singleton) +save_singleton_type(PickleState *state, PicklerObject *self, PyObject *obj, + PyObject *singleton) { PyObject *reduce_value; int status; @@ -3841,28 +3875,28 @@ save_singleton_type(PicklerObject *self, PyObject *obj, PyObject *singleton) if (reduce_value == NULL) { return -1; } - status = save_reduce(self, reduce_value, obj); + status = save_reduce(state, self, reduce_value, obj); Py_DECREF(reduce_value); return status; } static int -save_type(PicklerObject *self, PyObject *obj) +save_type(PickleState *state, PicklerObject *self, PyObject *obj) { if (obj == (PyObject *)&_PyNone_Type) { - return save_singleton_type(self, obj, Py_None); + return save_singleton_type(state, self, obj, Py_None); } else if (obj == (PyObject *)&PyEllipsis_Type) { - return save_singleton_type(self, obj, Py_Ellipsis); + return save_singleton_type(state, self, obj, Py_Ellipsis); } else if (obj == (PyObject *)&_PyNotImplemented_Type) { - return save_singleton_type(self, obj, Py_NotImplemented); + return save_singleton_type(state, self, obj, Py_NotImplemented); } - return save_global(self, obj, NULL); + return save_global(state, self, obj, NULL); } static int -save_pers(PicklerObject *self, PyObject *obj) +save_pers(PickleState *state, PicklerObject *self, PyObject *obj) { PyObject *pid = NULL; int status = 0; @@ -3876,7 +3910,7 @@ save_pers(PicklerObject *self, PyObject *obj) if (pid != Py_None) { if (self->bin) { - if (save(self, pid, 1) < 0 || + if (save(state, self, pid, 1) < 0 || _Pickler_Write(self, &binpersid_op, 1) < 0) goto error; } @@ -3890,7 +3924,7 @@ save_pers(PicklerObject *self, PyObject *obj) /* XXX: Should it check whether the pid contains embedded newlines? */ if (!PyUnicode_IS_ASCII(pid_str)) { - PyErr_SetString(_Pickle_GetGlobalState()->PicklingError, + PyErr_SetString(state->PicklingError, "persistent IDs in protocol 0 must be " "ASCII strings"); Py_DECREF(pid_str); @@ -3933,7 +3967,8 @@ get_class(PyObject *obj) * appropriate __reduce__ method for obj. */ static int -save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) +save_reduce(PickleState *st, PicklerObject *self, PyObject *args, + PyObject *obj) { PyObject *callable; PyObject *argtup; @@ -3941,7 +3976,6 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) PyObject *listitems = Py_None; PyObject *dictitems = Py_None; PyObject *state_setter = Py_None; - PickleState *st = _Pickle_GetGlobalState(); Py_ssize_t size; int use_newobj = 0, use_newobj_ex = 0; @@ -4053,9 +4087,9 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) } if (self->proto >= 4) { - if (save(self, cls, 0) < 0 || - save(self, args, 0) < 0 || - save(self, kwargs, 0) < 0 || + if (save(st, self, cls, 0) < 0 || + save(st, self, args, 0) < 0 || + save(st, self, kwargs, 0) < 0 || _Pickler_Write(self, &newobj_ex_op, 1) < 0) { return -1; } @@ -4092,8 +4126,8 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) return -1; } - if (save(self, callable, 0) < 0 || - save(self, newargs, 0) < 0 || + if (save(st, self, callable, 0) < 0 || + save(st, self, newargs, 0) < 0 || _Pickler_Write(self, &reduce_op, 1) < 0) { Py_DECREF(newargs); Py_DECREF(callable); @@ -4163,14 +4197,15 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) function. 
*/ /* Save the class and its __new__ arguments. */ - if (save(self, cls, 0) < 0) + if (save(st, self, cls, 0) < 0) { return -1; + } newargtup = PyTuple_GetSlice(argtup, 1, PyTuple_GET_SIZE(argtup)); if (newargtup == NULL) return -1; - p = save(self, newargtup, 0); + p = save(st, self, newargtup, 0); Py_DECREF(newargtup); if (p < 0) return -1; @@ -4180,8 +4215,8 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) return -1; } else { /* Not using NEWOBJ. */ - if (save(self, callable, 0) < 0 || - save(self, argtup, 0) < 0 || + if (save(st, self, callable, 0) < 0 || + save(st, self, argtup, 0) < 0 || _Pickler_Write(self, &reduce_op, 1) < 0) return -1; } @@ -4199,24 +4234,24 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) if (_Pickler_Write(self, &pop_op, 1) < 0) return -1; - if (memo_get(self, obj) < 0) + if (memo_get(st, self, obj) < 0) return -1; return 0; } - else if (memo_put(self, obj) < 0) + else if (memo_put(st, self, obj) < 0) return -1; } - if (listitems && batch_list(self, listitems) < 0) + if (listitems && batch_list(st, self, listitems) < 0) return -1; - if (dictitems && batch_dict(self, dictitems) < 0) + if (dictitems && batch_dict(st, self, dictitems) < 0) return -1; if (state) { if (state_setter == NULL) { - if (save(self, state, 0) < 0 || + if (save(st, self, state, 0) < 0 || _Pickler_Write(self, &build_op, 1) < 0) return -1; } @@ -4233,8 +4268,8 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) const char tupletwo_op = TUPLE2; const char pop_op = POP; - if (save(self, state_setter, 0) < 0 || - save(self, obj, 0) < 0 || save(self, state, 0) < 0 || + if (save(st, self, state_setter, 0) < 0 || + save(st, self, obj, 0) < 0 || save(st, self, state, 0) < 0 || _Pickler_Write(self, &tupletwo_op, 1) < 0 || _Pickler_Write(self, &reduce_op, 1) < 0 || _Pickler_Write(self, &pop_op, 1) < 0) @@ -4245,7 +4280,7 @@ save_reduce(PicklerObject *self, PyObject *args, PyObject *obj) } static int -save(PicklerObject *self, PyObject *obj, int pers_save) +save(PickleState *st, PicklerObject *self, PyObject *obj, int pers_save) { PyTypeObject *type; PyObject *reduce_func = NULL; @@ -4263,7 +4298,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save) 0 if it did nothing successfully; 1 if a persistent id was saved. */ - if ((status = save_pers(self, obj)) != 0) + if ((status = save_pers(st, self, obj)) != 0) return status; } @@ -4293,14 +4328,14 @@ save(PicklerObject *self, PyObject *obj, int pers_save) a GET (or BINGET) opcode, instead of pickling the object once again. 
*/ if (PyMemoTable_Get(self->memo, obj)) { - return memo_get(self, obj); + return memo_get(st, self, obj); } if (type == &PyBytes_Type) { - return save_bytes(self, obj); + return save_bytes(st, self, obj); } else if (type == &PyUnicode_Type) { - return save_unicode(self, obj); + return save_unicode(st, self, obj); } /* We're only calling _Py_EnterRecursiveCall here so that atomic @@ -4310,31 +4345,31 @@ save(PicklerObject *self, PyObject *obj, int pers_save) } if (type == &PyDict_Type) { - status = save_dict(self, obj); + status = save_dict(st, self, obj); goto done; } else if (type == &PySet_Type) { - status = save_set(self, obj); + status = save_set(st, self, obj); goto done; } else if (type == &PyFrozenSet_Type) { - status = save_frozenset(self, obj); + status = save_frozenset(st, self, obj); goto done; } else if (type == &PyList_Type) { - status = save_list(self, obj); + status = save_list(st, self, obj); goto done; } else if (type == &PyTuple_Type) { - status = save_tuple(self, obj); + status = save_tuple(st, self, obj); goto done; } else if (type == &PyByteArray_Type) { - status = save_bytearray(self, obj); + status = save_bytearray(st, self, obj); goto done; } else if (type == &PyPickleBuffer_Type) { - status = save_picklebuffer(self, obj); + status = save_picklebuffer(st, self, obj); goto done; } @@ -4354,11 +4389,11 @@ save(PicklerObject *self, PyObject *obj, int pers_save) } if (type == &PyType_Type) { - status = save_type(self, obj); + status = save_type(st, self, obj); goto done; } else if (type == &PyFunction_Type) { - status = save_global(self, obj, NULL); + status = save_global(st, self, obj, NULL); goto done; } @@ -4369,7 +4404,6 @@ save(PicklerObject *self, PyObject *obj, int pers_save) * __reduce_ex__ method, or the object's __reduce__ method. 
*/ if (self->dispatch_table == NULL) { - PickleState *st = _Pickle_GetGlobalState(); reduce_func = PyDict_GetItemWithError(st->dispatch_table, (PyObject *)type); if (reduce_func == NULL) { @@ -4396,7 +4430,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save) reduce_value = _Pickle_FastCall(reduce_func, Py_NewRef(obj)); } else if (PyType_IsSubtype(type, &PyType_Type)) { - status = save_global(self, obj, NULL); + status = save_global(st, self, obj, NULL); goto done; } else { @@ -4428,7 +4462,6 @@ save(PicklerObject *self, PyObject *obj, int pers_save) reduce_value = PyObject_CallNoArgs(reduce_func); } else { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->PicklingError, "can't pickle '%.200s' object: %R", type->tp_name, obj); @@ -4442,18 +4475,17 @@ save(PicklerObject *self, PyObject *obj, int pers_save) reduce: if (PyUnicode_Check(reduce_value)) { - status = save_global(self, obj, reduce_value); + status = save_global(st, self, obj, reduce_value); goto done; } if (!PyTuple_Check(reduce_value)) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->PicklingError, "__reduce__ must return a string or tuple"); goto error; } - status = save_reduce(self, reduce_value, obj); + status = save_reduce(st, self, reduce_value, obj); if (0) { error: @@ -4469,7 +4501,7 @@ save(PicklerObject *self, PyObject *obj, int pers_save) } static int -dump(PicklerObject *self, PyObject *obj) +dump(PickleState *state, PicklerObject *self, PyObject *obj) { const char stop_op = STOP; int status = -1; @@ -4499,7 +4531,7 @@ dump(PicklerObject *self, PyObject *obj) self->framing = 1; } - if (save(self, obj, 0) < 0 || + if (save(state, self, obj, 0) < 0 || _Pickler_Write(self, &stop_op, 1) < 0 || _Pickler_CommitFrame(self) < 0) goto error; @@ -4546,6 +4578,7 @@ _pickle_Pickler_clear_memo_impl(PicklerObject *self) _pickle.Pickler.dump + cls: defining_class obj: object / @@ -4553,14 +4586,15 @@ Write a pickled representation of the given object to the open file. [clinic start generated code]*/ static PyObject * -_pickle_Pickler_dump(PicklerObject *self, PyObject *obj) -/*[clinic end generated code: output=87ecad1261e02ac7 input=552eb1c0f52260d9]*/ +_pickle_Pickler_dump_impl(PicklerObject *self, PyTypeObject *cls, + PyObject *obj) +/*[clinic end generated code: output=952cf7f68b1445bb input=f949d84151983594]*/ { + PickleState *st = _Pickle_GetStateByClass(cls); /* Check whether the Pickler was initialized correctly (issue3664). Developers often forget to call __init__() in their subclasses, which would trigger a segfault without this check. 
*/ if (self->write == NULL) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->PicklingError, "Pickler.__init__() was not called by %s.__init__()", Py_TYPE(self)->tp_name); @@ -4570,7 +4604,7 @@ _pickle_Pickler_dump(PicklerObject *self, PyObject *obj) if (_Pickler_ClearBuffer(self) < 0) return NULL; - if (dump(self, obj) < 0) + if (dump(st, self, obj) < 0) return NULL; if (_Pickler_FlushToFile(self) < 0) @@ -4612,36 +4646,6 @@ static struct PyMethodDef Pickler_methods[] = { {NULL, NULL} /* sentinel */ }; -static void -Pickler_dealloc(PicklerObject *self) -{ - PyObject_GC_UnTrack(self); - - Py_XDECREF(self->output_buffer); - Py_XDECREF(self->write); - Py_XDECREF(self->pers_func); - Py_XDECREF(self->dispatch_table); - Py_XDECREF(self->fast_memo); - Py_XDECREF(self->reducer_override); - Py_XDECREF(self->buffer_callback); - - PyMemoTable_Del(self->memo); - - Py_TYPE(self)->tp_free((PyObject *)self); -} - -static int -Pickler_traverse(PicklerObject *self, visitproc visit, void *arg) -{ - Py_VISIT(self->write); - Py_VISIT(self->pers_func); - Py_VISIT(self->dispatch_table); - Py_VISIT(self->fast_memo); - Py_VISIT(self->reducer_override); - Py_VISIT(self->buffer_callback); - return 0; -} - static int Pickler_clear(PicklerObject *self) { @@ -4661,6 +4665,29 @@ Pickler_clear(PicklerObject *self) return 0; } +static void +Pickler_dealloc(PicklerObject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(self); + (void)Pickler_clear(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); +} + +static int +Pickler_traverse(PicklerObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->write); + Py_VISIT(self->pers_func); + Py_VISIT(self->dispatch_table); + Py_VISIT(self->fast_memo); + Py_VISIT(self->reducer_override); + Py_VISIT(self->buffer_callback); + return 0; +} + /*[clinic input] @@ -4870,15 +4897,18 @@ static PyMethodDef picklerproxy_methods[] = { static void PicklerMemoProxy_dealloc(PicklerMemoProxyObject *self) { + PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - Py_XDECREF(self->pickler); - PyObject_GC_Del((PyObject *)self); + Py_CLEAR(self->pickler); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); } static int PicklerMemoProxy_traverse(PicklerMemoProxyObject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->pickler); return 0; } @@ -4890,43 +4920,29 @@ PicklerMemoProxy_clear(PicklerMemoProxyObject *self) return 0; } -static PyTypeObject PicklerMemoProxyType = { - PyVarObject_HEAD_INIT(NULL, 0) - "_pickle.PicklerMemoProxy", /*tp_name*/ - sizeof(PicklerMemoProxyObject), /*tp_basicsize*/ - 0, - (destructor)PicklerMemoProxy_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - PyObject_HashNotImplemented, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - PyObject_GenericSetAttr, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - 0, /* tp_doc */ - (traverseproc)PicklerMemoProxy_traverse, /* tp_traverse */ - (inquiry)PicklerMemoProxy_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - picklerproxy_methods, /* tp_methods */ +static PyType_Slot memoproxy_slots[] = { + {Py_tp_dealloc, PicklerMemoProxy_dealloc}, + {Py_tp_traverse, PicklerMemoProxy_traverse}, + 
{Py_tp_clear, PicklerMemoProxy_clear}, + {Py_tp_methods, picklerproxy_methods}, + {Py_tp_hash, PyObject_HashNotImplemented}, + {0, NULL}, +}; + +static PyType_Spec memoproxy_spec = { + .name = "_pickle.PicklerMemoProxy", + .basicsize = sizeof(PicklerMemoProxyObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = memoproxy_slots, }; static PyObject * PicklerMemoProxy_New(PicklerObject *pickler) { PicklerMemoProxyObject *self; - - self = PyObject_GC_New(PicklerMemoProxyObject, &PicklerMemoProxyType); + PickleState *st = _Pickle_FindStateByType(Py_TYPE(pickler)); + self = PyObject_GC_New(PicklerMemoProxyObject, st->PicklerMemoProxyType); if (self == NULL) return NULL; self->pickler = (PicklerObject*)Py_NewRef(pickler); @@ -4953,7 +4969,8 @@ Pickler_set_memo(PicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored)) return -1; } - if (Py_IS_TYPE(obj, &PicklerMemoProxyType)) { + PickleState *st = _Pickle_FindStateByType(Py_TYPE(self)); + if (Py_IS_TYPE(obj, st->PicklerMemoProxyType)) { PicklerObject *pickler = ((PicklerMemoProxyObject *)obj)->pickler; @@ -5049,47 +5066,27 @@ static PyGetSetDef Pickler_getsets[] = { {NULL} }; -static PyTypeObject Pickler_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_pickle.Pickler" , /*tp_name*/ - sizeof(PicklerObject), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - (destructor)Pickler_dealloc, /*tp_dealloc*/ - 0, /*tp_vectorcall_offset*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_as_async*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - _pickle_Pickler___init____doc__, /*tp_doc*/ - (traverseproc)Pickler_traverse, /*tp_traverse*/ - (inquiry)Pickler_clear, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - Pickler_methods, /*tp_methods*/ - Pickler_members, /*tp_members*/ - Pickler_getsets, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - _pickle_Pickler___init__, /*tp_init*/ - PyType_GenericAlloc, /*tp_alloc*/ - PyType_GenericNew, /*tp_new*/ - PyObject_GC_Del, /*tp_free*/ - 0, /*tp_is_gc*/ +static PyType_Slot pickler_type_slots[] = { + {Py_tp_dealloc, Pickler_dealloc}, + {Py_tp_methods, Pickler_methods}, + {Py_tp_members, Pickler_members}, + {Py_tp_getset, Pickler_getsets}, + {Py_tp_clear, Pickler_clear}, + {Py_tp_doc, (char*)_pickle_Pickler___init____doc__}, + {Py_tp_traverse, Pickler_traverse}, + {Py_tp_init, _pickle_Pickler___init__}, + {Py_tp_new, PyType_GenericNew}, + {Py_tp_alloc, PyType_GenericAlloc}, + {Py_tp_free, PyObject_GC_Del}, + {0, NULL}, +}; + +static PyType_Spec pickler_type_spec = { + .name = "_pickle.Pickler", + .basicsize = sizeof(PicklerObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pickler_type_slots, }; /* Temporary helper for calling self.find_class(). 
@@ -5107,17 +5104,14 @@ find_class(UnpicklerObject *self, PyObject *module_name, PyObject *global_name) } static Py_ssize_t -marker(UnpicklerObject *self) +marker(PickleState *st, UnpicklerObject *self) { - Py_ssize_t mark; - if (self->num_marks < 1) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "could not find MARK"); return -1; } - mark = self->marks[--self->num_marks]; + Py_ssize_t mark = self->marks[--self->num_marks]; self->stack->mark_set = self->num_marks != 0; self->stack->fence = self->num_marks ? self->marks[self->num_marks - 1] : 0; @@ -5125,24 +5119,24 @@ marker(UnpicklerObject *self) } static int -load_none(UnpicklerObject *self) +load_none(PickleState *state, UnpicklerObject *self) { PDATA_APPEND(self->stack, Py_None, -1); return 0; } static int -load_int(UnpicklerObject *self) +load_int(PickleState *state, UnpicklerObject *self) { PyObject *value; char *endptr, *s; Py_ssize_t len; long x; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(state, self, &s)) < 0) return -1; if (len < 2) - return bad_readline(); + return bad_readline(state); errno = 0; /* XXX: Should the base argument of strtol() be explicitly set to 10? @@ -5177,7 +5171,7 @@ load_int(UnpicklerObject *self) } static int -load_bool(UnpicklerObject *self, PyObject *boolean) +load_bool(PickleState *state, UnpicklerObject *self, PyObject *boolean) { assert(boolean == Py_True || boolean == Py_False); PDATA_APPEND(self->stack, boolean, -1); @@ -5257,49 +5251,46 @@ load_binintx(UnpicklerObject *self, char *s, int size) } static int -load_binint(UnpicklerObject *self) +load_binint(PickleState *state, UnpicklerObject *self) { char *s; - - if (_Unpickler_Read(self, &s, 4) < 0) + if (_Unpickler_Read(self, state, &s, 4) < 0) return -1; return load_binintx(self, s, 4); } static int -load_binint1(UnpicklerObject *self) +load_binint1(PickleState *state, UnpicklerObject *self) { char *s; - - if (_Unpickler_Read(self, &s, 1) < 0) + if (_Unpickler_Read(self, state, &s, 1) < 0) return -1; return load_binintx(self, s, 1); } static int -load_binint2(UnpicklerObject *self) +load_binint2(PickleState *state, UnpicklerObject *self) { char *s; - - if (_Unpickler_Read(self, &s, 2) < 0) + if (_Unpickler_Read(self, state, &s, 2) < 0) return -1; return load_binintx(self, s, 2); } static int -load_long(UnpicklerObject *self) +load_long(PickleState *state, UnpicklerObject *self) { PyObject *value; char *s = NULL; Py_ssize_t len; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(state, self, &s)) < 0) return -1; if (len < 2) - return bad_readline(); + return bad_readline(state); /* s[len-2] will usually be 'L' (and s[len-1] is '\n'); we need to remove the 'L' before calling PyLong_FromString. In order to maintain @@ -5320,19 +5311,18 @@ load_long(UnpicklerObject *self) * data following. 
*/ static int -load_counted_long(UnpicklerObject *self, int size) +load_counted_long(PickleState *st, UnpicklerObject *self, int size) { PyObject *value; char *nbytes; char *pdata; assert(size == 1 || size == 4); - if (_Unpickler_Read(self, &nbytes, size) < 0) + if (_Unpickler_Read(self, st, &nbytes, size) < 0) return -1; size = calc_binint(nbytes, size); if (size < 0) { - PickleState *st = _Pickle_GetGlobalState(); /* Corrupt or hostile pickle -- we never write one like this */ PyErr_SetString(st->UnpicklingError, "LONG pickle has negative byte count"); @@ -5343,7 +5333,7 @@ load_counted_long(UnpicklerObject *self, int size) value = PyLong_FromLong(0L); else { /* Read the raw little-endian bytes and convert. */ - if (_Unpickler_Read(self, &pdata, size) < 0) + if (_Unpickler_Read(self, st, &pdata, size) < 0) return -1; value = _PyLong_FromByteArray((unsigned char *)pdata, (size_t)size, 1 /* little endian */ , 1 /* signed */ ); @@ -5355,17 +5345,17 @@ load_counted_long(UnpicklerObject *self, int size) } static int -load_float(UnpicklerObject *self) +load_float(PickleState *state, UnpicklerObject *self) { PyObject *value; char *endptr, *s; Py_ssize_t len; double d; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(state, self, &s)) < 0) return -1; if (len < 2) - return bad_readline(); + return bad_readline(state); errno = 0; d = PyOS_string_to_double(s, &endptr, PyExc_OverflowError); @@ -5384,13 +5374,13 @@ load_float(UnpicklerObject *self) } static int -load_binfloat(UnpicklerObject *self) +load_binfloat(PickleState *state, UnpicklerObject *self) { PyObject *value; double x; char *s; - if (_Unpickler_Read(self, &s, 8) < 0) + if (_Unpickler_Read(self, state, &s, 8) < 0) return -1; x = PyFloat_Unpack8(s, 0); @@ -5405,14 +5395,14 @@ load_binfloat(UnpicklerObject *self) } static int -load_string(UnpicklerObject *self) +load_string(PickleState *st, UnpicklerObject *self) { PyObject *bytes; PyObject *obj; Py_ssize_t len; char *s, *p; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(st, self, &s)) < 0) return -1; /* Strip the newline */ len--; @@ -5422,7 +5412,6 @@ load_string(UnpicklerObject *self) len -= 2; } else { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "the STRING opcode argument must be quoted"); return -1; @@ -5453,25 +5442,24 @@ load_string(UnpicklerObject *self) } static int -load_counted_binstring(UnpicklerObject *self, int nbytes) +load_counted_binstring(PickleState *st, UnpicklerObject *self, int nbytes) { PyObject *obj; Py_ssize_t size; char *s; - if (_Unpickler_Read(self, &s, nbytes) < 0) + if (_Unpickler_Read(self, st, &s, nbytes) < 0) return -1; size = calc_binsize(s, nbytes); if (size < 0) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->UnpicklingError, "BINSTRING exceeds system's maximum size of %zd bytes", PY_SSIZE_T_MAX); return -1; } - if (_Unpickler_Read(self, &s, size) < 0) + if (_Unpickler_Read(self, st, &s, size) < 0) return -1; /* Convert Python 2.x strings to bytes if the *encoding* given to the @@ -5491,13 +5479,13 @@ load_counted_binstring(UnpicklerObject *self, int nbytes) } static int -load_counted_binbytes(UnpicklerObject *self, int nbytes) +load_counted_binbytes(PickleState *state, UnpicklerObject *self, int nbytes) { PyObject *bytes; Py_ssize_t size; char *s; - if (_Unpickler_Read(self, &s, nbytes) < 0) + if (_Unpickler_Read(self, state, &s, nbytes) < 0) return -1; size = calc_binsize(s, nbytes); @@ -5511,7 +5499,7 @@ 
load_counted_binbytes(UnpicklerObject *self, int nbytes) bytes = PyBytes_FromStringAndSize(NULL, size); if (bytes == NULL) return -1; - if (_Unpickler_ReadInto(self, PyBytes_AS_STRING(bytes), size) < 0) { + if (_Unpickler_ReadInto(state, self, PyBytes_AS_STRING(bytes), size) < 0) { Py_DECREF(bytes); return -1; } @@ -5521,13 +5509,13 @@ load_counted_binbytes(UnpicklerObject *self, int nbytes) } static int -load_counted_bytearray(UnpicklerObject *self) +load_counted_bytearray(PickleState *state, UnpicklerObject *self) { PyObject *bytearray; Py_ssize_t size; char *s; - if (_Unpickler_Read(self, &s, 8) < 0) { + if (_Unpickler_Read(self, state, &s, 8) < 0) { return -1; } @@ -5543,7 +5531,8 @@ load_counted_bytearray(UnpicklerObject *self) if (bytearray == NULL) { return -1; } - if (_Unpickler_ReadInto(self, PyByteArray_AS_STRING(bytearray), size) < 0) { + char *str = PyByteArray_AS_STRING(bytearray); + if (_Unpickler_ReadInto(state, self, str, size) < 0) { Py_DECREF(bytearray); return -1; } @@ -5553,10 +5542,9 @@ load_counted_bytearray(UnpicklerObject *self) } static int -load_next_buffer(UnpicklerObject *self) +load_next_buffer(PickleState *st, UnpicklerObject *self) { if (self->buffers == NULL) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "pickle stream refers to out-of-band data " "but no *buffers* argument was given"); @@ -5565,7 +5553,6 @@ load_next_buffer(UnpicklerObject *self) PyObject *buf = PyIter_Next(self->buffers); if (buf == NULL) { if (!PyErr_Occurred()) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "not enough out-of-band buffers"); } @@ -5577,11 +5564,11 @@ load_next_buffer(UnpicklerObject *self) } static int -load_readonly_buffer(UnpicklerObject *self) +load_readonly_buffer(PickleState *state, UnpicklerObject *self) { Py_ssize_t len = Py_SIZE(self->stack); if (len <= self->stack->fence) { - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); } PyObject *obj = self->stack->data[len - 1]; @@ -5603,16 +5590,16 @@ load_readonly_buffer(UnpicklerObject *self) } static int -load_unicode(UnpicklerObject *self) +load_unicode(PickleState *state, UnpicklerObject *self) { PyObject *str; Py_ssize_t len; char *s = NULL; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(state, self, &s)) < 0) return -1; if (len < 1) - return bad_readline(); + return bad_readline(state); str = PyUnicode_DecodeRawUnicodeEscape(s, len - 1, NULL); if (str == NULL) @@ -5623,13 +5610,13 @@ load_unicode(UnpicklerObject *self) } static int -load_counted_binunicode(UnpicklerObject *self, int nbytes) +load_counted_binunicode(PickleState *state, UnpicklerObject *self, int nbytes) { PyObject *str; Py_ssize_t size; char *s; - if (_Unpickler_Read(self, &s, nbytes) < 0) + if (_Unpickler_Read(self, state, &s, nbytes) < 0) return -1; size = calc_binsize(s, nbytes); @@ -5640,7 +5627,7 @@ load_counted_binunicode(UnpicklerObject *self, int nbytes) return -1; } - if (_Unpickler_Read(self, &s, size) < 0) + if (_Unpickler_Read(self, state, &s, size) < 0) return -1; str = PyUnicode_DecodeUTF8(s, size, "surrogatepass"); @@ -5652,14 +5639,14 @@ load_counted_binunicode(UnpicklerObject *self, int nbytes) } static int -load_counted_tuple(UnpicklerObject *self, Py_ssize_t len) +load_counted_tuple(PickleState *state, UnpicklerObject *self, Py_ssize_t len) { PyObject *tuple; if (Py_SIZE(self->stack) < len) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, 
self->stack); - tuple = Pdata_poptuple(self->stack, Py_SIZE(self->stack) - len); + tuple = Pdata_poptuple(state, self->stack, Py_SIZE(self->stack) - len); if (tuple == NULL) return -1; PDATA_PUSH(self->stack, tuple, -1); @@ -5667,18 +5654,18 @@ load_counted_tuple(UnpicklerObject *self, Py_ssize_t len) } static int -load_tuple(UnpicklerObject *self) +load_tuple(PickleState *state, UnpicklerObject *self) { Py_ssize_t i; - if ((i = marker(self)) < 0) + if ((i = marker(state, self)) < 0) return -1; - return load_counted_tuple(self, Py_SIZE(self->stack) - i); + return load_counted_tuple(state, self, Py_SIZE(self->stack) - i); } static int -load_empty_list(UnpicklerObject *self) +load_empty_list(PickleState *state, UnpicklerObject *self) { PyObject *list; @@ -5689,7 +5676,7 @@ load_empty_list(UnpicklerObject *self) } static int -load_empty_dict(UnpicklerObject *self) +load_empty_dict(PickleState *state, UnpicklerObject *self) { PyObject *dict; @@ -5700,7 +5687,7 @@ load_empty_dict(UnpicklerObject *self) } static int -load_empty_set(UnpicklerObject *self) +load_empty_set(PickleState *state, UnpicklerObject *self) { PyObject *set; @@ -5711,12 +5698,12 @@ load_empty_set(UnpicklerObject *self) } static int -load_list(UnpicklerObject *self) +load_list(PickleState *state, UnpicklerObject *self) { PyObject *list; Py_ssize_t i; - if ((i = marker(self)) < 0) + if ((i = marker(state, self)) < 0) return -1; list = Pdata_poplist(self->stack, i); @@ -5727,12 +5714,12 @@ load_list(UnpicklerObject *self) } static int -load_dict(UnpicklerObject *self) +load_dict(PickleState *st, UnpicklerObject *self) { PyObject *dict, *key, *value; Py_ssize_t i, j, k; - if ((i = marker(self)) < 0) + if ((i = marker(st, self)) < 0) return -1; j = Py_SIZE(self->stack); @@ -5740,7 +5727,6 @@ load_dict(UnpicklerObject *self) return -1; if ((j - i) % 2 != 0) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "odd number of items for DICT"); Py_DECREF(dict); return -1; @@ -5760,16 +5746,16 @@ load_dict(UnpicklerObject *self) } static int -load_frozenset(UnpicklerObject *self) +load_frozenset(PickleState *state, UnpicklerObject *self) { PyObject *items; PyObject *frozenset; Py_ssize_t i; - if ((i = marker(self)) < 0) + if ((i = marker(state, self)) < 0) return -1; - items = Pdata_poptuple(self->stack, i); + items = Pdata_poptuple(state, self->stack, i); if (items == NULL) return -1; @@ -5803,22 +5789,22 @@ instantiate(PyObject *cls, PyObject *args) } static int -load_obj(UnpicklerObject *self) +load_obj(PickleState *state, UnpicklerObject *self) { PyObject *cls, *args, *obj = NULL; Py_ssize_t i; - if ((i = marker(self)) < 0) + if ((i = marker(state, self)) < 0) return -1; if (Py_SIZE(self->stack) - i < 1) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); - args = Pdata_poptuple(self->stack, i + 1); + args = Pdata_poptuple(state, self->stack, i + 1); if (args == NULL) return -1; - PDATA_POP(self->stack, cls); + PDATA_POP(state, self->stack, cls); if (cls) { obj = instantiate(cls, args); Py_DECREF(cls); @@ -5832,7 +5818,7 @@ load_obj(UnpicklerObject *self) } static int -load_inst(UnpicklerObject *self) +load_inst(PickleState *state, UnpicklerObject *self) { PyObject *cls = NULL; PyObject *args = NULL; @@ -5843,12 +5829,12 @@ load_inst(UnpicklerObject *self) Py_ssize_t i; char *s; - if ((i = marker(self)) < 0) + if ((i = marker(state, self)) < 0) return -1; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(state, self, &s)) < 
0) return -1; if (len < 2) - return bad_readline(); + return bad_readline(state); /* Here it is safe to use PyUnicode_DecodeASCII(), even though non-ASCII identifiers are permitted in Python 3.0, since the INST opcode is only @@ -5857,10 +5843,10 @@ load_inst(UnpicklerObject *self) if (module_name == NULL) return -1; - if ((len = _Unpickler_Readline(self, &s)) >= 0) { + if ((len = _Unpickler_Readline(state, self, &s)) >= 0) { if (len < 2) { Py_DECREF(module_name); - return bad_readline(); + return bad_readline(state); } class_name = PyUnicode_DecodeASCII(s, len - 1, "strict"); if (class_name != NULL) { @@ -5873,7 +5859,7 @@ load_inst(UnpicklerObject *self) if (cls == NULL) return -1; - if ((args = Pdata_poptuple(self->stack, i)) != NULL) { + if ((args = Pdata_poptuple(state, self->stack, i)) != NULL) { obj = instantiate(cls, args); Py_DECREF(args); } @@ -5887,16 +5873,16 @@ load_inst(UnpicklerObject *self) } static void -newobj_unpickling_error(const char * msg, int use_kwargs, PyObject *arg) +newobj_unpickling_error(PickleState *st, const char *msg, int use_kwargs, + PyObject *arg) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->UnpicklingError, msg, use_kwargs ? "NEWOBJ_EX" : "NEWOBJ", Py_TYPE(arg)->tp_name); } static int -load_newobj(UnpicklerObject *self, int use_kwargs) +load_newobj(PickleState *state, UnpicklerObject *self, int use_kwargs) { PyObject *cls, *args, *kwargs = NULL; PyObject *obj; @@ -5905,17 +5891,17 @@ load_newobj(UnpicklerObject *self, int use_kwargs) * cls.__new__(cls, *args, **kwargs). */ if (use_kwargs) { - PDATA_POP(self->stack, kwargs); + PDATA_POP(state, self->stack, kwargs); if (kwargs == NULL) { return -1; } } - PDATA_POP(self->stack, args); + PDATA_POP(state, self->stack, args); if (args == NULL) { Py_XDECREF(kwargs); return -1; } - PDATA_POP(self->stack, cls); + PDATA_POP(state, self->stack, cls); if (cls == NULL) { Py_XDECREF(kwargs); Py_DECREF(args); @@ -5923,22 +5909,26 @@ load_newobj(UnpicklerObject *self, int use_kwargs) } if (!PyType_Check(cls)) { - newobj_unpickling_error("%s class argument must be a type, not %.200s", + newobj_unpickling_error(state, + "%s class argument must be a type, not %.200s", use_kwargs, cls); goto error; } if (((PyTypeObject *)cls)->tp_new == NULL) { - newobj_unpickling_error("%s class argument '%.200s' doesn't have __new__", + newobj_unpickling_error(state, + "%s class argument '%.200s' doesn't have __new__", use_kwargs, cls); goto error; } if (!PyTuple_Check(args)) { - newobj_unpickling_error("%s args argument must be a tuple, not %.200s", + newobj_unpickling_error(state, + "%s args argument must be a tuple, not %.200s", use_kwargs, args); goto error; } if (use_kwargs && !PyDict_Check(kwargs)) { - newobj_unpickling_error("%s kwargs argument must be a dict, not %.200s", + newobj_unpickling_error(state, + "%s kwargs argument must be a dict, not %.200s", use_kwargs, kwargs); goto error; } @@ -5961,7 +5951,7 @@ load_newobj(UnpicklerObject *self, int use_kwargs) } static int -load_global(UnpicklerObject *self) +load_global(PickleState *state, UnpicklerObject *self) { PyObject *global = NULL; PyObject *module_name; @@ -5969,18 +5959,18 @@ load_global(UnpicklerObject *self) Py_ssize_t len; char *s; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(state, self, &s)) < 0) return -1; if (len < 2) - return bad_readline(); + return bad_readline(state); module_name = PyUnicode_DecodeUTF8(s, len - 1, "strict"); if (!module_name) return -1; - if ((len = _Unpickler_Readline(self, &s)) >= 0) 
{ + if ((len = _Unpickler_Readline(state, self, &s)) >= 0) { if (len < 2) { Py_DECREF(module_name); - return bad_readline(); + return bad_readline(state); } global_name = PyUnicode_DecodeUTF8(s, len - 1, "strict"); if (global_name) { @@ -5997,17 +5987,16 @@ load_global(UnpicklerObject *self) } static int -load_stack_global(UnpicklerObject *self) +load_stack_global(PickleState *st, UnpicklerObject *self) { PyObject *global; PyObject *module_name; PyObject *global_name; - PDATA_POP(self->stack, global_name); - PDATA_POP(self->stack, module_name); + PDATA_POP(st, self->stack, global_name); + PDATA_POP(st, self->stack, module_name); if (module_name == NULL || !PyUnicode_CheckExact(module_name) || global_name == NULL || !PyUnicode_CheckExact(global_name)) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "STACK_GLOBAL requires str"); Py_XDECREF(global_name); Py_XDECREF(module_name); @@ -6023,22 +6012,22 @@ load_stack_global(UnpicklerObject *self) } static int -load_persid(UnpicklerObject *self) +load_persid(PickleState *st, UnpicklerObject *self) { PyObject *pid, *obj; Py_ssize_t len; char *s; if (self->pers_func) { - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(st, self, &s)) < 0) return -1; if (len < 1) - return bad_readline(); + return bad_readline(st); pid = PyUnicode_DecodeASCII(s, len - 1, "strict"); if (pid == NULL) { if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { - PyErr_SetString(_Pickle_GetGlobalState()->UnpicklingError, + PyErr_SetString(st->UnpicklingError, "persistent IDs in protocol 0 must be " "ASCII strings"); } @@ -6054,7 +6043,6 @@ load_persid(UnpicklerObject *self) return 0; } else { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "A load persistent id instruction was encountered, " "but no persistent_load function was specified."); @@ -6063,12 +6051,12 @@ load_persid(UnpicklerObject *self) } static int -load_binpersid(UnpicklerObject *self) +load_binpersid(PickleState *st, UnpicklerObject *self) { PyObject *pid, *obj; if (self->pers_func) { - PDATA_POP(self->stack, pid); + PDATA_POP(st, self->stack, pid); if (pid == NULL) return -1; @@ -6081,7 +6069,6 @@ load_binpersid(UnpicklerObject *self) return 0; } else { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "A load persistent id instruction was encountered, " "but no persistent_load function was specified."); @@ -6090,7 +6077,7 @@ load_binpersid(UnpicklerObject *self) } static int -load_pop(UnpicklerObject *self) +load_pop(PickleState *state, UnpicklerObject *self) { Py_ssize_t len = Py_SIZE(self->stack); @@ -6107,7 +6094,7 @@ load_pop(UnpicklerObject *self) self->stack->fence = self->num_marks ? 
self->marks[self->num_marks - 1] : 0; } else if (len <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); else { len--; Py_DECREF(self->stack->data[len]); @@ -6117,11 +6104,10 @@ load_pop(UnpicklerObject *self) } static int -load_pop_mark(UnpicklerObject *self) +load_pop_mark(PickleState *state, UnpicklerObject *self) { Py_ssize_t i; - - if ((i = marker(self)) < 0) + if ((i = marker(state, self)) < 0) return -1; Pdata_clear(self->stack, i); @@ -6130,30 +6116,30 @@ load_pop_mark(UnpicklerObject *self) } static int -load_dup(UnpicklerObject *self) +load_dup(PickleState *state, UnpicklerObject *self) { PyObject *last; Py_ssize_t len = Py_SIZE(self->stack); if (len <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); last = self->stack->data[len - 1]; PDATA_APPEND(self->stack, last, -1); return 0; } static int -load_get(UnpicklerObject *self) +load_get(PickleState *st, UnpicklerObject *self) { PyObject *key, *value; Py_ssize_t idx; Py_ssize_t len; char *s; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(st, self, &s)) < 0) return -1; if (len < 2) - return bad_readline(); + return bad_readline(st); key = PyLong_FromString(s, NULL, 10); if (key == NULL) @@ -6167,7 +6153,6 @@ load_get(UnpicklerObject *self) value = _Unpickler_MemoGet(self, idx); if (value == NULL) { if (!PyErr_Occurred()) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->UnpicklingError, "Memo value not found at index %ld", idx); } Py_DECREF(key); @@ -6180,13 +6165,13 @@ load_get(UnpicklerObject *self) } static int -load_binget(UnpicklerObject *self) +load_binget(PickleState *st, UnpicklerObject *self) { PyObject *value; Py_ssize_t idx; char *s; - if (_Unpickler_Read(self, &s, 1) < 0) + if (_Unpickler_Read(self, st, &s, 1) < 0) return -1; idx = Py_CHARMASK(s[0]); @@ -6195,7 +6180,6 @@ load_binget(UnpicklerObject *self) if (value == NULL) { PyObject *key = PyLong_FromSsize_t(idx); if (key != NULL) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->UnpicklingError, "Memo value not found at index %ld", idx); Py_DECREF(key); } @@ -6207,13 +6191,13 @@ load_binget(UnpicklerObject *self) } static int -load_long_binget(UnpicklerObject *self) +load_long_binget(PickleState *st, UnpicklerObject *self) { PyObject *value; Py_ssize_t idx; char *s; - if (_Unpickler_Read(self, &s, 4) < 0) + if (_Unpickler_Read(self, st, &s, 4) < 0) return -1; idx = calc_binsize(s, 4); @@ -6222,7 +6206,6 @@ load_long_binget(UnpicklerObject *self) if (value == NULL) { PyObject *key = PyLong_FromSsize_t(idx); if (key != NULL) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->UnpicklingError, "Memo value not found at index %ld", idx); Py_DECREF(key); } @@ -6237,7 +6220,7 @@ load_long_binget(UnpicklerObject *self) * the number of bytes following the opcode, holding the index (code) value. 
*/ static int -load_extension(UnpicklerObject *self, int nbytes) +load_extension(PickleState *st, UnpicklerObject *self, int nbytes) { char *codebytes; /* the nbytes bytes after the opcode */ long code; /* calc_binint returns long */ @@ -6245,10 +6228,9 @@ load_extension(UnpicklerObject *self, int nbytes) PyObject *obj; /* the object to push */ PyObject *pair; /* (module_name, class_name) */ PyObject *module_name, *class_name; - PickleState *st = _Pickle_GetGlobalState(); assert(nbytes == 1 || nbytes == 2 || nbytes == 4); - if (_Unpickler_Read(self, &codebytes, nbytes) < 0) + if (_Unpickler_Read(self, st, &codebytes, nbytes) < 0) return -1; code = calc_binint(codebytes, nbytes); if (code <= 0) { /* note that 0 is forbidden */ @@ -6324,19 +6306,19 @@ load_extension(UnpicklerObject *self, int nbytes) } static int -load_put(UnpicklerObject *self) +load_put(PickleState *state, UnpicklerObject *self) { PyObject *key, *value; Py_ssize_t idx; Py_ssize_t len; char *s = NULL; - if ((len = _Unpickler_Readline(self, &s)) < 0) + if ((len = _Unpickler_Readline(state, self, &s)) < 0) return -1; if (len < 2) - return bad_readline(); + return bad_readline(state); if (Py_SIZE(self->stack) <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); value = self->stack->data[Py_SIZE(self->stack) - 1]; key = PyLong_FromString(s, NULL, 10); @@ -6355,17 +6337,17 @@ load_put(UnpicklerObject *self) } static int -load_binput(UnpicklerObject *self) +load_binput(PickleState *state, UnpicklerObject *self) { PyObject *value; Py_ssize_t idx; char *s; - if (_Unpickler_Read(self, &s, 1) < 0) + if (_Unpickler_Read(self, state, &s, 1) < 0) return -1; if (Py_SIZE(self->stack) <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); value = self->stack->data[Py_SIZE(self->stack) - 1]; idx = Py_CHARMASK(s[0]); @@ -6374,17 +6356,17 @@ load_binput(UnpicklerObject *self) } static int -load_long_binput(UnpicklerObject *self) +load_long_binput(PickleState *state, UnpicklerObject *self) { PyObject *value; Py_ssize_t idx; char *s; - if (_Unpickler_Read(self, &s, 4) < 0) + if (_Unpickler_Read(self, state, &s, 4) < 0) return -1; if (Py_SIZE(self->stack) <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); value = self->stack->data[Py_SIZE(self->stack) - 1]; idx = calc_binsize(s, 4); @@ -6398,19 +6380,19 @@ load_long_binput(UnpicklerObject *self) } static int -load_memoize(UnpicklerObject *self) +load_memoize(PickleState *state, UnpicklerObject *self) { PyObject *value; if (Py_SIZE(self->stack) <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); value = self->stack->data[Py_SIZE(self->stack) - 1]; return _Unpickler_MemoPut(self, self->memo_len, value); } static int -do_append(UnpicklerObject *self, Py_ssize_t x) +do_append(PickleState *state, UnpicklerObject *self, Py_ssize_t x) { PyObject *value; PyObject *slice; @@ -6420,7 +6402,7 @@ do_append(UnpicklerObject *self, Py_ssize_t x) len = Py_SIZE(self->stack); if (x > len || x <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); if (len == x) /* nothing to do */ return 0; @@ -6485,24 +6467,24 @@ do_append(UnpicklerObject *self, Py_ssize_t x) } static int -load_append(UnpicklerObject *self) +load_append(PickleState *state, UnpicklerObject *self) { if 
(Py_SIZE(self->stack) - 1 <= self->stack->fence) - return Pdata_stack_underflow(self->stack); - return do_append(self, Py_SIZE(self->stack) - 1); + return Pdata_stack_underflow(state, self->stack); + return do_append(state, self, Py_SIZE(self->stack) - 1); } static int -load_appends(UnpicklerObject *self) +load_appends(PickleState *state, UnpicklerObject *self) { - Py_ssize_t i = marker(self); + Py_ssize_t i = marker(state, self); if (i < 0) return -1; - return do_append(self, i); + return do_append(state, self, i); } static int -do_setitems(UnpicklerObject *self, Py_ssize_t x) +do_setitems(PickleState *st, UnpicklerObject *self, Py_ssize_t x) { PyObject *value, *key; PyObject *dict; @@ -6511,11 +6493,10 @@ do_setitems(UnpicklerObject *self, Py_ssize_t x) len = Py_SIZE(self->stack); if (x > len || x <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(st, self->stack); if (len == x) /* nothing to do */ return 0; if ((len - x) % 2 != 0) { - PickleState *st = _Pickle_GetGlobalState(); /* Corrupt or hostile pickle -- we never write one like this. */ PyErr_SetString(st->UnpicklingError, "odd number of items for SETITEMS"); @@ -6540,32 +6521,32 @@ do_setitems(UnpicklerObject *self, Py_ssize_t x) } static int -load_setitem(UnpicklerObject *self) +load_setitem(PickleState *state, UnpicklerObject *self) { - return do_setitems(self, Py_SIZE(self->stack) - 2); + return do_setitems(state, self, Py_SIZE(self->stack) - 2); } static int -load_setitems(UnpicklerObject *self) +load_setitems(PickleState *state, UnpicklerObject *self) { - Py_ssize_t i = marker(self); + Py_ssize_t i = marker(state, self); if (i < 0) return -1; - return do_setitems(self, i); + return do_setitems(state, self, i); } static int -load_additems(UnpicklerObject *self) +load_additems(PickleState *state, UnpicklerObject *self) { PyObject *set; Py_ssize_t mark, len, i; - mark = marker(self); + mark = marker(state, self); if (mark < 0) return -1; len = Py_SIZE(self->stack); if (mark > len || mark <= self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(state, self->stack); if (len == mark) /* nothing to do */ return 0; @@ -6575,7 +6556,7 @@ load_additems(UnpicklerObject *self) PyObject *items; int status; - items = Pdata_poptuple(self->stack, mark); + items = Pdata_poptuple(state, self->stack, mark); if (items == NULL) return -1; @@ -6609,9 +6590,9 @@ load_additems(UnpicklerObject *self) } static int -load_build(UnpicklerObject *self) +load_build(PickleState *st, UnpicklerObject *self) { - PyObject *state, *inst, *slotstate; + PyObject *inst, *slotstate; PyObject *setstate; int status = 0; @@ -6619,9 +6600,10 @@ load_build(UnpicklerObject *self) * the stack top, possibly mutated via instance.__setstate__(state). 
*/ if (Py_SIZE(self->stack) - 2 < self->stack->fence) - return Pdata_stack_underflow(self->stack); + return Pdata_stack_underflow(st, self->stack); - PDATA_POP(self->stack, state); + PyObject *state; + PDATA_POP(st, self->stack, state); if (state == NULL) return -1; @@ -6665,7 +6647,6 @@ load_build(UnpicklerObject *self) Py_ssize_t i; if (!PyDict_Check(state)) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "state is not a dictionary"); goto error; } @@ -6695,7 +6676,6 @@ load_build(UnpicklerObject *self) Py_ssize_t i; if (!PyDict_Check(slotstate)) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_SetString(st->UnpicklingError, "slot state is not a dictionary"); goto error; @@ -6718,7 +6698,7 @@ load_build(UnpicklerObject *self) } static int -load_mark(UnpicklerObject *self) +load_mark(PickleState *state, UnpicklerObject *self) { /* Note that we split the (pickle.py) stack into two stacks, an @@ -6745,16 +6725,16 @@ load_mark(UnpicklerObject *self) } static int -load_reduce(UnpicklerObject *self) +load_reduce(PickleState *state, UnpicklerObject *self) { PyObject *callable = NULL; PyObject *argtup = NULL; PyObject *obj = NULL; - PDATA_POP(self->stack, argtup); + PDATA_POP(state, self->stack, argtup); if (argtup == NULL) return -1; - PDATA_POP(self->stack, callable); + PDATA_POP(state, self->stack, callable); if (callable) { obj = PyObject_CallObject(callable, argtup); Py_DECREF(callable); @@ -6772,12 +6752,12 @@ load_reduce(UnpicklerObject *self) * is the first opcode for protocols >= 2. */ static int -load_proto(UnpicklerObject *self) +load_proto(PickleState *state, UnpicklerObject *self) { char *s; int i; - if (_Unpickler_Read(self, &s, 1) < 0) + if (_Unpickler_Read(self, state, &s, 1) < 0) return -1; i = (unsigned char)s[0]; @@ -6791,12 +6771,12 @@ load_proto(UnpicklerObject *self) } static int -load_frame(UnpicklerObject *self) +load_frame(PickleState *state, UnpicklerObject *self) { char *s; Py_ssize_t frame_len; - if (_Unpickler_Read(self, &s, 8) < 0) + if (_Unpickler_Read(self, state, &s, 8) < 0) return -1; frame_len = calc_binsize(s, 8); @@ -6807,7 +6787,7 @@ load_frame(UnpicklerObject *self) return -1; } - if (_Unpickler_Read(self, &s, frame_len) < 0) + if (_Unpickler_Read(self, state, &s, frame_len) < 0) return -1; /* Rewind to start of frame */ @@ -6816,7 +6796,7 @@ load_frame(UnpicklerObject *self) } static PyObject * -load(UnpicklerObject *self) +load(PickleState *st, UnpicklerObject *self) { PyObject *value = NULL; char *s = NULL; @@ -6830,14 +6810,13 @@ load(UnpicklerObject *self) /* Convenient macros for the dispatch while-switch loop just below. 
*/ #define OP(opcode, load_func) \ - case opcode: if (load_func(self) < 0) break; continue; + case opcode: if (load_func(st, self) < 0) break; continue; #define OP_ARG(opcode, load_func, arg) \ - case opcode: if (load_func(self, (arg)) < 0) break; continue; + case opcode: if (load_func(st, self, (arg)) < 0) break; continue; while (1) { - if (_Unpickler_Read(self, &s, 1) < 0) { - PickleState *st = _Pickle_GetGlobalState(); + if (_Unpickler_Read(self, st, &s, 1) < 0) { if (PyErr_ExceptionMatches(st->UnpicklingError)) { PyErr_Format(PyExc_EOFError, "Ran out of input"); } @@ -6918,7 +6897,6 @@ load(UnpicklerObject *self) default: { - PickleState *st = _Pickle_GetGlobalState(); unsigned char c = (unsigned char) *s; if (0x20 <= c && c <= 0x7e && c != '\'' && c != '\\') { PyErr_Format(st->UnpicklingError, @@ -6942,7 +6920,7 @@ load(UnpicklerObject *self) if (_Unpickler_SkipConsumed(self) < 0) return NULL; - PDATA_POP(self->stack, value); + PDATA_POP(st, self->stack, value); return value; } @@ -6950,6 +6928,8 @@ load(UnpicklerObject *self) _pickle.Unpickler.load + cls: defining_class + Load a pickle. Read a pickled object representation from the open file object given @@ -6958,24 +6938,25 @@ specified therein. [clinic start generated code]*/ static PyObject * -_pickle_Unpickler_load_impl(UnpicklerObject *self) -/*[clinic end generated code: output=fdcc488aad675b14 input=acbb91a42fa9b7b9]*/ +_pickle_Unpickler_load_impl(UnpicklerObject *self, PyTypeObject *cls) +/*[clinic end generated code: output=cc88168f608e3007 input=f5d2f87e61d5f07f]*/ { UnpicklerObject *unpickler = (UnpicklerObject*)self; + PickleState *st = _Pickle_GetStateByClass(cls); + /* Check whether the Unpickler was initialized correctly. This prevents segfaulting if a subclass overridden __init__ with a function that does not call Unpickler.__init__(). Here, we simply ensure that self->read is not NULL. */ if (unpickler->read == NULL) { - PickleState *st = _Pickle_GetGlobalState(); PyErr_Format(st->UnpicklingError, "Unpickler.__init__() was not called by %s.__init__()", Py_TYPE(unpickler)->tp_name); return NULL; } - return load(unpickler); + return load(st, unpickler); } /* The name of find_class() is misleading. In newer pickle protocols, this @@ -6986,6 +6967,7 @@ _pickle_Unpickler_load_impl(UnpicklerObject *self) _pickle.Unpickler.find_class + cls: defining_class module_name: object global_name: object / @@ -7001,10 +6983,10 @@ needed. Both arguments passed are str objects. [clinic start generated code]*/ static PyObject * -_pickle_Unpickler_find_class_impl(UnpicklerObject *self, +_pickle_Unpickler_find_class_impl(UnpicklerObject *self, PyTypeObject *cls, PyObject *module_name, PyObject *global_name) -/*[clinic end generated code: output=becc08d7f9ed41e3 input=e2e6a865de093ef4]*/ +/*[clinic end generated code: output=99577948abb0be81 input=9577745719219fc7]*/ { PyObject *global; PyObject *module; @@ -7020,7 +7002,7 @@ _pickle_Unpickler_find_class_impl(UnpicklerObject *self, if (self->proto < 3 && self->fix_imports) { PyObject *key; PyObject *item; - PickleState *st = _Pickle_GetGlobalState(); + PickleState *st = _Pickle_GetStateByClass(cls); /* Check if the global (i.e., a function or a class) was renamed or moved to another module. 
*/ @@ -7114,44 +7096,6 @@ static struct PyMethodDef Unpickler_methods[] = { {NULL, NULL} /* sentinel */ }; -static void -Unpickler_dealloc(UnpicklerObject *self) -{ - PyObject_GC_UnTrack((PyObject *)self); - Py_XDECREF(self->readline); - Py_XDECREF(self->readinto); - Py_XDECREF(self->read); - Py_XDECREF(self->peek); - Py_XDECREF(self->stack); - Py_XDECREF(self->pers_func); - Py_XDECREF(self->buffers); - if (self->buffer.buf != NULL) { - PyBuffer_Release(&self->buffer); - self->buffer.buf = NULL; - } - - _Unpickler_MemoCleanup(self); - PyMem_Free(self->marks); - PyMem_Free(self->input_line); - PyMem_Free(self->encoding); - PyMem_Free(self->errors); - - Py_TYPE(self)->tp_free((PyObject *)self); -} - -static int -Unpickler_traverse(UnpicklerObject *self, visitproc visit, void *arg) -{ - Py_VISIT(self->readline); - Py_VISIT(self->readinto); - Py_VISIT(self->read); - Py_VISIT(self->peek); - Py_VISIT(self->stack); - Py_VISIT(self->pers_func); - Py_VISIT(self->buffers); - return 0; -} - static int Unpickler_clear(UnpicklerObject *self) { @@ -7180,6 +7124,30 @@ Unpickler_clear(UnpicklerObject *self) return 0; } +static void +Unpickler_dealloc(UnpicklerObject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack((PyObject *)self); + (void)Unpickler_clear(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); +} + +static int +Unpickler_traverse(UnpicklerObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->readline); + Py_VISIT(self->readinto); + Py_VISIT(self->read); + Py_VISIT(self->peek); + Py_VISIT(self->stack); + Py_VISIT(self->pers_func); + Py_VISIT(self->buffers); + return 0; +} + /*[clinic input] _pickle.Unpickler.__init__ @@ -7240,7 +7208,9 @@ _pickle_Unpickler___init___impl(UnpicklerObject *self, PyObject *file, return -1; } - self->stack = (Pdata *)Pdata_New(); + PyTypeObject *tp = Py_TYPE(self); + PickleState *state = _Pickle_FindStateByType(tp); + self->stack = (Pdata *)Pdata_New(state); if (self->stack == NULL) return -1; @@ -7366,15 +7336,18 @@ static PyMethodDef unpicklerproxy_methods[] = { static void UnpicklerMemoProxy_dealloc(UnpicklerMemoProxyObject *self) { + PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - Py_XDECREF(self->unpickler); - PyObject_GC_Del((PyObject *)self); + Py_CLEAR(self->unpickler); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); } static int UnpicklerMemoProxy_traverse(UnpicklerMemoProxyObject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->unpickler); return 0; } @@ -7386,44 +7359,30 @@ UnpicklerMemoProxy_clear(UnpicklerMemoProxyObject *self) return 0; } -static PyTypeObject UnpicklerMemoProxyType = { - PyVarObject_HEAD_INIT(NULL, 0) - "_pickle.UnpicklerMemoProxy", /*tp_name*/ - sizeof(UnpicklerMemoProxyObject), /*tp_basicsize*/ - 0, - (destructor)UnpicklerMemoProxy_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - PyObject_HashNotImplemented, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - PyObject_GenericSetAttr, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - 0, /* tp_doc */ - (traverseproc)UnpicklerMemoProxy_traverse, /* tp_traverse */ - (inquiry)UnpicklerMemoProxy_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ 
- unpicklerproxy_methods, /* tp_methods */ +static PyType_Slot unpickler_memoproxy_slots[] = { + {Py_tp_dealloc, UnpicklerMemoProxy_dealloc}, + {Py_tp_traverse, UnpicklerMemoProxy_traverse}, + {Py_tp_clear, UnpicklerMemoProxy_clear}, + {Py_tp_methods, unpicklerproxy_methods}, + {Py_tp_hash, PyObject_HashNotImplemented}, + {0, NULL}, +}; + +static PyType_Spec unpickler_memoproxy_spec = { + .name = "_pickle.UnpicklerMemoProxy", + .basicsize = sizeof(UnpicklerMemoProxyObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = unpickler_memoproxy_slots, }; static PyObject * UnpicklerMemoProxy_New(UnpicklerObject *unpickler) { + PickleState *state = _Pickle_FindStateByType(Py_TYPE(unpickler)); UnpicklerMemoProxyObject *self; - self = PyObject_GC_New(UnpicklerMemoProxyObject, - &UnpicklerMemoProxyType); + self = PyObject_GC_New(UnpicklerMemoProxyObject, + state->UnpicklerMemoProxyType); if (self == NULL) return NULL; self->unpickler = (UnpicklerObject*)Py_NewRef(unpickler); @@ -7452,7 +7411,8 @@ Unpickler_set_memo(UnpicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored return -1; } - if (Py_IS_TYPE(obj, &UnpicklerMemoProxyType)) { + PickleState *state = _Pickle_FindStateByType(Py_TYPE(self)); + if (Py_IS_TYPE(obj, state->UnpicklerMemoProxyType)) { UnpicklerObject *unpickler = ((UnpicklerMemoProxyObject *)obj)->unpickler; @@ -7554,47 +7514,26 @@ static PyGetSetDef Unpickler_getsets[] = { {NULL} }; -static PyTypeObject Unpickler_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_pickle.Unpickler", /*tp_name*/ - sizeof(UnpicklerObject), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - (destructor)Unpickler_dealloc, /*tp_dealloc*/ - 0, /*tp_vectorcall_offset*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_as_async*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - _pickle_Unpickler___init____doc__, /*tp_doc*/ - (traverseproc)Unpickler_traverse, /*tp_traverse*/ - (inquiry)Unpickler_clear, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - Unpickler_methods, /*tp_methods*/ - 0, /*tp_members*/ - Unpickler_getsets, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - _pickle_Unpickler___init__, /*tp_init*/ - PyType_GenericAlloc, /*tp_alloc*/ - PyType_GenericNew, /*tp_new*/ - PyObject_GC_Del, /*tp_free*/ - 0, /*tp_is_gc*/ +static PyType_Slot unpickler_type_slots[] = { + {Py_tp_dealloc, Unpickler_dealloc}, + {Py_tp_doc, (char *)_pickle_Unpickler___init____doc__}, + {Py_tp_traverse, Unpickler_traverse}, + {Py_tp_clear, Unpickler_clear}, + {Py_tp_methods, Unpickler_methods}, + {Py_tp_getset, Unpickler_getsets}, + {Py_tp_init, _pickle_Unpickler___init__}, + {Py_tp_alloc, PyType_GenericAlloc}, + {Py_tp_new, PyType_GenericNew}, + {Py_tp_free, PyObject_GC_Del}, + {0, NULL}, +}; + +static PyType_Spec unpickler_type_spec = { + .name = "_pickle.Unpickler", + .basicsize = sizeof(UnpicklerObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = unpickler_type_slots, }; /*[clinic input] @@ -7643,7 +7582,8 @@ _pickle_dump_impl(PyObject *module, PyObject *obj, PyObject *file, PyObject *buffer_callback) /*[clinic end generated code: output=706186dba996490c input=5ed6653da99cd97c]*/ { - PicklerObject *pickler = 
_Pickler_New(); + PickleState *state = _Pickle_GetState(module); + PicklerObject *pickler = _Pickler_New(state); if (pickler == NULL) return NULL; @@ -7657,7 +7597,7 @@ _pickle_dump_impl(PyObject *module, PyObject *obj, PyObject *file, if (_Pickler_SetBufferCallback(pickler, buffer_callback) < 0) goto error; - if (dump(pickler, obj) < 0) + if (dump(state, pickler, obj) < 0) goto error; if (_Pickler_FlushToFile(pickler) < 0) @@ -7708,7 +7648,8 @@ _pickle_dumps_impl(PyObject *module, PyObject *obj, PyObject *protocol, /*[clinic end generated code: output=fbab0093a5580fdf input=e543272436c6f987]*/ { PyObject *result; - PicklerObject *pickler = _Pickler_New(); + PickleState *state = _Pickle_GetState(module); + PicklerObject *pickler = _Pickler_New(state); if (pickler == NULL) return NULL; @@ -7719,7 +7660,7 @@ _pickle_dumps_impl(PyObject *module, PyObject *obj, PyObject *protocol, if (_Pickler_SetBufferCallback(pickler, buffer_callback) < 0) goto error; - if (dump(pickler, obj) < 0) + if (dump(state, pickler, obj) < 0) goto error; result = _Pickler_GetString(pickler); @@ -7774,7 +7715,7 @@ _pickle_load_impl(PyObject *module, PyObject *file, int fix_imports, /*[clinic end generated code: output=250452d141c23e76 input=46c7c31c92f4f371]*/ { PyObject *result; - UnpicklerObject *unpickler = _Unpickler_New(); + UnpicklerObject *unpickler = _Unpickler_New(module); if (unpickler == NULL) return NULL; @@ -7790,7 +7731,8 @@ _pickle_load_impl(PyObject *module, PyObject *file, int fix_imports, unpickler->fix_imports = fix_imports; - result = load(unpickler); + PickleState *state = _Pickle_GetState(module); + result = load(state, unpickler); Py_DECREF(unpickler); return result; @@ -7834,7 +7776,7 @@ _pickle_loads_impl(PyObject *module, PyObject *data, int fix_imports, /*[clinic end generated code: output=82ac1e6b588e6d02 input=b3615540d0535087]*/ { PyObject *result; - UnpicklerObject *unpickler = _Unpickler_New(); + UnpicklerObject *unpickler = _Unpickler_New(module); if (unpickler == NULL) return NULL; @@ -7850,7 +7792,8 @@ _pickle_loads_impl(PyObject *module, PyObject *data, int fix_imports, unpickler->fix_imports = fix_imports; - result = load(unpickler); + PickleState *state = _Pickle_GetState(module); + result = load(state, unpickler); Py_DECREF(unpickler); return result; @@ -7898,81 +7841,94 @@ pickle_traverse(PyObject *m, visitproc visit, void *arg) Py_VISIT(st->codecs_encode); Py_VISIT(st->getattr); Py_VISIT(st->partial); + Py_VISIT(st->Pickler_Type); + Py_VISIT(st->Unpickler_Type); + Py_VISIT(st->Pdata_Type); + Py_VISIT(st->PicklerMemoProxyType); + Py_VISIT(st->UnpicklerMemoProxyType); return 0; } -static struct PyModuleDef _picklemodule = { - PyModuleDef_HEAD_INIT, - "_pickle", /* m_name */ - pickle_module_doc, /* m_doc */ - sizeof(PickleState), /* m_size */ - pickle_methods, /* m_methods */ - NULL, /* m_reload */ - pickle_traverse, /* m_traverse */ - pickle_clear, /* m_clear */ - (freefunc)pickle_free /* m_free */ -}; - -PyMODINIT_FUNC -PyInit__pickle(void) +static int +_pickle_exec(PyObject *m) { - PyObject *m; - PickleState *st; + PickleState *st = _Pickle_GetState(m); - m = PyState_FindModule(&_picklemodule); - if (m) { - return Py_NewRef(m); - } +#define CREATE_TYPE(mod, type, spec) \ + do { \ + type = (PyTypeObject *)PyType_FromMetaclass(NULL, mod, spec, NULL); \ + if (type == NULL) { \ + return -1; \ + } \ + } while (0) - if (PyType_Ready(&Pdata_Type) < 0) - return NULL; - if (PyType_Ready(&PicklerMemoProxyType) < 0) - return NULL; - if (PyType_Ready(&UnpicklerMemoProxyType) < 0) - return 
NULL; + CREATE_TYPE(m, st->Pdata_Type, &pdata_spec); + CREATE_TYPE(m, st->PicklerMemoProxyType, &memoproxy_spec); + CREATE_TYPE(m, st->UnpicklerMemoProxyType, &unpickler_memoproxy_spec); + CREATE_TYPE(m, st->Pickler_Type, &pickler_type_spec); + CREATE_TYPE(m, st->Unpickler_Type, &unpickler_type_spec); - /* Create the module and add the functions. */ - m = PyModule_Create(&_picklemodule); - if (m == NULL) - return NULL; +#undef CREATE_TYPE /* Add types */ - if (PyModule_AddType(m, &Pickler_Type) < 0) { - return NULL; + if (PyModule_AddType(m, &PyPickleBuffer_Type) < 0) { + return -1; } - if (PyModule_AddType(m, &Unpickler_Type) < 0) { - return NULL; + if (PyModule_AddType(m, st->Pickler_Type) < 0) { + return -1; } - if (PyModule_AddType(m, &PyPickleBuffer_Type) < 0) { - return NULL; + if (PyModule_AddType(m, st->Unpickler_Type) < 0) { + return -1; } - st = _Pickle_GetState(m); - /* Initialize the exceptions. */ st->PickleError = PyErr_NewException("_pickle.PickleError", NULL, NULL); if (st->PickleError == NULL) - return NULL; + return -1; st->PicklingError = \ PyErr_NewException("_pickle.PicklingError", st->PickleError, NULL); if (st->PicklingError == NULL) - return NULL; + return -1; st->UnpicklingError = \ PyErr_NewException("_pickle.UnpicklingError", st->PickleError, NULL); if (st->UnpicklingError == NULL) - return NULL; + return -1; if (PyModule_AddObjectRef(m, "PickleError", st->PickleError) < 0) { - return NULL; + return -1; } if (PyModule_AddObjectRef(m, "PicklingError", st->PicklingError) < 0) { - return NULL; + return -1; } if (PyModule_AddObjectRef(m, "UnpicklingError", st->UnpicklingError) < 0) { - return NULL; + return -1; } + if (_Pickle_InitState(st) < 0) - return NULL; + return -1; - return m; + return 0; +} + +static PyModuleDef_Slot pickle_slots[] = { + {Py_mod_exec, _pickle_exec}, + {0, NULL}, +}; + +static struct PyModuleDef _picklemodule = { + PyModuleDef_HEAD_INIT, + .m_name = "_pickle", + .m_doc = pickle_module_doc, + .m_size = sizeof(PickleState), + .m_methods = pickle_methods, + .m_slots = pickle_slots, + .m_traverse = pickle_traverse, + .m_clear = pickle_clear, + .m_free = (freefunc)pickle_free, +}; + +PyMODINIT_FUNC +PyInit__pickle(void) +{ + return PyModuleDef_Init(&_picklemodule); } diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index f3ff39215eab76..f5bce8cd7628ad 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -75,6 +75,28 @@ static struct PyModuleDef _posixsubprocessmodule; +/*[clinic input] +module _posixsubprocess +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c62211df27cf7334]*/ + +/*[python input] +class pid_t_converter(CConverter): + type = 'pid_t' + format_unit = '" _Py_PARSE_PID "' + + def parse_arg(self, argname, displayname): + return """ + {paramname} = PyLong_AsPid({argname}); + if ({paramname} == -1 && PyErr_Occurred()) {{{{ + goto exit; + }}}} + """.format(argname=argname, paramname=self.parser_name) +[python start generated code]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=5af1c116d56cbb5a]*/ + +#include "clinic/_posixsubprocess.c.h" + /* Convert ASCII to a positive int, no libc call. no overflow. -1 on error. */ static int _pos_int_from_ascii(const char *name) @@ -744,7 +766,7 @@ do_fork_exec(char *const exec_array[], assert(preexec_fn == Py_None); pid = vfork(); - if (pid == -1) { + if (pid == (pid_t)-1) { /* If vfork() fails, fall back to using fork(). 
When it isn't * allowed in a process by the kernel, vfork can return -1 * with errno EINVAL. https://bugs.python.org/issue47151. */ @@ -784,44 +806,81 @@ do_fork_exec(char *const exec_array[], return 0; /* Dead code to avoid a potential compiler warning. */ } +/*[clinic input] +_posixsubprocess.fork_exec as subprocess_fork_exec + args as process_args: object + executable_list: object + close_fds: bool + pass_fds as py_fds_to_keep: object(subclass_of='&PyTuple_Type') + cwd as cwd_obj: object + env as env_list: object + p2cread: int + p2cwrite: int + c2pread: int + c2pwrite: int + errread: int + errwrite: int + errpipe_read: int + errpipe_write: int + restore_signals: bool + call_setsid: bool + pgid_to_set: pid_t + gid as gid_object: object + extra_groups as extra_groups_packed: object + uid as uid_object: object + child_umask: int + preexec_fn: object + allow_vfork: bool + / + +Spawn a fresh new child process. + +Fork a child process, close parent file descriptors as appropriate in the +child and duplicate the few that are needed before calling exec() in the +child process. + +If close_fds is True, close file descriptors 3 and higher, except those listed +in the sorted tuple pass_fds. + +The preexec_fn, if supplied, will be called immediately before closing file +descriptors and exec. + +WARNING: preexec_fn is NOT SAFE if your application uses threads. + It may trigger infrequent, difficult to debug deadlocks. + +If an error occurs in the child process before the exec, it is +serialized and written to the errpipe_write fd per subprocess.py. + +Returns: the child process's PID. + +Raises: Only on an error in the parent process. +[clinic start generated code]*/ static PyObject * -subprocess_fork_exec(PyObject *module, PyObject *args) +subprocess_fork_exec_impl(PyObject *module, PyObject *process_args, + PyObject *executable_list, int close_fds, + PyObject *py_fds_to_keep, PyObject *cwd_obj, + PyObject *env_list, int p2cread, int p2cwrite, + int c2pread, int c2pwrite, int errread, + int errwrite, int errpipe_read, int errpipe_write, + int restore_signals, int call_setsid, + pid_t pgid_to_set, PyObject *gid_object, + PyObject *extra_groups_packed, + PyObject *uid_object, int child_umask, + PyObject *preexec_fn, int allow_vfork) +/*[clinic end generated code: output=7ee4f6ee5cf22b5b input=51757287ef266ffa]*/ { - PyObject *gc_module = NULL; - PyObject *executable_list, *py_fds_to_keep; - PyObject *env_list, *preexec_fn; - PyObject *process_args, *converted_args = NULL, *fast_args = NULL; + PyObject *converted_args = NULL, *fast_args = NULL; PyObject *preexec_fn_args_tuple = NULL; - PyObject *extra_groups_packed; - PyObject *uid_object, *gid_object; - int p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite; - int errpipe_read, errpipe_write, close_fds, restore_signals; - int call_setsid; - pid_t pgid_to_set = -1; gid_t *extra_groups = NULL; - int child_umask; - PyObject *cwd_obj, *cwd_obj2 = NULL; - const char *cwd; + PyObject *cwd_obj2 = NULL; + const char *cwd = NULL; pid_t pid = -1; int need_to_reenable_gc = 0; - char *const *exec_array, *const *argv = NULL, *const *envp = NULL; - Py_ssize_t arg_num, extra_group_size = 0; + char *const *argv = NULL, *const *envp = NULL; + Py_ssize_t extra_group_size = 0; int need_after_fork = 0; int saved_errno = 0; - int allow_vfork; - - if (!PyArg_ParseTuple( - args, "OOpO!OOiiiiiiiipp" _Py_PARSE_PID "OOOiOp:fork_exec", - &process_args, &executable_list, - &close_fds, &PyTuple_Type, &py_fds_to_keep, - &cwd_obj, &env_list, - &p2cread, &p2cwrite, &c2pread, 
&c2pwrite, - &errread, &errwrite, &errpipe_read, &errpipe_write, - &restore_signals, &call_setsid, &pgid_to_set, - &gid_object, &extra_groups_packed, &uid_object, &child_umask, - &preexec_fn, &allow_vfork)) - return NULL; PyInterpreterState *interp = PyInterpreterState_Get(); if ((preexec_fn != Py_None) && (interp != PyInterpreterState_Main())) { @@ -844,7 +903,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args) need_to_reenable_gc = PyGC_Disable(); } - exec_array = _PySequence_BytesToCharpArray(executable_list); + char *const *exec_array = _PySequence_BytesToCharpArray(executable_list); if (!exec_array) goto cleanup; @@ -862,7 +921,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args) converted_args = PyTuple_New(num_args); if (converted_args == NULL) goto cleanup; - for (arg_num = 0; arg_num < num_args; ++arg_num) { + for (Py_ssize_t arg_num = 0; arg_num < num_args; ++arg_num) { PyObject *borrowed_arg, *converted_arg; if (PySequence_Fast_GET_SIZE(fast_args) != num_args) { PyErr_SetString(PyExc_RuntimeError, "args changed during iteration"); @@ -891,8 +950,6 @@ subprocess_fork_exec(PyObject *module, PyObject *args) if (PyUnicode_FSConverter(cwd_obj, &cwd_obj2) == 0) goto cleanup; cwd = PyBytes_AsString(cwd_obj2); - } else { - cwd = NULL; } if (extra_groups_packed != Py_None) { @@ -1019,7 +1076,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args) py_fds_to_keep, preexec_fn, preexec_fn_args_tuple); /* Parent (original) process */ - if (pid == -1) { + if (pid == (pid_t)-1) { /* Capture errno for the exception. */ saved_errno = errno; } @@ -1068,47 +1125,17 @@ subprocess_fork_exec(PyObject *module, PyObject *args) if (need_to_reenable_gc) { PyGC_Enable(); } - Py_XDECREF(gc_module); return pid == -1 ? NULL : PyLong_FromPid(pid); } - -PyDoc_STRVAR(subprocess_fork_exec_doc, -"fork_exec(args, executable_list, close_fds, pass_fds, cwd, env,\n\ - p2cread, p2cwrite, c2pread, c2pwrite,\n\ - errread, errwrite, errpipe_read, errpipe_write,\n\ - restore_signals, call_setsid, pgid_to_set,\n\ - gid, extra_groups, uid,\n\ - preexec_fn)\n\ -\n\ -Forks a child process, closes parent file descriptors as appropriate in the\n\ -child and dups the few that are needed before calling exec() in the child\n\ -process.\n\ -\n\ -If close_fds is true, close file descriptors 3 and higher, except those listed\n\ -in the sorted tuple pass_fds.\n\ -\n\ -The preexec_fn, if supplied, will be called immediately before closing file\n\ -descriptors and exec.\n\ -WARNING: preexec_fn is NOT SAFE if your application uses threads.\n\ - It may trigger infrequent, difficult to debug deadlocks.\n\ -\n\ -If an error occurs in the child process before the exec, it is\n\ -serialized and written to the errpipe_write fd per subprocess.py.\n\ -\n\ -Returns: the child process's PID.\n\ -\n\ -Raises: Only on an error in the parent process.\n\ -"); - /* module level code ********************************************************/ PyDoc_STRVAR(module_doc, "A POSIX helper for the subprocess module."); static PyMethodDef module_methods[] = { - {"fork_exec", subprocess_fork_exec, METH_VARARGS, subprocess_fork_exec_doc}, + SUBPROCESS_FORK_EXEC_METHODDEF {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h index 4c3fd1bd27411b..182754cca36d61 100644 --- a/Modules/_sqlite/clinic/connection.c.h +++ b/Modules/_sqlite/clinic/connection.c.h @@ -846,30 +846,63 @@ pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *a #if 
defined(PY_SQLITE_ENABLE_LOAD_EXTENSION) PyDoc_STRVAR(pysqlite_connection_load_extension__doc__, -"load_extension($self, name, /)\n" +"load_extension($self, name, /, *, entrypoint=None)\n" "--\n" "\n" "Load SQLite extension module."); #define PYSQLITE_CONNECTION_LOAD_EXTENSION_METHODDEF \ - {"load_extension", (PyCFunction)pysqlite_connection_load_extension, METH_O, pysqlite_connection_load_extension__doc__}, + {"load_extension", _PyCFunction_CAST(pysqlite_connection_load_extension), METH_FASTCALL|METH_KEYWORDS, pysqlite_connection_load_extension__doc__}, static PyObject * pysqlite_connection_load_extension_impl(pysqlite_Connection *self, - const char *extension_name); + const char *extension_name, + const char *entrypoint); static PyObject * -pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *arg) +pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(entrypoint), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "entrypoint", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "load_extension", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; const char *extension_name; + const char *entrypoint = NULL; - if (!PyUnicode_Check(arg)) { - _PyArg_BadArgument("load_extension", "argument", "str", arg); + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("load_extension", "argument 1", "str", args[0]); goto exit; } Py_ssize_t extension_name_length; - extension_name = PyUnicode_AsUTF8AndSize(arg, &extension_name_length); + extension_name = PyUnicode_AsUTF8AndSize(args[0], &extension_name_length); if (extension_name == NULL) { goto exit; } @@ -877,7 +910,29 @@ pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = pysqlite_connection_load_extension_impl(self, extension_name); + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[1] == Py_None) { + entrypoint = NULL; + } + else if (PyUnicode_Check(args[1])) { + Py_ssize_t entrypoint_length; + entrypoint = PyUnicode_AsUTF8AndSize(args[1], &entrypoint_length); + if (entrypoint == NULL) { + goto exit; + } + if (strlen(entrypoint) != (size_t)entrypoint_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + } + else { + _PyArg_BadArgument("load_extension", "argument 'entrypoint'", "str or None", args[1]); + goto exit; + } +skip_optional_kwonly: + return_value = pysqlite_connection_load_extension_impl(self, extension_name, entrypoint); exit: return return_value; @@ -1513,6 +1568,85 @@ getlimit(pysqlite_Connection *self, PyObject *arg) return return_value; } +PyDoc_STRVAR(setconfig__doc__, +"setconfig($self, op, enable=True, /)\n" +"--\n" +"\n" +"Set a boolean connection configuration 
option.\n" +"\n" +" op\n" +" The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes."); + +#define SETCONFIG_METHODDEF \ + {"setconfig", _PyCFunction_CAST(setconfig), METH_FASTCALL, setconfig__doc__}, + +static PyObject * +setconfig_impl(pysqlite_Connection *self, int op, int enable); + +static PyObject * +setconfig(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int op; + int enable = 1; + + if (!_PyArg_CheckPositional("setconfig", nargs, 1, 2)) { + goto exit; + } + op = _PyLong_AsInt(args[0]); + if (op == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + enable = PyObject_IsTrue(args[1]); + if (enable < 0) { + goto exit; + } +skip_optional: + return_value = setconfig_impl(self, op, enable); + +exit: + return return_value; +} + +PyDoc_STRVAR(getconfig__doc__, +"getconfig($self, op, /)\n" +"--\n" +"\n" +"Query a boolean connection configuration option.\n" +"\n" +" op\n" +" The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes."); + +#define GETCONFIG_METHODDEF \ + {"getconfig", (PyCFunction)getconfig, METH_O, getconfig__doc__}, + +static int +getconfig_impl(pysqlite_Connection *self, int op); + +static PyObject * +getconfig(pysqlite_Connection *self, PyObject *arg) +{ + PyObject *return_value = NULL; + int op; + int _return_value; + + op = _PyLong_AsInt(arg); + if (op == -1 && PyErr_Occurred()) { + goto exit; + } + _return_value = getconfig_impl(self, op); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + #ifndef CREATE_WINDOW_FUNCTION_METHODDEF #define CREATE_WINDOW_FUNCTION_METHODDEF #endif /* !defined(CREATE_WINDOW_FUNCTION_METHODDEF) */ @@ -1532,4 +1666,4 @@ getlimit(pysqlite_Connection *self, PyObject *arg) #ifndef DESERIALIZE_METHODDEF #define DESERIALIZE_METHODDEF #endif /* !defined(DESERIALIZE_METHODDEF) */ -/*[clinic end generated code: output=f10306e10427488b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8b03149c115ee6da input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index fb61ef82ef869b..aec3aa8bbf4ed8 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -30,6 +30,8 @@ #include "prepare_protocol.h" #include "util.h" +#include <stdbool.h> + #if SQLITE_VERSION_NUMBER >= 3014000 #define HAVE_TRACE_V2 #endif @@ -1601,14 +1603,17 @@ _sqlite3.Connection.load_extension as pysqlite_connection_load_extension name as extension_name: str / + * + entrypoint: str(accept={str, NoneType}) = None Load SQLite extension module. 
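/*
 * Editor's illustrative sketch (not part of the patch): the new keyword-only
 * "entrypoint" argument is handed straight to the third parameter of SQLite's
 * sqlite3_load_extension(), shown here with the C API that the binding wraps.
 * The extension file name and entry-point name are hypothetical placeholders.
 */
#include <sqlite3.h>
#include <stdio.h>

int main(void)
{
    sqlite3 *db = NULL;
    char *errmsg = NULL;

    if (sqlite3_open(":memory:", &db) != SQLITE_OK) {
        return 1;
    }
    /* Extension loading is disabled by default. */
    sqlite3_enable_load_extension(db, 1);

    /* A NULL entry point keeps the old behaviour (SQLite picks the default
       init function); a non-NULL value selects a specific entry point. */
    if (sqlite3_load_extension(db, "./myextension", "sqlite3_myextension_init",
                               &errmsg) != SQLITE_OK) {
        fprintf(stderr, "load failed: %s\n", errmsg ? errmsg : "unknown");
        sqlite3_free(errmsg);
    }
    sqlite3_close(db);
    return 0;
}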
[clinic start generated code]*/ static PyObject * pysqlite_connection_load_extension_impl(pysqlite_Connection *self, - const char *extension_name) -/*[clinic end generated code: output=47eb1d7312bc97a7 input=edd507389d89d621]*/ + const char *extension_name, + const char *entrypoint) +/*[clinic end generated code: output=7e61a7add9de0286 input=c36b14ea702e04f5]*/ { int rc; char* errmsg; @@ -1621,7 +1626,7 @@ pysqlite_connection_load_extension_impl(pysqlite_Connection *self, return NULL; } - rc = sqlite3_load_extension(self->db, extension_name, 0, &errmsg); + rc = sqlite3_load_extension(self->db, extension_name, entrypoint, &errmsg); if (rc != 0) { PyErr_SetString(self->OperationalError, errmsg); return NULL; @@ -2340,6 +2345,119 @@ getlimit_impl(pysqlite_Connection *self, int category) return setlimit_impl(self, category, -1); } +static inline bool +is_int_config(const int op) +{ + switch (op) { + case SQLITE_DBCONFIG_ENABLE_FKEY: + case SQLITE_DBCONFIG_ENABLE_TRIGGER: +#if SQLITE_VERSION_NUMBER >= 3012002 + case SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: +#endif +#if SQLITE_VERSION_NUMBER >= 3013000 + case SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION: +#endif +#if SQLITE_VERSION_NUMBER >= 3016000 + case SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: +#endif +#if SQLITE_VERSION_NUMBER >= 3020000 + case SQLITE_DBCONFIG_ENABLE_QPSG: +#endif +#if SQLITE_VERSION_NUMBER >= 3022000 + case SQLITE_DBCONFIG_TRIGGER_EQP: +#endif +#if SQLITE_VERSION_NUMBER >= 3024000 + case SQLITE_DBCONFIG_RESET_DATABASE: +#endif +#if SQLITE_VERSION_NUMBER >= 3026000 + case SQLITE_DBCONFIG_DEFENSIVE: +#endif +#if SQLITE_VERSION_NUMBER >= 3028000 + case SQLITE_DBCONFIG_WRITABLE_SCHEMA: +#endif +#if SQLITE_VERSION_NUMBER >= 3029000 + case SQLITE_DBCONFIG_DQS_DDL: + case SQLITE_DBCONFIG_DQS_DML: + case SQLITE_DBCONFIG_LEGACY_ALTER_TABLE: +#endif +#if SQLITE_VERSION_NUMBER >= 3030000 + case SQLITE_DBCONFIG_ENABLE_VIEW: +#endif +#if SQLITE_VERSION_NUMBER >= 3031000 + case SQLITE_DBCONFIG_LEGACY_FILE_FORMAT: + case SQLITE_DBCONFIG_TRUSTED_SCHEMA: +#endif + return true; + default: + return false; + } +} + +/*[clinic input] +_sqlite3.Connection.setconfig as setconfig + + op: int + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + enable: bool = True + / + +Set a boolean connection configuration option. +[clinic start generated code]*/ + +static PyObject * +setconfig_impl(pysqlite_Connection *self, int op, int enable) +/*[clinic end generated code: output=c60b13e618aff873 input=a10f1539c2d7da6b]*/ +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return NULL; + } + if (!is_int_config(op)) { + return PyErr_Format(PyExc_ValueError, "unknown config 'op': %d", op); + } + + int actual; + int rc = sqlite3_db_config(self->db, op, enable, &actual); + if (rc != SQLITE_OK) { + (void)_pysqlite_seterror(self->state, self->db); + return NULL; + } + if (enable != actual) { + PyErr_SetString(self->state->OperationalError, "Unable to set config"); + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +_sqlite3.Connection.getconfig as getconfig -> bool + + op: int + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + / + +Query a boolean connection configuration option. 
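/*
 * Editor's illustrative sketch (not part of the patch): setconfig() and
 * getconfig() are thin wrappers around SQLite's sqlite3_db_config().
 * Passing -1 as the value queries the current setting without changing it,
 * which is how the getconfig() implementation below reads the flag back.
 */
#include <sqlite3.h>
#include <stdio.h>

int main(void)
{
    sqlite3 *db = NULL;
    int actual = 0;

    if (sqlite3_open(":memory:", &db) != SQLITE_OK) {
        return 1;
    }
    /* Enable foreign-key enforcement and check that SQLite accepted it. */
    sqlite3_db_config(db, SQLITE_DBCONFIG_ENABLE_FKEY, 1, &actual);
    printf("after set: %d\n", actual);   /* 1 */

    /* -1 means "query only": the setting is reported but left unchanged. */
    sqlite3_db_config(db, SQLITE_DBCONFIG_ENABLE_FKEY, -1, &actual);
    printf("query:     %d\n", actual);   /* still 1 */

    sqlite3_close(db);
    return 0;
}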
+[clinic start generated code]*/ + +static int +getconfig_impl(pysqlite_Connection *self, int op) +/*[clinic end generated code: output=25ac05044c7b78a3 input=b0526d7e432e3f2f]*/ +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return -1; + } + if (!is_int_config(op)) { + PyErr_Format(PyExc_ValueError, "unknown config 'op': %d", op); + return -1; + } + + int current; + int rc = sqlite3_db_config(self->db, op, -1, ¤t); + if (rc != SQLITE_OK) { + (void)_pysqlite_seterror(self->state, self->db); + return -1; + } + return current; +} static PyObject * get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) @@ -2421,6 +2539,8 @@ static PyMethodDef connection_methods[] = { DESERIALIZE_METHODDEF CREATE_WINDOW_FUNCTION_METHODDEF BLOBOPEN_METHODDEF + SETCONFIG_METHODDEF + GETCONFIG_METHODDEF {NULL, NULL} }; diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 6db3d51fd20220..9c42faa232c70d 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -499,6 +499,49 @@ add_integer_constants(PyObject *module) { #if SQLITE_VERSION_NUMBER >= 3008007 ADD_INT(SQLITE_LIMIT_WORKER_THREADS); #endif + + /* + * Database connection configuration options. + * See https://www.sqlite.org/c3ref/c_dbconfig_defensive.html + */ + ADD_INT(SQLITE_DBCONFIG_ENABLE_FKEY); + ADD_INT(SQLITE_DBCONFIG_ENABLE_TRIGGER); +#if SQLITE_VERSION_NUMBER >= 3012002 + ADD_INT(SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER); +#endif +#if SQLITE_VERSION_NUMBER >= 3013000 + ADD_INT(SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION); +#endif +#if SQLITE_VERSION_NUMBER >= 3016000 + ADD_INT(SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE); +#endif +#if SQLITE_VERSION_NUMBER >= 3020000 + ADD_INT(SQLITE_DBCONFIG_ENABLE_QPSG); +#endif +#if SQLITE_VERSION_NUMBER >= 3022000 + ADD_INT(SQLITE_DBCONFIG_TRIGGER_EQP); +#endif +#if SQLITE_VERSION_NUMBER >= 3024000 + ADD_INT(SQLITE_DBCONFIG_RESET_DATABASE); +#endif +#if SQLITE_VERSION_NUMBER >= 3026000 + ADD_INT(SQLITE_DBCONFIG_DEFENSIVE); +#endif +#if SQLITE_VERSION_NUMBER >= 3028000 + ADD_INT(SQLITE_DBCONFIG_WRITABLE_SCHEMA); +#endif +#if SQLITE_VERSION_NUMBER >= 3029000 + ADD_INT(SQLITE_DBCONFIG_DQS_DDL); + ADD_INT(SQLITE_DBCONFIG_DQS_DML); + ADD_INT(SQLITE_DBCONFIG_LEGACY_ALTER_TABLE); +#endif +#if SQLITE_VERSION_NUMBER >= 3030000 + ADD_INT(SQLITE_DBCONFIG_ENABLE_VIEW); +#endif +#if SQLITE_VERSION_NUMBER >= 3031000 + ADD_INT(SQLITE_DBCONFIG_LEGACY_FILE_FORMAT); + ADD_INT(SQLITE_DBCONFIG_TRUSTED_SCHEMA); +#endif #undef ADD_INT return 0; } diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 121d18884d0a9f..c9e2f24d66cc00 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -660,6 +660,16 @@ PySSL_SetError(PySSLSocket *sslsock, int ret, const char *filename, int lineno) ERR_GET_REASON(e) == SSL_R_CERTIFICATE_VERIFY_FAILED) { type = state->PySSLCertVerificationErrorObject; } +#if defined(SSL_R_UNEXPECTED_EOF_WHILE_READING) + /* OpenSSL 3.0 changed transport EOF from SSL_ERROR_SYSCALL with + * zero return value to SSL_ERROR_SSL with a special error code. 
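/*
 * Editor's illustrative sketch (not part of the patch, assumes OpenSSL 3.x):
 * with SSL_OP_IGNORE_UNEXPECTED_EOF no longer set (see the option removal
 * further below in this file's diff), an abrupt transport close is reported
 * as SSL_ERROR_SSL with the reason code checked here, which the branch that
 * follows maps to ssl.SSLEOFError just like the old zero-return
 * SSL_ERROR_SYSCALL case.
 */
#include <openssl/ssl.h>
#include <openssl/err.h>

static int
is_unexpected_eof(SSL *ssl, int ret)
{
    if (SSL_get_error(ssl, ret) != SSL_ERROR_SSL) {
        return 0;
    }
    unsigned long e = ERR_peek_last_error();
    return ERR_GET_LIB(e) == ERR_LIB_SSL
        && ERR_GET_REASON(e) == SSL_R_UNEXPECTED_EOF_WHILE_READING;
}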
*/ + if (ERR_GET_LIB(e) == ERR_LIB_SSL && + ERR_GET_REASON(e) == SSL_R_UNEXPECTED_EOF_WHILE_READING) { + p = PY_SSL_ERROR_EOF; + type = state->PySSLEOFErrorObject; + errstr = "EOF occurred in violation of protocol"; + } +#endif break; } default: @@ -1988,24 +1998,44 @@ static PyObject * _ssl__SSLSocket_shared_ciphers_impl(PySSLSocket *self) /*[clinic end generated code: output=3d174ead2e42c4fd input=0bfe149da8fe6306]*/ { - STACK_OF(SSL_CIPHER) *ciphers; - int i; + STACK_OF(SSL_CIPHER) *server_ciphers; + STACK_OF(SSL_CIPHER) *client_ciphers; + int i, len; PyObject *res; + const SSL_CIPHER* cipher; + + /* Rather than use SSL_get_shared_ciphers, we use an equivalent algorithm because: + + 1) It returns a colon separated list of strings, in an undefined + order, that we would have to post process back into tuples. + 2) It will return a truncated string with no indication that it has + done so, if the buffer is too small. + */ - ciphers = SSL_get_ciphers(self->ssl); - if (!ciphers) + server_ciphers = SSL_get_ciphers(self->ssl); + if (!server_ciphers) Py_RETURN_NONE; - res = PyList_New(sk_SSL_CIPHER_num(ciphers)); + client_ciphers = SSL_get_client_ciphers(self->ssl); + if (!client_ciphers) + Py_RETURN_NONE; + + res = PyList_New(sk_SSL_CIPHER_num(server_ciphers)); if (!res) return NULL; - for (i = 0; i < sk_SSL_CIPHER_num(ciphers); i++) { - PyObject *tup = cipher_to_tuple(sk_SSL_CIPHER_value(ciphers, i)); + len = 0; + for (i = 0; i < sk_SSL_CIPHER_num(server_ciphers); i++) { + cipher = sk_SSL_CIPHER_value(server_ciphers, i); + if (sk_SSL_CIPHER_find(client_ciphers, cipher) < 0) + continue; + + PyObject *tup = cipher_to_tuple(cipher); if (!tup) { Py_DECREF(res); return NULL; } - PyList_SET_ITEM(res, i, tup); + PyList_SET_ITEM(res, len++, tup); } + Py_SET_SIZE(res, len); return res; } @@ -3092,10 +3122,6 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) #endif #ifdef SSL_OP_SINGLE_ECDH_USE options |= SSL_OP_SINGLE_ECDH_USE; -#endif -#ifdef SSL_OP_IGNORE_UNEXPECTED_EOF - /* Make OpenSSL 3.0.0 behave like 1.1.1 */ - options |= SSL_OP_IGNORE_UNEXPECTED_EOF; #endif SSL_CTX_set_options(self->ctx, options); @@ -3904,7 +3930,7 @@ _add_ca_certs(PySSLContext *self, const void *data, Py_ssize_t len, { BIO *biobuf = NULL; X509_STORE *store; - int retval = -1, err, loaded = 0; + int retval = -1, err, loaded = 0, was_bio_eof = 0; assert(filetype == SSL_FILETYPE_ASN1 || filetype == SSL_FILETYPE_PEM); @@ -3932,6 +3958,10 @@ _add_ca_certs(PySSLContext *self, const void *data, Py_ssize_t len, int r; if (filetype == SSL_FILETYPE_ASN1) { + if (BIO_eof(biobuf)) { + was_bio_eof = 1; + break; + } cert = d2i_X509_bio(biobuf, NULL); } else { cert = PEM_read_bio_X509(biobuf, NULL, @@ -3967,9 +3997,7 @@ _add_ca_certs(PySSLContext *self, const void *data, Py_ssize_t len, } _setSSLError(get_state_ctx(self), msg, 0, __FILE__, __LINE__); retval = -1; - } else if ((filetype == SSL_FILETYPE_ASN1) && - (ERR_GET_LIB(err) == ERR_LIB_ASN1) && - (ERR_GET_REASON(err) == ASN1_R_HEADER_TOO_LONG)) { + } else if ((filetype == SSL_FILETYPE_ASN1) && was_bio_eof) { /* EOF ASN1 file, not an error */ ERR_clear_error(); retval = 0; @@ -4308,8 +4336,6 @@ _ssl__SSLContext_set_ecdh_curve(PySSLContext *self, PyObject *name) { PyObject *name_bytes; int nid; - EC_KEY *key; - if (!PyUnicode_FSConverter(name, &name_bytes)) return NULL; assert(PyBytes_Check(name_bytes)); @@ -4320,13 +4346,20 @@ _ssl__SSLContext_set_ecdh_curve(PySSLContext *self, PyObject *name) "unknown elliptic curve name %R", name); return NULL; } - key = 
EC_KEY_new_by_curve_name(nid); +#if OPENSSL_VERSION_MAJOR < 3 + EC_KEY *key = EC_KEY_new_by_curve_name(nid); if (key == NULL) { _setSSLError(get_state_ctx(self), NULL, 0, __FILE__, __LINE__); return NULL; } SSL_CTX_set_tmp_ecdh(self->ctx, key); EC_KEY_free(key); +#else + if (!SSL_CTX_set1_groups(self->ctx, &nid, 1)) { + _setSSLError(get_state_ctx(self), NULL, 0, __FILE__, __LINE__); + return NULL; + } +#endif Py_RETURN_NONE; } @@ -5963,9 +5996,6 @@ sslmodule_init_errorcodes(PyObject *module) state->err_codes_to_names = PyDict_New(); if (state->err_codes_to_names == NULL) return -1; - state->err_names_to_codes = PyDict_New(); - if (state->err_names_to_codes == NULL) - return -1; state->lib_codes_to_names = PyDict_New(); if (state->lib_codes_to_names == NULL) return -1; @@ -5979,8 +6009,6 @@ sslmodule_init_errorcodes(PyObject *module) return -1; if (PyDict_SetItem(state->err_codes_to_names, key, mnemo)) return -1; - if (PyDict_SetItem(state->err_names_to_codes, mnemo, key)) - return -1; Py_DECREF(key); Py_DECREF(mnemo); errcode++; @@ -6000,13 +6028,6 @@ sslmodule_init_errorcodes(PyObject *module) libcode++; } - if (PyModule_AddObjectRef(module, "err_codes_to_names", state->err_codes_to_names)) - return -1; - if (PyModule_AddObjectRef(module, "err_names_to_codes", state->err_names_to_codes)) - return -1; - if (PyModule_AddObjectRef(module, "lib_codes_to_names", state->lib_codes_to_names)) - return -1; - return 0; } @@ -6161,7 +6182,6 @@ sslmodule_traverse(PyObject *m, visitproc visit, void *arg) Py_VISIT(state->PySSLSyscallErrorObject); Py_VISIT(state->PySSLEOFErrorObject); Py_VISIT(state->err_codes_to_names); - Py_VISIT(state->err_names_to_codes); Py_VISIT(state->lib_codes_to_names); Py_VISIT(state->Sock_Type); @@ -6186,7 +6206,6 @@ sslmodule_clear(PyObject *m) Py_CLEAR(state->PySSLSyscallErrorObject); Py_CLEAR(state->PySSLEOFErrorObject); Py_CLEAR(state->err_codes_to_names); - Py_CLEAR(state->err_names_to_codes); Py_CLEAR(state->lib_codes_to_names); Py_CLEAR(state->Sock_Type); Py_CLEAR(state->str_library); diff --git a/Modules/_ssl.h b/Modules/_ssl.h index d68ccdec5e88c4..c1da8b46b536d3 100644 --- a/Modules/_ssl.h +++ b/Modules/_ssl.h @@ -25,7 +25,6 @@ typedef struct { PyObject *PySSLEOFErrorObject; /* Error mappings */ PyObject *err_codes_to_names; - PyObject *err_names_to_codes; PyObject *lib_codes_to_names; /* socket type from module CAPI */ PyTypeObject *Sock_Type; diff --git a/Modules/_testcapi/exceptions.c b/Modules/_testcapi/exceptions.c index 1922ca3beb7916..6099f7d20eb56a 100644 --- a/Modules/_testcapi/exceptions.c +++ b/Modules/_testcapi/exceptions.c @@ -40,12 +40,22 @@ static PyObject * exception_print(PyObject *self, PyObject *args) { PyObject *exc; + int legacy = 0; - if (!PyArg_ParseTuple(args, "O:exception_print", &exc)) { + if (!PyArg_ParseTuple(args, "O|i:exception_print", &exc, &legacy)) { return NULL; } - - PyErr_DisplayException(exc); + if (legacy) { + PyObject *tb = NULL; + if (PyExceptionInstance_Check(exc)) { + tb = PyException_GetTraceback(exc); + } + PyErr_Display((PyObject *) Py_TYPE(exc), exc, tb); + Py_XDECREF(tb); + } + else { + PyErr_DisplayException(exc); + } Py_RETURN_NONE; } diff --git a/Modules/_testcapi/heaptype.c b/Modules/_testcapi/heaptype.c index df2a061ed82b06..209cc182c0698d 100644 --- a/Modules/_testcapi/heaptype.c +++ b/Modules/_testcapi/heaptype.c @@ -174,7 +174,7 @@ test_from_spec_invalid_metatype_inheritance(PyObject *self, PyObject *Py_UNUSED( } if (res == 0) { PyErr_SetString(PyExc_AssertionError, - "TypeError did not inlclude expected 
message."); + "TypeError did not include expected message."); goto finally; } result = Py_NewRef(Py_None); @@ -265,7 +265,7 @@ test_type_from_ephemeral_spec(PyObject *self, PyObject *Py_UNUSED(ignored)) /* deallocate the spec (and all contents) */ - // (Explicitly ovewrite memory before freeing, + // (Explicitly overwrite memory before freeing, // so bugs show themselves even without the debug allocator's help.) memset(spec, 0xdd, sizeof(PyType_Spec)); PyMem_Del(spec); diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c index ae3f7a4372dcd8..af32e9668dda2d 100644 --- a/Modules/_testcapi/mem.c +++ b/Modules/_testcapi/mem.c @@ -347,7 +347,7 @@ test_pyobject_new(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *obj; PyTypeObject *type = &PyBaseObject_Type; - PyTypeObject *var_type = &PyLong_Type; + PyTypeObject *var_type = &PyBytes_Type; // PyObject_New() obj = PyObject_New(PyObject, type); diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index c8f31dc8e39fae..60ec81dad2ba9e 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -38,6 +38,7 @@ int _PyTestCapi_Init_Float(PyObject *module); int _PyTestCapi_Init_Structmember(PyObject *module); int _PyTestCapi_Init_Exceptions(PyObject *module); int _PyTestCapi_Init_Code(PyObject *module); +int _PyTestCapi_Init_PyOS(PyObject *module); #ifdef LIMITED_API_AVAILABLE int _PyTestCapi_Init_VectorcallLimited(PyObject *module); diff --git a/Modules/_testcapi/pyos.c b/Modules/_testcapi/pyos.c new file mode 100644 index 00000000000000..63140e914875db --- /dev/null +++ b/Modules/_testcapi/pyos.c @@ -0,0 +1,60 @@ +#include "parts.h" + + +static PyObject * +test_PyOS_mystrnicmp(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + assert(PyOS_mystrnicmp("", "", 0) == 0); + assert(PyOS_mystrnicmp("", "", 1) == 0); + + assert(PyOS_mystrnicmp("insert", "ins", 3) == 0); + assert(PyOS_mystrnicmp("ins", "insert", 3) == 0); + assert(PyOS_mystrnicmp("insect", "insert", 3) == 0); + + assert(PyOS_mystrnicmp("insert", "insert", 6) == 0); + assert(PyOS_mystrnicmp("Insert", "insert", 6) == 0); + assert(PyOS_mystrnicmp("INSERT", "insert", 6) == 0); + assert(PyOS_mystrnicmp("insert", "insert", 10) == 0); + + assert(PyOS_mystrnicmp("invert", "insert", 6) == ('v' - 's')); + assert(PyOS_mystrnicmp("insert", "invert", 6) == ('s' - 'v')); + assert(PyOS_mystrnicmp("insert", "ins\0rt", 6) == 'e'); + + // GH-21845 + assert(PyOS_mystrnicmp("insert\0a", "insert\0b", 8) == 0); + + Py_RETURN_NONE; +} + +static PyObject * +test_PyOS_mystricmp(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + assert(PyOS_mystricmp("", "") == 0); + assert(PyOS_mystricmp("insert", "insert") == 0); + assert(PyOS_mystricmp("Insert", "insert") == 0); + assert(PyOS_mystricmp("INSERT", "insert") == 0); + assert(PyOS_mystricmp("insert", "ins") == 'e'); + assert(PyOS_mystricmp("ins", "insert") == -'e'); + + // GH-21845 + assert(PyOS_mystricmp("insert", "ins\0rt") == 'e'); + assert(PyOS_mystricmp("invert", "insert") == ('v' - 's')); + + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_PyOS_mystrnicmp", test_PyOS_mystrnicmp, METH_NOARGS, NULL}, + {"test_PyOS_mystricmp", test_PyOS_mystricmp, METH_NOARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_PyOS(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index f45d0312e94411..c1892f6fa0a4b8 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -1482,6 
+1482,7 @@ static PyObject * run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) { const char *code; + int use_main_obmalloc = -1; int allow_fork = -1; int allow_exec = -1; int allow_threads = -1; @@ -1493,6 +1494,7 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) PyCompilerFlags cflags = {0}; static char *kwlist[] = {"code", + "use_main_obmalloc", "allow_fork", "allow_exec", "allow_threads", @@ -1500,12 +1502,17 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) "check_multi_interp_extensions", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "s$ppppp:run_in_subinterp_with_config", kwlist, - &code, &allow_fork, &allow_exec, + "s$pppppp:run_in_subinterp_with_config", kwlist, + &code, &use_main_obmalloc, + &allow_fork, &allow_exec, &allow_threads, &allow_daemon_threads, &check_multi_interp_extensions)) { return NULL; } + if (use_main_obmalloc < 0) { + PyErr_SetString(PyExc_ValueError, "missing use_main_obmalloc"); + return NULL; + } if (allow_fork < 0) { PyErr_SetString(PyExc_ValueError, "missing allow_fork"); return NULL; @@ -1532,21 +1539,26 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) PyThreadState_Swap(NULL); const _PyInterpreterConfig config = { + .use_main_obmalloc = use_main_obmalloc, .allow_fork = allow_fork, .allow_exec = allow_exec, .allow_threads = allow_threads, .allow_daemon_threads = allow_daemon_threads, .check_multi_interp_extensions = check_multi_interp_extensions, }; - substate = _Py_NewInterpreterFromConfig(&config); - if (substate == NULL) { + PyStatus status = _Py_NewInterpreterFromConfig(&substate, &config); + if (PyStatus_Exception(status)) { /* Since no new thread state was created, there is no exception to propagate; raise a fresh one after swapping in the old thread state. 
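/*
 * Editor's illustrative sketch (not part of the patch): the updated
 * _Py_NewInterpreterFromConfig() reports failure through a PyStatus and
 * returns the new thread state via an out parameter, the same pattern used
 * by the test helper above and by _xxsubinterpretersmodule.c further below.
 * This is a CPython-internal API, so the sketch assumes the internal core
 * headers are available and the helper name is hypothetical.
 */
static PyThreadState *
new_interpreter_sketch(void)
{
    const _PyInterpreterConfig config = _PyInterpreterConfig_INIT;
    PyThreadState *tstate = NULL;
    PyStatus status = _Py_NewInterpreterFromConfig(&tstate, &config);
    if (PyStatus_Exception(status)) {
        /* No thread state was created; surface the failure as an exception. */
        _PyErr_SetFromPyStatus(status);
        return NULL;
    }
    assert(tstate != NULL);
    return tstate;
}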
*/ PyThreadState_Swap(mainstate); + _PyErr_SetFromPyStatus(status); + PyObject *exc = PyErr_GetRaisedException(); PyErr_SetString(PyExc_RuntimeError, "sub-interpreter creation failed"); + _PyErr_ChainExceptions1(exc); return NULL; } + assert(substate != NULL); r = PyRun_SimpleStringFlags(code, &cflags); Py_EndInterpreter(substate); @@ -2729,6 +2741,18 @@ type_get_version(PyObject *self, PyObject *type) } +static PyObject * +type_assign_version(PyObject *self, PyObject *type) +{ + if (!PyType_Check(type)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + int res = PyUnstable_Type_AssignVersionTag((PyTypeObject *)type); + return PyLong_FromLong(res); +} + + // Test PyThreadState C API static PyObject * test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) @@ -3377,6 +3401,37 @@ test_gc_visit_objects_exit_early(PyObject *Py_UNUSED(self), } +struct atexit_data { + int called; +}; + +static void +callback(void *data) +{ + ((struct atexit_data *)data)->called += 1; +} + +static PyObject * +test_atexit(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyThreadState *oldts = PyThreadState_Swap(NULL); + PyThreadState *tstate = Py_NewInterpreter(); + + struct atexit_data data = {0}; + int res = _Py_AtExit(tstate->interp, callback, (void *)&data); + Py_EndInterpreter(tstate); + PyThreadState_Swap(oldts); + if (res < 0) { + return NULL; + } + if (data.called == 0) { + PyErr_SetString(PyExc_RuntimeError, "atexit callback not called"); + return NULL; + } + Py_RETURN_NONE; +} + + static PyObject *test_buildvalue_issue38913(PyObject *, PyObject *); static PyMethodDef TestMethods[] = { @@ -3495,6 +3550,7 @@ static PyMethodDef TestMethods[] = { {"test_py_is_macros", test_py_is_macros, METH_NOARGS}, {"test_py_is_funcs", test_py_is_funcs, METH_NOARGS}, {"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, + {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")}, {"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL}, {"frame_getlocals", frame_getlocals, METH_O, NULL}, {"frame_getglobals", frame_getglobals, METH_O, NULL}, @@ -3521,6 +3577,7 @@ static PyMethodDef TestMethods[] = { {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, {"test_gc_visit_objects_basic", test_gc_visit_objects_basic, METH_NOARGS, NULL}, {"test_gc_visit_objects_exit_early", test_gc_visit_objects_exit_early, METH_NOARGS, NULL}, + {"test_atexit", test_atexit, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; @@ -4150,6 +4207,9 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Code(m) < 0) { return NULL; } + if (_PyTestCapi_Init_PyOS(m) < 0) { + return NULL; + } #ifndef LIMITED_API_AVAILABLE PyModule_AddObjectRef(m, "LIMITED_API_AVAILABLE", Py_False); diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 632fac2de0c419..9dec5f51310a04 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -14,7 +14,7 @@ #include "Python.h" #include "pycore_atomic_funcs.h" // _Py_atomic_int_get() #include "pycore_bitutils.h" // _Py_bswap32() -#include "pycore_compile.h" // _PyCompile_CodeGen, _PyCompile_OptimizeCfg +#include "pycore_compile.h" // _PyCompile_CodeGen, _PyCompile_OptimizeCfg, _PyCompile_Assemble #include "pycore_fileutils.h" // _Py_normpath #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_gc.h" // PyGC_Head @@ -625,6 +625,70 @@ _testinternalcapi_optimize_cfg_impl(PyObject *module, PyObject *instructions, return _PyCompile_OptimizeCfg(instructions, 
consts); } +static int +get_nonnegative_int_from_dict(PyObject *dict, const char *key) { + PyObject *obj = PyDict_GetItemString(dict, key); + if (obj == NULL) { + return -1; + } + return PyLong_AsLong(obj); +} + +/*[clinic input] + +_testinternalcapi.assemble_code_object -> object + + filename: object + instructions: object + metadata: object + +Create a code object for the given instructions. +[clinic start generated code]*/ + +static PyObject * +_testinternalcapi_assemble_code_object_impl(PyObject *module, + PyObject *filename, + PyObject *instructions, + PyObject *metadata) +/*[clinic end generated code: output=38003dc16a930f48 input=e713ad77f08fb3a8]*/ + +{ + assert(PyDict_Check(metadata)); + _PyCompile_CodeUnitMetadata umd; + + umd.u_name = PyDict_GetItemString(metadata, "name"); + umd.u_qualname = PyDict_GetItemString(metadata, "qualname"); + + assert(PyUnicode_Check(umd.u_name)); + assert(PyUnicode_Check(umd.u_qualname)); + + umd.u_consts = PyDict_GetItemString(metadata, "consts"); + umd.u_names = PyDict_GetItemString(metadata, "names"); + umd.u_varnames = PyDict_GetItemString(metadata, "varnames"); + umd.u_cellvars = PyDict_GetItemString(metadata, "cellvars"); + umd.u_freevars = PyDict_GetItemString(metadata, "freevars"); + umd.u_fasthidden = PyDict_GetItemString(metadata, "fasthidden"); + + assert(PyList_Check(umd.u_consts)); + assert(PyDict_Check(umd.u_names)); + assert(PyDict_Check(umd.u_varnames)); + assert(PyDict_Check(umd.u_cellvars)); + assert(PyDict_Check(umd.u_freevars)); + assert(PyDict_Check(umd.u_fasthidden)); + + umd.u_argcount = get_nonnegative_int_from_dict(metadata, "argcount"); + umd.u_posonlyargcount = get_nonnegative_int_from_dict(metadata, "posonlyargcount"); + umd.u_kwonlyargcount = get_nonnegative_int_from_dict(metadata, "kwonlyargcount"); + umd.u_firstlineno = get_nonnegative_int_from_dict(metadata, "firstlineno"); + + assert(umd.u_argcount >= 0); + assert(umd.u_posonlyargcount >= 0); + assert(umd.u_kwonlyargcount >= 0); + assert(umd.u_firstlineno >= 0); + + return (PyObject*)_PyCompile_Assemble(&umd, filename, instructions); +} + static PyObject * get_interp_settings(PyObject *self, PyObject *args) @@ -705,6 +769,7 @@ static PyMethodDef module_functions[] = { {"set_eval_frame_record", set_eval_frame_record, METH_O, NULL}, _TESTINTERNALCAPI_COMPILER_CODEGEN_METHODDEF _TESTINTERNALCAPI_OPTIMIZE_CFG_METHODDEF + _TESTINTERNALCAPI_ASSEMBLE_CODE_OBJECT_METHODDEF {"get_interp_settings", get_interp_settings, METH_VARARGS, NULL}, {"clear_extension", clear_extension, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index e34854f7025798..cf8990a2df0a9b 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -884,15 +884,3 @@ PyInit__test_module_state_shared(void) } return module; } - - -/*** Helper for imp test ***/ - -static PyModuleDef imp_dummy_def = TEST_MODULE_DEF("imp_dummy", main_slots, testexport_methods); - -PyMODINIT_FUNC -PyInit_imp_dummy(void) -{ - return PyModuleDef_Init(&imp_dummy_def); -} - diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 9c12c696757439..fd2fd9ab25f113 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -946,7 +946,7 @@ local_setattro(localobject *self, PyObject *name, PyObject *v) } if (r == 1) { PyErr_Format(PyExc_AttributeError, - "'%.50s' object attribute '%U' is read-only", + "'%.100s' object attribute '%U' is read-only", Py_TYPE(self)->tp_name, name); return -1; } diff --git a/Modules/_tkinter.c 
b/Modules/_tkinter.c index 1608939766ffb6..385a05932a77ed 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -32,6 +32,8 @@ Copyright (C) 1994 Steen Lumholt. # include "pycore_fileutils.h" // _Py_stat() #endif +#include "pycore_long.h" + #ifdef MS_WINDOWS #include <windows.h> #endif @@ -119,7 +121,7 @@ Copyright (C) 1994 Steen Lumholt. #define WAIT_FOR_STDIN static PyObject * -_get_tcl_lib_path() +_get_tcl_lib_path(void) { static PyObject *tcl_library_path = NULL; static int already_checked = 0; @@ -322,10 +324,6 @@ static int quitMainLoop = 0; static int errorInCmd = 0; static PyObject *excInCmd; -#ifdef TKINTER_PROTECT_LOADTK -static int tk_load_failed = 0; -#endif - static PyObject *Tkapp_UnicodeResult(TkappObject *); @@ -530,17 +528,7 @@ Tcl_AppInit(Tcl_Interp *interp) return TCL_OK; } -#ifdef TKINTER_PROTECT_LOADTK - if (tk_load_failed) { - PySys_WriteStderr("Tk_Init error: %s\n", TKINTER_LOADTK_ERRMSG); - return TCL_ERROR; - } -#endif - if (Tk_Init(interp) == TCL_ERROR) { -#ifdef TKINTER_PROTECT_LOADTK - tk_load_failed = 1; -#endif PySys_WriteStderr("Tk_Init error: %s\n", Tcl_GetStringResult(interp)); return TCL_ERROR; } @@ -633,12 +621,6 @@ Tkapp_New(const char *screenName, const char *className, Tcl_SetVar(v->interp, "_tkinter_skip_tk_init", "1", TCL_GLOBAL_ONLY); } -#ifdef TKINTER_PROTECT_LOADTK - else if (tk_load_failed) { - Tcl_SetVar(v->interp, - "_tkinter_tk_failed", "1", TCL_GLOBAL_ONLY); - } -#endif /* some initial arguments need to be in argv */ if (sync || use) { @@ -700,18 +682,6 @@ Tkapp_New(const char *screenName, const char *className, if (Tcl_AppInit(v->interp) != TCL_OK) { PyObject *result = Tkinter_Error(v); -#ifdef TKINTER_PROTECT_LOADTK - if (wantTk) { - const char *_tkinter_tk_failed; - _tkinter_tk_failed = Tcl_GetVar(v->interp, - "_tkinter_tk_failed", TCL_GLOBAL_ONLY); - - if ( _tkinter_tk_failed != NULL && - strcmp(_tkinter_tk_failed, "1") == 0) { - tk_load_failed = 1; - } - } -#endif Py_DECREF((PyObject *)v); return (TkappObject *)result; } @@ -886,7 +856,8 @@ asBignumObj(PyObject *value) const char *hexchars; mp_int bigValue; - neg = Py_SIZE(value) < 0; + assert(PyLong_Check(value)); + neg = _PyLong_IsNegative((PyLongObject *)value); hexstr = _PyLong_Format(value, 16); if (hexstr == NULL) return NULL; @@ -1950,7 +1921,7 @@ _tkinter_tkapp_getboolean(TkappObject *self, PyObject *arg) int v; if (PyLong_Check(arg)) { /* int or bool */ - return PyBool_FromLong(Py_SIZE(arg) != 0); + return PyBool_FromLong(!_PyLong_IsZero((PyLongObject *)arg)); } if (PyTclObject_Check(arg)) { @@ -2777,18 +2748,6 @@ _tkinter_tkapp_loadtk_impl(TkappObject *self) const char * _tk_exists = NULL; int err; -#ifdef TKINTER_PROTECT_LOADTK - /* Up to Tk 8.4.13, Tk_Init deadlocks on the second call when the - * first call failed. - * To avoid the deadlock, we just refuse the second call through - * a static variable. 
- */ - if (tk_load_failed) { - PyErr_SetString(Tkinter_TclError, TKINTER_LOADTK_ERRMSG); - return NULL; - } -#endif - /* We want to guard against calling Tk_Init() multiple times */ CHECK_TCL_APPARTMENT; ENTER_TCL @@ -2808,9 +2767,6 @@ _tkinter_tkapp_loadtk_impl(TkappObject *self) if (_tk_exists == NULL || strcmp(_tk_exists, "1") != 0) { if (Tk_Init(interp) == TCL_ERROR) { Tkinter_Error(self); -#ifdef TKINTER_PROTECT_LOADTK - tk_load_failed = 1; -#endif return NULL; } } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 83cde7501176b6..fa380b8b798405 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -2054,6 +2054,26 @@ _winapi__mimetypes_read_windows_registry_impl(PyObject *module, #undef CB_TYPE } +/*[clinic input] +_winapi.NeedCurrentDirectoryForExePath -> bool + + exe_name: LPCWSTR + / +[clinic start generated code]*/ + +static int +_winapi_NeedCurrentDirectoryForExePath_impl(PyObject *module, + LPCWSTR exe_name) +/*[clinic end generated code: output=a65ec879502b58fc input=972aac88a1ec2f00]*/ +{ + BOOL result; + + Py_BEGIN_ALLOW_THREADS + result = NeedCurrentDirectoryForExePathW(exe_name); + Py_END_ALLOW_THREADS + + return result; +} static PyMethodDef winapi_functions[] = { _WINAPI_CLOSEHANDLE_METHODDEF @@ -2089,6 +2109,7 @@ static PyMethodDef winapi_functions[] = { _WINAPI_GETACP_METHODDEF _WINAPI_GETFILETYPE_METHODDEF _WINAPI__MIMETYPES_READ_WINDOWS_REGISTRY_METHODDEF + _WINAPI_NEEDCURRENTDIRECTORYFOREXEPATH_METHODDEF {NULL, NULL} }; diff --git a/Modules/_xxinterpchannelsmodule.c b/Modules/_xxinterpchannelsmodule.c index fead12c963da26..13b005eaef9866 100644 --- a/Modules/_xxinterpchannelsmodule.c +++ b/Modules/_xxinterpchannelsmodule.c @@ -10,9 +10,77 @@ #include "pycore_interpreteridobject.h" +/* +This module has the following process-global state: + +_globals (static struct globals): + module_count (int) + channels (struct _channels): + numopen (int64_t) + next_id; (int64_t) + mutex (PyThread_type_lock) + head (linked list of struct _channelref *): + id (int64_t) + objcount (Py_ssize_t) + next (struct _channelref *): + ... + chan (struct _channel *): + open (int) + mutex (PyThread_type_lock) + closing (struct _channel_closing *): + ref (struct _channelref *): + ... + ends (struct _channelends *): + numsendopen (int64_t) + numrecvopen (int64_t) + send (struct _channelend *): + interp (int64_t) + open (int) + next (struct _channelend *) + recv (struct _channelend *): + ... + queue (struct _channelqueue *): + count (int64_t) + first (struct _channelitem *): + next (struct _channelitem *): + ... + data (_PyCrossInterpreterData *): + data (void *) + obj (PyObject *) + interp (int64_t) + new_object (xid_newobjectfunc) + free (xid_freefunc) + last (struct _channelitem *): + ... + +The above state includes the following allocations by the module: + +* 1 top-level mutex (to protect the rest of the state) +* for each channel: + * 1 struct _channelref + * 1 struct _channel + * 0-1 struct _channel_closing + * 1 struct _channelends + * 2 struct _channelend + * 1 struct _channelqueue +* for each item in each channel: + * 1 struct _channelitem + * 1 _PyCrossInterpreterData + +The only objects in that global state are the references held by each +channel's queue, which are safely managed via the _PyCrossInterpreterData_*() +API.. The module does not create any objects that are shared globally. 
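/*
 * Editor's illustrative sketch (not part of the patch): because the state
 * described above is process-global, each interpreter that loads this module
 * registers a low-level exit callback (via the _Py_AtExit() API added
 * elsewhere in this patch) so that its objects are dropped from every channel
 * when that interpreter is finalized.  The module's exec slot does this with
 * clear_interpreter() further below; the helper names here are hypothetical.
 */
static void
drop_my_interpreter(void *data)
{
    PyInterpreterState *interp = (PyInterpreterState *)data;
    int64_t id = PyInterpreterState_GetID(interp);
    _channels_drop_interpreter(&_globals.channels, id);
}

static int
register_cleanup_sketch(void)
{
    PyInterpreterState *interp = _get_current_interp();
    return _Py_AtExit(interp, drop_my_interpreter, (void *)interp);
}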
+*/ + #define MODULE_NAME "_xxinterpchannels" +#define GLOBAL_MALLOC(TYPE) \ + PyMem_RawMalloc(sizeof(TYPE)) +#define GLOBAL_FREE(VAR) \ + PyMem_RawFree(VAR) + + static PyInterpreterState * _get_current_interp(void) { @@ -106,19 +174,7 @@ _release_xid_data(_PyCrossInterpreterData *data, int ignoreexc) } int res = _PyCrossInterpreterData_Release(data); if (res < 0) { - // XXX Fix this! - /* The owning interpreter is already destroyed. - * Ideally, this shouldn't ever happen. When an interpreter is - * about to be destroyed, we should clear out all of its objects - * from every channel associated with that interpreter. - * For now we hack around that to resolve refleaks, by decref'ing - * the released object here, even if its the wrong interpreter. - * The owning interpreter has already been destroyed - * so we should be okay, especially since the currently - * shareable types are all very basic, with no GC. - * That said, it becomes much messier once interpreters - * no longer share a GIL, so this needs to be fixed before then. */ - _PyCrossInterpreterData_Clear(NULL, data); + /* The owning interpreter is already destroyed. */ if (ignoreexc) { // XXX Emit a warning? PyErr_Clear(); @@ -301,7 +357,7 @@ typedef struct _channelitem { static _channelitem * _channelitem_new(void) { - _channelitem *item = PyMem_NEW(_channelitem, 1); + _channelitem *item = GLOBAL_MALLOC(_channelitem); if (item == NULL) { PyErr_NoMemory(); return NULL; @@ -316,7 +372,8 @@ _channelitem_clear(_channelitem *item) { if (item->data != NULL) { (void)_release_xid_data(item->data, 1); - PyMem_Free(item->data); + // It was allocated in _channel_send(). + GLOBAL_FREE(item->data); item->data = NULL; } item->next = NULL; @@ -326,7 +383,7 @@ static void _channelitem_free(_channelitem *item) { _channelitem_clear(item); - PyMem_Free(item); + GLOBAL_FREE(item); } static void @@ -357,7 +414,7 @@ typedef struct _channelqueue { static _channelqueue * _channelqueue_new(void) { - _channelqueue *queue = PyMem_NEW(_channelqueue, 1); + _channelqueue *queue = GLOBAL_MALLOC(_channelqueue); if (queue == NULL) { PyErr_NoMemory(); return NULL; @@ -381,7 +438,7 @@ static void _channelqueue_free(_channelqueue *queue) { _channelqueue_clear(queue); - PyMem_Free(queue); + GLOBAL_FREE(queue); } static int @@ -420,6 +477,30 @@ _channelqueue_get(_channelqueue *queue) return _channelitem_popped(item); } +static void +_channelqueue_drop_interpreter(_channelqueue *queue, int64_t interp) +{ + _channelitem *prev = NULL; + _channelitem *next = queue->first; + while (next != NULL) { + _channelitem *item = next; + next = item->next; + if (item->data->interp == interp) { + if (prev == NULL) { + queue->first = item->next; + } + else { + prev->next = item->next; + } + _channelitem_free(item); + queue->count -= 1; + } + else { + prev = item; + } + } +} + /* channel-interpreter associations */ struct _channelend; @@ -433,7 +514,7 @@ typedef struct _channelend { static _channelend * _channelend_new(int64_t interp) { - _channelend *end = PyMem_NEW(_channelend, 1); + _channelend *end = GLOBAL_MALLOC(_channelend); if (end == NULL) { PyErr_NoMemory(); return NULL; @@ -447,7 +528,7 @@ _channelend_new(int64_t interp) static void _channelend_free(_channelend *end) { - PyMem_Free(end); + GLOBAL_FREE(end); } static void @@ -492,7 +573,7 @@ typedef struct _channelassociations { static _channelends * _channelends_new(void) { - _channelends *ends = PyMem_NEW(_channelends, 1); + _channelends *ends = GLOBAL_MALLOC(_channelends); if (ends== NULL) { return NULL; } @@ -519,7 +600,7 
@@ static void _channelends_free(_channelends *ends) { _channelends_clear(ends); - PyMem_Free(ends); + GLOBAL_FREE(ends); } static _channelend * @@ -624,6 +705,20 @@ _channelends_close_interpreter(_channelends *ends, int64_t interp, int which) return 0; } +static void +_channelends_drop_interpreter(_channelends *ends, int64_t interp) +{ + _channelend *end; + end = _channelend_find(ends->send, interp, NULL); + if (end != NULL) { + _channelends_close_end(ends, end, 1); + } + end = _channelend_find(ends->recv, interp, NULL); + if (end != NULL) { + _channelends_close_end(ends, end, 0); + } +} + static void _channelends_close_all(_channelends *ends, int which, int force) { @@ -660,20 +755,20 @@ typedef struct _channel { static _PyChannelState * _channel_new(PyThread_type_lock mutex) { - _PyChannelState *chan = PyMem_NEW(_PyChannelState, 1); + _PyChannelState *chan = GLOBAL_MALLOC(_PyChannelState); if (chan == NULL) { return NULL; } chan->mutex = mutex; chan->queue = _channelqueue_new(); if (chan->queue == NULL) { - PyMem_Free(chan); + GLOBAL_FREE(chan); return NULL; } chan->ends = _channelends_new(); if (chan->ends == NULL) { _channelqueue_free(chan->queue); - PyMem_Free(chan); + GLOBAL_FREE(chan); return NULL; } chan->open = 1; @@ -691,7 +786,7 @@ _channel_free(_PyChannelState *chan) PyThread_release_lock(chan->mutex); PyThread_free_lock(chan->mutex); - PyMem_Free(chan); + GLOBAL_FREE(chan); } static int @@ -772,6 +867,18 @@ _channel_close_interpreter(_PyChannelState *chan, int64_t interp, int end) return res; } +static void +_channel_drop_interpreter(_PyChannelState *chan, int64_t interp) +{ + PyThread_acquire_lock(chan->mutex, WAIT_LOCK); + + _channelqueue_drop_interpreter(chan->queue, interp); + _channelends_drop_interpreter(chan->ends, interp); + chan->open = _channelends_is_open(chan->ends); + + PyThread_release_lock(chan->mutex); +} + static int _channel_close_all(_PyChannelState *chan, int end, int force) { @@ -814,7 +921,7 @@ typedef struct _channelref { static _channelref * _channelref_new(int64_t id, _PyChannelState *chan) { - _channelref *ref = PyMem_NEW(_channelref, 1); + _channelref *ref = GLOBAL_MALLOC(_channelref); if (ref == NULL) { return NULL; } @@ -841,7 +948,7 @@ _channelref_free(_channelref *ref) _channel_clear_closing(ref->chan); } //_channelref_clear(ref); - PyMem_Free(ref); + GLOBAL_FREE(ref); } static _channelref * @@ -1144,6 +1251,21 @@ _channels_list_all(_channels *channels, int64_t *count) return cids; } +static void +_channels_drop_interpreter(_channels *channels, int64_t interp) +{ + PyThread_acquire_lock(channels->mutex, WAIT_LOCK); + + _channelref *ref = channels->head; + for (; ref != NULL; ref = ref->next) { + if (ref->chan != NULL) { + _channel_drop_interpreter(ref->chan, interp); + } + } + + PyThread_release_lock(channels->mutex); +} + /* support for closing non-empty channels */ struct _channel_closing { @@ -1163,7 +1285,7 @@ _channel_set_closing(struct _channelref *ref, PyThread_type_lock mutex) { res = ERR_CHANNEL_CLOSED; goto done; } - chan->closing = PyMem_NEW(struct _channel_closing, 1); + chan->closing = GLOBAL_MALLOC(struct _channel_closing); if (chan->closing == NULL) { goto done; } @@ -1179,7 +1301,7 @@ static void _channel_clear_closing(struct _channel *chan) { PyThread_acquire_lock(chan->mutex, WAIT_LOCK); if (chan->closing != NULL) { - PyMem_Free(chan->closing); + GLOBAL_FREE(chan->closing); chan->closing = NULL; } PyThread_release_lock(chan->mutex); @@ -1257,14 +1379,14 @@ _channel_send(_channels *channels, int64_t id, PyObject *obj) } // 
Convert the object to cross-interpreter data. - _PyCrossInterpreterData *data = PyMem_NEW(_PyCrossInterpreterData, 1); + _PyCrossInterpreterData *data = GLOBAL_MALLOC(_PyCrossInterpreterData); if (data == NULL) { PyThread_release_lock(mutex); return -1; } if (_PyObject_GetCrossInterpreterData(obj, data) != 0) { PyThread_release_lock(mutex); - PyMem_Free(data); + GLOBAL_FREE(data); return -1; } @@ -1274,7 +1396,7 @@ _channel_send(_channels *channels, int64_t id, PyObject *obj) if (res != 0) { // We may chain an exception here: (void)_release_xid_data(data, 0); - PyMem_Free(data); + GLOBAL_FREE(data); return res; } @@ -1323,11 +1445,13 @@ _channel_recv(_channels *channels, int64_t id, PyObject **res) if (obj == NULL) { assert(PyErr_Occurred()); (void)_release_xid_data(data, 1); - PyMem_Free(data); + // It was allocated in _channel_send(). + GLOBAL_FREE(data); return -1; } int release_res = _release_xid_data(data, 0); - PyMem_Free(data); + // It was allocated in _channel_send(). + GLOBAL_FREE(data); if (release_res < 0) { // The source interpreter has been destroyed already. assert(PyErr_Occurred()); @@ -1861,6 +1985,19 @@ _global_channels(void) { } +static void +clear_interpreter(void *data) +{ + if (_globals.module_count == 0) { + return; + } + PyInterpreterState *interp = (PyInterpreterState *)data; + assert(interp == _get_current_interp()); + int64_t id = PyInterpreterState_GetID(interp); + _channels_drop_interpreter(&_globals.channels, id); +} + + static PyObject * channel_create(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -2268,6 +2405,10 @@ module_exec(PyObject *mod) goto error; } + // Make sure chnnels drop objects owned by this interpreter + PyInterpreterState *interp = _get_current_interp(); + _Py_AtExit(interp, clear_interpreter, (void *)interp); + return 0; error: diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 76fb87fa3a34e1..884fb0d31f2b7f 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -67,16 +67,7 @@ _release_xid_data(_PyCrossInterpreterData *data, int ignoreexc) } int res = _PyCrossInterpreterData_Release(data); if (res < 0) { - // XXX Fix this! - /* The owning interpreter is already destroyed. - * Ideally, this shouldn't ever happen. (It's highly unlikely.) - * For now we hack around that to resolve refleaks, by decref'ing - * the released object here, even if its the wrong interpreter. - * The owning interpreter has already been destroyed - * so we should be okay, especially since the currently - * shareable types are all very basic, with no GC. - * That said, it becomes much messier once interpreters - * no longer share a GIL, so this needs to be fixed before then. */ + /* The owning interpreter is already destroyed. */ _PyCrossInterpreterData_Clear(NULL, data); if (ignoreexc) { // XXX Emit a warning? @@ -481,7 +472,7 @@ _run_script_in_interpreter(PyObject *mod, PyInterpreterState *interp, } // Run the script. - _sharedexception exc; + _sharedexception exc = {NULL, NULL}; int result = _run_script(interp, codestr, shared, &exc); // Switch back. @@ -526,15 +517,20 @@ interp_create(PyObject *self, PyObject *args, PyObject *kwds) ? (_PyInterpreterConfig)_PyInterpreterConfig_INIT : (_PyInterpreterConfig)_PyInterpreterConfig_LEGACY_INIT; // XXX Possible GILState issues? 
- PyThreadState *tstate = _Py_NewInterpreterFromConfig(&config); + PyThreadState *tstate = NULL; + PyStatus status = _Py_NewInterpreterFromConfig(&tstate, &config); PyThreadState_Swap(save_tstate); - if (tstate == NULL) { + if (PyStatus_Exception(status)) { /* Since no new thread state was created, there is no exception to propagate; raise a fresh one after swapping in the old thread state. */ + _PyErr_SetFromPyStatus(status); + PyObject *exc = PyErr_GetRaisedException(); PyErr_SetString(PyExc_RuntimeError, "interpreter creation failed"); + _PyErr_ChainExceptions1(exc); return NULL; } + assert(tstate != NULL); PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate); PyObject *idobj = _PyInterpreterState_GetIDObject(interp); if (idobj == NULL) { diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c index a1c511e09d704e..47afd7f0751039 100644 --- a/Modules/atexitmodule.c +++ b/Modules/atexitmodule.c @@ -7,6 +7,7 @@ */ #include "Python.h" +#include "pycore_atexit.h" #include "pycore_initconfig.h" // _PyStatus_NO_MEMORY #include "pycore_interp.h" // PyInterpreterState.atexit #include "pycore_pystate.h" // _PyInterpreterState_GET @@ -22,10 +23,36 @@ get_atexit_state(void) } +int +_Py_AtExit(PyInterpreterState *interp, + atexit_datacallbackfunc func, void *data) +{ + assert(interp == _PyInterpreterState_GET()); + atexit_callback *callback = PyMem_Malloc(sizeof(atexit_callback)); + if (callback == NULL) { + PyErr_NoMemory(); + return -1; + } + callback->func = func; + callback->data = data; + callback->next = NULL; + + struct atexit_state *state = &interp->atexit; + if (state->ll_callbacks == NULL) { + state->ll_callbacks = callback; + state->last_ll_callback = callback; + } + else { + state->last_ll_callback->next = callback; + } + return 0; +} + + static void atexit_delete_cb(struct atexit_state *state, int i) { - atexit_callback *cb = state->callbacks[i]; + atexit_py_callback *cb = state->callbacks[i]; state->callbacks[i] = NULL; Py_DECREF(cb->func); @@ -39,7 +66,7 @@ atexit_delete_cb(struct atexit_state *state, int i) static void atexit_cleanup(struct atexit_state *state) { - atexit_callback *cb; + atexit_py_callback *cb; for (int i = 0; i < state->ncallbacks; i++) { cb = state->callbacks[i]; if (cb == NULL) @@ -60,7 +87,7 @@ _PyAtExit_Init(PyInterpreterState *interp) state->callback_len = 32; state->ncallbacks = 0; - state->callbacks = PyMem_New(atexit_callback*, state->callback_len); + state->callbacks = PyMem_New(atexit_py_callback*, state->callback_len); if (state->callbacks == NULL) { return _PyStatus_NO_MEMORY(); } @@ -75,6 +102,18 @@ _PyAtExit_Fini(PyInterpreterState *interp) atexit_cleanup(state); PyMem_Free(state->callbacks); state->callbacks = NULL; + + atexit_callback *next = state->ll_callbacks; + state->ll_callbacks = NULL; + while (next != NULL) { + atexit_callback *callback = next; + next = callback->next; + atexit_datacallbackfunc exitfunc = callback->func; + void *data = callback->data; + // It was allocated in _PyAtExit_AddCallback(). 
+ PyMem_Free(callback); + exitfunc(data); + } } @@ -88,7 +127,7 @@ atexit_callfuncs(struct atexit_state *state) } for (int i = state->ncallbacks - 1; i >= 0; i--) { - atexit_callback *cb = state->callbacks[i]; + atexit_py_callback *cb = state->callbacks[i]; if (cb == NULL) { continue; } @@ -152,17 +191,17 @@ atexit_register(PyObject *module, PyObject *args, PyObject *kwargs) struct atexit_state *state = get_atexit_state(); if (state->ncallbacks >= state->callback_len) { - atexit_callback **r; + atexit_py_callback **r; state->callback_len += 16; - size_t size = sizeof(atexit_callback*) * (size_t)state->callback_len; - r = (atexit_callback**)PyMem_Realloc(state->callbacks, size); + size_t size = sizeof(atexit_py_callback*) * (size_t)state->callback_len; + r = (atexit_py_callback**)PyMem_Realloc(state->callbacks, size); if (r == NULL) { return PyErr_NoMemory(); } state->callbacks = r; } - atexit_callback *callback = PyMem_Malloc(sizeof(atexit_callback)); + atexit_py_callback *callback = PyMem_Malloc(sizeof(atexit_py_callback)); if (callback == NULL) { return PyErr_NoMemory(); } @@ -233,7 +272,7 @@ atexit_unregister(PyObject *module, PyObject *func) struct atexit_state *state = get_atexit_state(); for (int i = 0; i < state->ncallbacks; i++) { - atexit_callback *cb = state->callbacks[i]; + atexit_py_callback *cb = state->callbacks[i]; if (cb == NULL) { continue; } diff --git a/Modules/cjkcodecs/_codecs_cn.c b/Modules/cjkcodecs/_codecs_cn.c index 8a62f7e257c6b1..e2c7908c9bb275 100644 --- a/Modules/cjkcodecs/_codecs_cn.c +++ b/Modules/cjkcodecs/_codecs_cn.c @@ -453,14 +453,14 @@ DECODER(hz) } -BEGIN_MAPPINGS_LIST +BEGIN_MAPPINGS_LIST(4) MAPPING_DECONLY(gb2312) MAPPING_DECONLY(gbkext) MAPPING_ENCONLY(gbcommon) MAPPING_ENCDEC(gb18030ext) END_MAPPINGS_LIST -BEGIN_CODECS_LIST +BEGIN_CODECS_LIST(4) CODEC_STATELESS(gb2312) CODEC_STATELESS(gbk) CODEC_STATELESS(gb18030) diff --git a/Modules/cjkcodecs/_codecs_hk.c b/Modules/cjkcodecs/_codecs_hk.c index 4f21569a0ce73f..e7273bf18e3494 100644 --- a/Modules/cjkcodecs/_codecs_hk.c +++ b/Modules/cjkcodecs/_codecs_hk.c @@ -6,6 +6,10 @@ #define USING_IMPORTED_MAPS +#define CJK_MOD_SPECIFIC_STATE \ + const encode_map *big5_encmap; \ + const decode_map *big5_decmap; + #include "cjkcodecs.h" #include "mappings_hk.h" @@ -13,16 +17,12 @@ * BIG5HKSCS codec */ -static const encode_map *big5_encmap = NULL; -static const decode_map *big5_decmap = NULL; - CODEC_INIT(big5hkscs) { - static int initialized = 0; - - if (!initialized && IMPORT_MAP(tw, big5, &big5_encmap, &big5_decmap)) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(tw, big5, &st->big5_encmap, &st->big5_decmap)) { return -1; - initialized = 1; + } return 0; } @@ -81,7 +81,7 @@ ENCODER(big5hkscs) } } } - else if (TRYMAP_ENC(big5, code, c)) + else if (TRYMAP_ENC_ST(big5, code, c)) ; else return 1; @@ -122,7 +122,7 @@ DECODER(big5hkscs) REQUIRE_INBUF(2); if (0xc6 > c || c > 0xc8 || (c < 0xc7 && INBYTE2 < 0xa1)) { - if (TRYMAP_DEC(big5, decoded, c, INBYTE2)) { + if (TRYMAP_DEC_ST(big5, decoded, c, INBYTE2)) { OUTCHAR(decoded); NEXT_IN(2); continue; @@ -177,14 +177,13 @@ DECODER(big5hkscs) return 0; } - -BEGIN_MAPPINGS_LIST +BEGIN_MAPPINGS_LIST(3) MAPPING_DECONLY(big5hkscs) MAPPING_ENCONLY(big5hkscs_bmp) MAPPING_ENCONLY(big5hkscs_nonbmp) END_MAPPINGS_LIST -BEGIN_CODECS_LIST +BEGIN_CODECS_LIST(1) CODEC_STATELESS_WINIT(big5hkscs) END_CODECS_LIST diff --git a/Modules/cjkcodecs/_codecs_iso2022.c b/Modules/cjkcodecs/_codecs_iso2022.c index 7394cf67e0e7dd..86bb73b982a551 100644 --- 
a/Modules/cjkcodecs/_codecs_iso2022.c +++ b/Modules/cjkcodecs/_codecs_iso2022.c @@ -10,6 +10,27 @@ #define EMULATE_JISX0213_2000_ENCODE_INVALID MAP_UNMAPPABLE #define EMULATE_JISX0213_2000_DECODE_INVALID MAP_UNMAPPABLE +#define CJK_MOD_SPECIFIC_STATE \ + /* kr */ \ + const encode_map *cp949_encmap; \ + const decode_map *ksx1001_decmap; \ + \ + /* jp */ \ + const encode_map *jisxcommon_encmap; \ + const decode_map *jisx0208_decmap; \ + const decode_map *jisx0212_decmap; \ + const encode_map *jisx0213_bmp_encmap; \ + const decode_map *jisx0213_1_bmp_decmap; \ + const decode_map *jisx0213_2_bmp_decmap; \ + const encode_map *jisx0213_emp_encmap; \ + const decode_map *jisx0213_1_emp_decmap; \ + const decode_map *jisx0213_2_emp_decmap; \ + \ + /* cn */ \ + const encode_map *gbcommon_encmap; \ + const decode_map *gb2312_decmap; + + #include "cjkcodecs.h" #include "alg_jisx0201.h" #include "emu_jisx0213_2000.h" @@ -90,7 +111,7 @@ #define STATE_CLEARFLAG(f) do { ((state)->c[4]) &= ~(f); } while (0) #define STATE_CLEARFLAGS() do { ((state)->c[4]) = 0; } while (0) -#define ISO2022_CONFIG ((const struct iso2022_config *)config) +#define ISO2022_CONFIG ((const struct iso2022_config *)(codec->config)) #define CONFIG_ISSET(flag) (ISO2022_CONFIG->flags & (flag)) #define CONFIG_DESIGNATIONS (ISO2022_CONFIG->designations) @@ -101,9 +122,12 @@ /*-*- internal data structures -*-*/ -typedef int (*iso2022_init_func)(void); -typedef Py_UCS4 (*iso2022_decode_func)(const unsigned char *data); -typedef DBCHAR (*iso2022_encode_func)(const Py_UCS4 *data, Py_ssize_t *length); +typedef int (*iso2022_init_func)(const MultibyteCodec *codec); +typedef Py_UCS4 (*iso2022_decode_func)(const MultibyteCodec *codec, + const unsigned char *data); +typedef DBCHAR (*iso2022_encode_func)(const MultibyteCodec *codec, + const Py_UCS4 *data, + Py_ssize_t *length); struct iso2022_designation { unsigned char mark; @@ -124,9 +148,11 @@ struct iso2022_config { CODEC_INIT(iso2022) { const struct iso2022_designation *desig; - for (desig = CONFIG_DESIGNATIONS; desig->mark; desig++) - if (desig->initializer != NULL && desig->initializer() != 0) + for (desig = CONFIG_DESIGNATIONS; desig->mark; desig++) { + if (desig->initializer != NULL && desig->initializer(codec) != 0) { return -1; + } + } return 0; } @@ -182,7 +208,7 @@ ENCODER(iso2022) encoded = MAP_UNMAPPABLE; for (dsg = CONFIG_DESIGNATIONS; dsg->mark; dsg++) { Py_ssize_t length = 1; - encoded = dsg->encoder(&c, &length); + encoded = dsg->encoder(codec, &c, &length); if (encoded == MAP_MULTIPLE_AVAIL) { /* this implementation won't work for pair * of non-bmp characters. 
*/ @@ -193,7 +219,7 @@ ENCODER(iso2022) } else length = 2; - encoded = dsg->encoder(&c, &length); + encoded = dsg->encoder(codec, &c, &length); if (encoded != MAP_UNMAPPABLE) { insize = length; break; @@ -288,7 +314,7 @@ DECODER_RESET(iso2022) } static Py_ssize_t -iso2022processesc(const void *config, MultibyteCodec_State *state, +iso2022processesc(const MultibyteCodec *codec, MultibyteCodec_State *state, const unsigned char **inbuf, Py_ssize_t *inleft) { unsigned char charset, designation; @@ -388,7 +414,7 @@ iso2022processesc(const void *config, MultibyteCodec_State *state, } static Py_ssize_t -iso2022processg2(const void *config, MultibyteCodec_State *state, +iso2022processg2(const MultibyteCodec *codec, MultibyteCodec_State *state, const unsigned char **inbuf, Py_ssize_t *inleft, _PyUnicodeWriter *writer) { @@ -442,14 +468,14 @@ DECODER(iso2022) case ESC: REQUIRE_INBUF(2); if (IS_ISO2022ESC(INBYTE2)) { - err = iso2022processesc(config, state, + err = iso2022processesc(codec, state, inbuf, &inleft); if (err != 0) return err; } else if (CONFIG_ISSET(USE_G2) && INBYTE2 == 'N') {/* SS2 */ REQUIRE_INBUF(3); - err = iso2022processg2(config, state, + err = iso2022processg2(codec, state, inbuf, &inleft, writer); if (err != 0) return err; @@ -517,7 +543,7 @@ DECODER(iso2022) } REQUIRE_INBUF(dsg->width); - decoded = dsg->decoder(*inbuf); + decoded = dsg->decoder(codec, *inbuf); if (decoded == MAP_UNMAPPABLE) return dsg->width; @@ -538,64 +564,38 @@ DECODER(iso2022) return 0; } -/*-*- mapping table holders -*-*/ - -#define ENCMAP(enc) static const encode_map *enc##_encmap = NULL; -#define DECMAP(enc) static const decode_map *enc##_decmap = NULL; - -/* kr */ -ENCMAP(cp949) -DECMAP(ksx1001) - -/* jp */ -ENCMAP(jisxcommon) -DECMAP(jisx0208) -DECMAP(jisx0212) -ENCMAP(jisx0213_bmp) -DECMAP(jisx0213_1_bmp) -DECMAP(jisx0213_2_bmp) -ENCMAP(jisx0213_emp) -DECMAP(jisx0213_1_emp) -DECMAP(jisx0213_2_emp) - -/* cn */ -ENCMAP(gbcommon) -DECMAP(gb2312) - -/* tw */ - /*-*- mapping access functions -*-*/ static int -ksx1001_init(void) +ksx1001_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(kr, cp949, &cp949_encmap, NULL) || - IMPORT_MAP(kr, ksx1001, NULL, &ksx1001_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(kr, cp949, &st->cp949_encmap, NULL) || + IMPORT_MAP(kr, ksx1001, NULL, &st->ksx1001_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -ksx1001_decoder(const unsigned char *data) +ksx1001_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(ksx1001, u, data[0], data[1])) + if (TRYMAP_DEC_ST(ksx1001, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -ksx1001_encoder(const Py_UCS4 *data, Py_ssize_t *length) +ksx1001_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { - if (TRYMAP_ENC(cp949, coded, *data)) { + if (TRYMAP_ENC_ST(cp949, coded, *data)) { if (!(coded & 0x8000)) return coded; } @@ -604,39 +604,39 @@ ksx1001_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -jisx0208_init(void) +jisx0208_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(jp, jisxcommon, &jisxcommon_encmap, NULL) || - IMPORT_MAP(jp, jisx0208, NULL, &jisx0208_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(jp, jisxcommon, &st->jisxcommon_encmap, NULL) || + IMPORT_MAP(jp, jisx0208, 
NULL, &st->jisx0208_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -jisx0208_decoder(const unsigned char *data) +jisx0208_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */ return 0xff3c; - else if (TRYMAP_DEC(jisx0208, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -jisx0208_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0208_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { if (*data == 0xff3c) /* F/W REVERSE SOLIDUS */ return 0x2140; - else if (TRYMAP_ENC(jisxcommon, coded, *data)) { + else if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) { if (!(coded & 0x8000)) return coded; } @@ -645,35 +645,35 @@ jisx0208_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -jisx0212_init(void) +jisx0212_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(jp, jisxcommon, &jisxcommon_encmap, NULL) || - IMPORT_MAP(jp, jisx0212, NULL, &jisx0212_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(jp, jisxcommon, &st->jisxcommon_encmap, NULL) || + IMPORT_MAP(jp, jisx0212, NULL, &st->jisx0212_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -jisx0212_decoder(const unsigned char *data) +jisx0212_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(jisx0212, u, data[0], data[1])) + if (TRYMAP_DEC_ST(jisx0212, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -jisx0212_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0212_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { - if (TRYMAP_ENC(jisxcommon, coded, *data)) { + if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) { if (coded & 0x8000) return coded & 0x7fff; } @@ -682,44 +682,37 @@ jisx0212_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -jisx0213_init(void) +jisx0213_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - jisx0208_init() || - IMPORT_MAP(jp, jisx0213_bmp, - &jisx0213_bmp_encmap, NULL) || - IMPORT_MAP(jp, jisx0213_1_bmp, - NULL, &jisx0213_1_bmp_decmap) || - IMPORT_MAP(jp, jisx0213_2_bmp, - NULL, &jisx0213_2_bmp_decmap) || - IMPORT_MAP(jp, jisx0213_emp, - &jisx0213_emp_encmap, NULL) || - IMPORT_MAP(jp, jisx0213_1_emp, - NULL, &jisx0213_1_emp_decmap) || - IMPORT_MAP(jp, jisx0213_2_emp, - NULL, &jisx0213_2_emp_decmap) || - IMPORT_MAP(jp, jisx0213_pair, &jisx0213_pair_encmap, - &jisx0213_pair_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (jisx0208_init(codec) || + IMPORT_MAP(jp, jisx0213_bmp, &st->jisx0213_bmp_encmap, NULL) || + IMPORT_MAP(jp, jisx0213_1_bmp, NULL, &st->jisx0213_1_bmp_decmap) || + IMPORT_MAP(jp, jisx0213_2_bmp, NULL, &st->jisx0213_2_bmp_decmap) || + IMPORT_MAP(jp, jisx0213_emp, &st->jisx0213_emp_encmap, NULL) || + IMPORT_MAP(jp, jisx0213_1_emp, NULL, &st->jisx0213_1_emp_decmap) || + IMPORT_MAP(jp, jisx0213_2_emp, NULL, &st->jisx0213_2_emp_decmap) || + IMPORT_MAP(jp, jisx0213_pair, + &jisx0213_pair_encmap, &jisx0213_pair_decmap)) + { return -1; - initialized = 1; + } return 0; } #define config ((void *)2000) static Py_UCS4 -jisx0213_2000_1_decoder(const unsigned char *data) +jisx0213_2000_1_decoder(const 
MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - EMULATE_JISX0213_2000_DECODE_PLANE1(u, data[0], data[1]) + EMULATE_JISX0213_2000_DECODE_PLANE1(config, u, data[0], data[1]) else if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */ return 0xff3c; - else if (TRYMAP_DEC(jisx0208, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_bmp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_emp, u, data[0], data[1])) u |= 0x20000; else if (TRYMAP_DEC(jisx0213_pair, u, data[0], data[1])) ; @@ -729,13 +722,13 @@ jisx0213_2000_1_decoder(const unsigned char *data) } static Py_UCS4 -jisx0213_2000_2_decoder(const unsigned char *data) +jisx0213_2000_2_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(u, data[0], data[1]) - if (TRYMAP_DEC(jisx0213_2_bmp, u, data[0], data[1])) + EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(config, u, data[0], data[1]) + if (TRYMAP_DEC_ST(jisx0213_2_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_2_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_2_emp, u, data[0], data[1])) u |= 0x20000; else return MAP_UNMAPPABLE; @@ -744,16 +737,16 @@ jisx0213_2000_2_decoder(const unsigned char *data) #undef config static Py_UCS4 -jisx0213_2004_1_decoder(const unsigned char *data) +jisx0213_2004_1_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */ return 0xff3c; - else if (TRYMAP_DEC(jisx0208, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_bmp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_emp, u, data[0], data[1])) u |= 0x20000; else if (TRYMAP_DEC(jisx0213_pair, u, data[0], data[1])) ; @@ -763,12 +756,12 @@ jisx0213_2004_1_decoder(const unsigned char *data) } static Py_UCS4 -jisx0213_2004_2_decoder(const unsigned char *data) +jisx0213_2004_2_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(jisx0213_2_bmp, u, data[0], data[1])) + if (TRYMAP_DEC_ST(jisx0213_2_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_2_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_2_emp, u, data[0], data[1])) u |= 0x20000; else return MAP_UNMAPPABLE; @@ -776,7 +769,8 @@ jisx0213_2004_2_decoder(const unsigned char *data) } static DBCHAR -jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config) +jisx0213_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length, const void *config) { DBCHAR coded; @@ -784,19 +778,19 @@ jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config) case 1: /* first character */ if (*data >= 0x10000) { if ((*data) >> 16 == 0x20000 >> 16) { - EMULATE_JISX0213_2000_ENCODE_EMP(coded, *data) - else if (TRYMAP_ENC(jisx0213_emp, coded, (*data) & 0xffff)) + EMULATE_JISX0213_2000_ENCODE_EMP(config, coded, *data) + else if (TRYMAP_ENC_ST(jisx0213_emp, coded, (*data) & 0xffff)) return coded; } return MAP_UNMAPPABLE; } - EMULATE_JISX0213_2000_ENCODE_BMP(coded, *data) - else if (TRYMAP_ENC(jisx0213_bmp, coded, *data)) { + EMULATE_JISX0213_2000_ENCODE_BMP(config, coded, *data) + else if 
(TRYMAP_ENC_ST(jisx0213_bmp, coded, *data)) { if (coded == MULTIC) return MAP_MULTIPLE_AVAIL; } - else if (TRYMAP_ENC(jisxcommon, coded, *data)) { + else if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) { if (coded & 0x8000) return MAP_UNMAPPABLE; } @@ -827,9 +821,10 @@ jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config) } static DBCHAR -jisx0213_2000_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2000_1_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, (void *)2000); + DBCHAR coded = jisx0213_encoder(codec, data, length, (void *)2000); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -839,12 +834,13 @@ jisx0213_2000_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2000_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2000_1_encoder_paironly(const MultibyteCodec *codec, + const Py_UCS4 *data, Py_ssize_t *length) { DBCHAR coded; Py_ssize_t ilength = *length; - coded = jisx0213_encoder(data, length, (void *)2000); + coded = jisx0213_encoder(codec, data, length, (void *)2000); switch (ilength) { case 1: if (coded == MAP_MULTIPLE_AVAIL) @@ -862,9 +858,10 @@ jisx0213_2000_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2000_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2000_2_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, (void *)2000); + DBCHAR coded = jisx0213_encoder(codec, data, length, (void *)2000); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -874,9 +871,10 @@ jisx0213_2000_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2004_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2004_1_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, NULL); + DBCHAR coded = jisx0213_encoder(codec, data, length, NULL); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -886,12 +884,13 @@ jisx0213_2004_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2004_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2004_1_encoder_paironly(const MultibyteCodec *codec, + const Py_UCS4 *data, Py_ssize_t *length) { DBCHAR coded; Py_ssize_t ilength = *length; - coded = jisx0213_encoder(data, length, NULL); + coded = jisx0213_encoder(codec, data, length, NULL); switch (ilength) { case 1: if (coded == MAP_MULTIPLE_AVAIL) @@ -909,9 +908,10 @@ jisx0213_2004_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2004_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2004_2_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, NULL); + DBCHAR coded = jisx0213_encoder(codec, data, length, NULL); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -921,7 +921,7 @@ jisx0213_2004_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static Py_UCS4 -jisx0201_r_decoder(const unsigned char *data) +jisx0201_r_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; JISX0201_R_DECODE_CHAR(*data, u) @@ -931,7 +931,8 @@ jisx0201_r_decoder(const 
unsigned char *data) } static DBCHAR -jisx0201_r_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0201_r_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; JISX0201_R_ENCODE(*data, coded) @@ -941,7 +942,7 @@ jisx0201_r_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static Py_UCS4 -jisx0201_k_decoder(const unsigned char *data) +jisx0201_k_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; JISX0201_K_DECODE_CHAR(*data ^ 0x80, u) @@ -951,7 +952,8 @@ jisx0201_k_decoder(const unsigned char *data) } static DBCHAR -jisx0201_k_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0201_k_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; JISX0201_K_ENCODE(*data, coded) @@ -961,35 +963,35 @@ jisx0201_k_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -gb2312_init(void) +gb2312_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(cn, gbcommon, &gbcommon_encmap, NULL) || - IMPORT_MAP(cn, gb2312, NULL, &gb2312_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(cn, gbcommon, &st->gbcommon_encmap, NULL) || + IMPORT_MAP(cn, gb2312, NULL, &st->gb2312_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -gb2312_decoder(const unsigned char *data) +gb2312_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(gb2312, u, data[0], data[1])) + if (TRYMAP_DEC_ST(gb2312, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -gb2312_encoder(const Py_UCS4 *data, Py_ssize_t *length) +gb2312_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { - if (TRYMAP_ENC(gbcommon, coded, *data)) { + if (TRYMAP_ENC_ST(gbcommon, coded, *data)) { if (!(coded & 0x8000)) return coded; } @@ -999,13 +1001,14 @@ gb2312_encoder(const Py_UCS4 *data, Py_ssize_t *length) static Py_UCS4 -dummy_decoder(const unsigned char *data) +dummy_decoder(const MultibyteCodec *codec, const unsigned char *data) { return MAP_UNMAPPABLE; } static DBCHAR -dummy_encoder(const Py_UCS4 *data, Py_ssize_t *length) +dummy_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { return MAP_UNMAPPABLE; } @@ -1119,18 +1122,19 @@ static const struct iso2022_designation iso2022_jp_ext_designations[] = { CONFIGDEF(jp_ext, NO_SHIFT | USE_JISX0208_EXT) -BEGIN_MAPPINGS_LIST +BEGIN_MAPPINGS_LIST(0) /* no mapping table here */ END_MAPPINGS_LIST -#define ISO2022_CODEC(variation) { \ +#define ISO2022_CODEC(variation) \ +NEXT_CODEC = (MultibyteCodec){ \ "iso2022_" #variation, \ &iso2022_##variation##_config, \ iso2022_codec_init, \ _STATEFUL_METHODS(iso2022) \ -}, +}; -BEGIN_CODECS_LIST +BEGIN_CODECS_LIST(7) ISO2022_CODEC(kr) ISO2022_CODEC(jp) ISO2022_CODEC(jp_1) diff --git a/Modules/cjkcodecs/_codecs_jp.c b/Modules/cjkcodecs/_codecs_jp.c index 3a332953b957cb..f7127487aa5f59 100644 --- a/Modules/cjkcodecs/_codecs_jp.c +++ b/Modules/cjkcodecs/_codecs_jp.c @@ -164,7 +164,7 @@ ENCODER(euc_jis_2004) insize = 1; if (c <= 0xFFFF) { - EMULATE_JISX0213_2000_ENCODE_BMP(code, c) + EMULATE_JISX0213_2000_ENCODE_BMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_bmp, code, c)) { if (code == MULTIC) { if (inlen - *inpos < 2) { @@ -215,7 +215,7 @@ ENCODER(euc_jis_2004) return 1; } else if (c >> 16 == EMPBASE >> 16) { - EMULATE_JISX0213_2000_ENCODE_EMP(code, c) + 
EMULATE_JISX0213_2000_ENCODE_EMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_emp, code, c & 0xffff)) ; else @@ -271,7 +271,7 @@ DECODER(euc_jis_2004) c3 = INBYTE3 ^ 0x80; /* JIS X 0213 Plane 2 or JIS X 0212 (see NOTES) */ - EMULATE_JISX0213_2000_DECODE_PLANE2(writer, c2, c3) + EMULATE_JISX0213_2000_DECODE_PLANE2(codec->config, writer, c2, c3) else if (TRYMAP_DEC(jisx0213_2_bmp, decoded, c2, c3)) OUTCHAR(decoded); else if (TRYMAP_DEC(jisx0213_2_emp, code, c2, c3)) { @@ -293,7 +293,7 @@ DECODER(euc_jis_2004) c2 = INBYTE2 ^ 0x80; /* JIS X 0213 Plane 1 */ - EMULATE_JISX0213_2000_DECODE_PLANE1(writer, c, c2) + EMULATE_JISX0213_2000_DECODE_PLANE1(codec->config, writer, c, c2) else if (c == 0x21 && c2 == 0x40) OUTCHAR(0xff3c); else if (c == 0x22 && c2 == 0x32) @@ -582,7 +582,7 @@ ENCODER(shift_jis_2004) if (code == NOCHAR) { if (c <= 0xffff) { - EMULATE_JISX0213_2000_ENCODE_BMP(code, c) + EMULATE_JISX0213_2000_ENCODE_BMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_bmp, code, c)) { if (code == MULTIC) { if (inlen - *inpos < 2) { @@ -625,7 +625,7 @@ ENCODER(shift_jis_2004) return 1; } else if (c >> 16 == EMPBASE >> 16) { - EMULATE_JISX0213_2000_ENCODE_EMP(code, c) + EMULATE_JISX0213_2000_ENCODE_EMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_emp, code, c&0xffff)) ; else @@ -686,7 +686,7 @@ DECODER(shift_jis_2004) if (c1 < 0x5e) { /* Plane 1 */ c1 += 0x21; - EMULATE_JISX0213_2000_DECODE_PLANE1(writer, + EMULATE_JISX0213_2000_DECODE_PLANE1(codec->config, writer, c1, c2) else if (TRYMAP_DEC(jisx0208, decoded, c1, c2)) OUTCHAR(decoded); @@ -708,7 +708,7 @@ DECODER(shift_jis_2004) else c1 -= 0x3d; - EMULATE_JISX0213_2000_DECODE_PLANE2(writer, + EMULATE_JISX0213_2000_DECODE_PLANE2(codec->config, writer, c1, c2) else if (TRYMAP_DEC(jisx0213_2_bmp, decoded, c1, c2)) OUTCHAR(decoded); @@ -733,7 +733,7 @@ DECODER(shift_jis_2004) } -BEGIN_MAPPINGS_LIST +BEGIN_MAPPINGS_LIST(11) MAPPING_DECONLY(jisx0208) MAPPING_DECONLY(jisx0212) MAPPING_ENCONLY(jisxcommon) @@ -747,14 +747,19 @@ BEGIN_MAPPINGS_LIST MAPPING_ENCDEC(cp932ext) END_MAPPINGS_LIST -BEGIN_CODECS_LIST +#define CODEC_CUSTOM(NAME, N, METH) \ + NEXT_CODEC = (MultibyteCodec){NAME, (void *)N, NULL, _STATELESS_METHODS(METH)}; + +BEGIN_CODECS_LIST(7) CODEC_STATELESS(shift_jis) CODEC_STATELESS(cp932) CODEC_STATELESS(euc_jp) CODEC_STATELESS(shift_jis_2004) CODEC_STATELESS(euc_jis_2004) - { "euc_jisx0213", (void *)2000, NULL, _STATELESS_METHODS(euc_jis_2004) }, - { "shift_jisx0213", (void *)2000, NULL, _STATELESS_METHODS(shift_jis_2004) }, + CODEC_CUSTOM("euc_jisx0213", 2000, euc_jis_2004) + CODEC_CUSTOM("shift_jisx0213", 2000, shift_jis_2004) END_CODECS_LIST +#undef CODEC_CUSTOM + I_AM_A_MODULE_FOR(jp) diff --git a/Modules/cjkcodecs/_codecs_kr.c b/Modules/cjkcodecs/_codecs_kr.c index 72641e495af0b0..fd9a9fd92db1fd 100644 --- a/Modules/cjkcodecs/_codecs_kr.c +++ b/Modules/cjkcodecs/_codecs_kr.c @@ -453,13 +453,13 @@ DECODER(johab) #undef FILL -BEGIN_MAPPINGS_LIST +BEGIN_MAPPINGS_LIST(3) MAPPING_DECONLY(ksx1001) MAPPING_ENCONLY(cp949) MAPPING_DECONLY(cp949ext) END_MAPPINGS_LIST -BEGIN_CODECS_LIST +BEGIN_CODECS_LIST(3) CODEC_STATELESS(euc_kr) CODEC_STATELESS(cp949) CODEC_STATELESS(johab) diff --git a/Modules/cjkcodecs/_codecs_tw.c b/Modules/cjkcodecs/_codecs_tw.c index 722b26b128a708..3e440991414434 100644 --- a/Modules/cjkcodecs/_codecs_tw.c +++ b/Modules/cjkcodecs/_codecs_tw.c @@ -130,12 +130,12 @@ DECODER(cp950) -BEGIN_MAPPINGS_LIST +BEGIN_MAPPINGS_LIST(2) MAPPING_ENCDEC(big5) MAPPING_ENCDEC(cp950ext) END_MAPPINGS_LIST 
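(Illustrative aside, not part of the patch.) Each BEGIN_MAPPINGS_LIST/BEGIN_CODECS_LIST now carries an explicit element count because, as the cjkcodecs.h hunk further below shows, the macros no longer emit a file-static array but a function that fills a PyMem_Calloc'd array held in per-module state. The following is a minimal, self-contained sketch of that counted-list expansion; every name in it (entry_t, demo_module_state, add_entries, ENTRY) is invented for the sketch and is not the patch's API.

#include <Python.h>
#include <assert.h>

typedef struct { const char *name; } entry_t;

typedef struct {
    int num_entries;
    entry_t *entry_list;
} demo_module_state;

#define BEGIN_ENTRIES_LIST(NUM)                               \
static int                                                    \
add_entries(demo_module_state *st)                            \
{                                                             \
    int idx = 0;                                              \
    (void)idx;                                                \
    st->num_entries = NUM;                                    \
    st->entry_list = PyMem_Calloc(NUM, sizeof(entry_t));      \
    if (st->entry_list == NULL) {                             \
        return -1;                                            \
    }

#define ENTRY(NAME)                                           \
    st->entry_list[idx++] = (entry_t){#NAME};

#define END_ENTRIES_LIST                                      \
    assert(st->num_entries == idx);                           \
    return 0;                                                 \
}

/* Expands to a single add_entries() that records exactly two entries
 * into the heap-allocated array in the module state. */
BEGIN_ENTRIES_LIST(2)
    ENTRY(big5)
    ENTRY(cp950ext)
END_ENTRIES_LIST

The trailing assert is what makes the explicit count safe: a mismatch between the declared NUM and the number of entries between the macros fails immediately in debug builds instead of silently over- or under-allocating.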
-BEGIN_CODECS_LIST +BEGIN_CODECS_LIST(2) CODEC_STATELESS(big5) CODEC_STATELESS(cp950) END_CODECS_LIST diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h index d9aeec2ff40b08..e553ff3e17b898 100644 --- a/Modules/cjkcodecs/cjkcodecs.h +++ b/Modules/cjkcodecs/cjkcodecs.h @@ -60,37 +60,55 @@ struct pair_encodemap { DBCHAR code; }; -static const MultibyteCodec *codec_list; -static const struct dbcs_map *mapping_list; +#ifndef CJK_MOD_SPECIFIC_STATE +#define CJK_MOD_SPECIFIC_STATE +#endif + +typedef struct _cjk_mod_state { + int num_mappings; + int num_codecs; + struct dbcs_map *mapping_list; + MultibyteCodec *codec_list; + + CJK_MOD_SPECIFIC_STATE +} cjkcodecs_module_state; + +static inline cjkcodecs_module_state * +get_module_state(PyObject *mod) +{ + void *state = PyModule_GetState(mod); + assert(state != NULL); + return (cjkcodecs_module_state *)state; +} #define CODEC_INIT(encoding) \ - static int encoding##_codec_init(const void *config) + static int encoding##_codec_init(const MultibyteCodec *codec) #define ENCODER_INIT(encoding) \ static int encoding##_encode_init( \ - MultibyteCodec_State *state, const void *config) + MultibyteCodec_State *state, const MultibyteCodec *codec) #define ENCODER(encoding) \ static Py_ssize_t encoding##_encode( \ - MultibyteCodec_State *state, const void *config, \ + MultibyteCodec_State *state, const MultibyteCodec *codec, \ int kind, const void *data, \ Py_ssize_t *inpos, Py_ssize_t inlen, \ unsigned char **outbuf, Py_ssize_t outleft, int flags) #define ENCODER_RESET(encoding) \ static Py_ssize_t encoding##_encode_reset( \ - MultibyteCodec_State *state, const void *config, \ + MultibyteCodec_State *state, const MultibyteCodec *codec, \ unsigned char **outbuf, Py_ssize_t outleft) #define DECODER_INIT(encoding) \ static int encoding##_decode_init( \ - MultibyteCodec_State *state, const void *config) + MultibyteCodec_State *state, const MultibyteCodec *codec) #define DECODER(encoding) \ static Py_ssize_t encoding##_decode( \ - MultibyteCodec_State *state, const void *config, \ + MultibyteCodec_State *state, const MultibyteCodec *codec, \ const unsigned char **inbuf, Py_ssize_t inleft, \ _PyUnicodeWriter *writer) #define DECODER_RESET(encoding) \ static Py_ssize_t encoding##_decode_reset( \ - MultibyteCodec_State *state, const void *config) + MultibyteCodec_State *state, const MultibyteCodec *codec) #define NEXT_IN(i) \ do { \ @@ -193,6 +211,9 @@ static const struct dbcs_map *mapping_list; (m)->bottom]) != NOCHAR) #define TRYMAP_ENC(charset, assi, uni) \ _TRYMAP_ENC(&charset##_encmap[(uni) >> 8], assi, (uni) & 0xff) +#define TRYMAP_ENC_ST(charset, assi, uni) \ + _TRYMAP_ENC(&(codec->modstate->charset##_encmap)[(uni) >> 8], \ + assi, (uni) & 0xff) #define _TRYMAP_DEC(m, assi, val) \ ((m)->map != NULL && \ @@ -201,17 +222,45 @@ static const struct dbcs_map *mapping_list; ((assi) = (m)->map[(val) - (m)->bottom]) != UNIINV) #define TRYMAP_DEC(charset, assi, c1, c2) \ _TRYMAP_DEC(&charset##_decmap[c1], assi, c2) +#define TRYMAP_DEC_ST(charset, assi, c1, c2) \ + _TRYMAP_DEC(&(codec->modstate->charset##_decmap)[c1], assi, c2) + +#define BEGIN_MAPPINGS_LIST(NUM) \ +static int \ +add_mappings(cjkcodecs_module_state *st) \ +{ \ + int idx = 0; \ + (void)idx; \ + st->num_mappings = NUM; \ + st->mapping_list = PyMem_Calloc(NUM, sizeof(struct dbcs_map)); \ + if (st->mapping_list == NULL) { \ + return -1; \ + } -#define BEGIN_MAPPINGS_LIST static const struct dbcs_map _mapping_list[] = { -#define MAPPING_ENCONLY(enc) {#enc, (void*)enc##_encmap, NULL}, 
-#define MAPPING_DECONLY(enc) {#enc, NULL, (void*)enc##_decmap}, -#define MAPPING_ENCDEC(enc) {#enc, (void*)enc##_encmap, (void*)enc##_decmap}, -#define END_MAPPINGS_LIST \ - {"", NULL, NULL} }; \ - static const struct dbcs_map *mapping_list = \ - (const struct dbcs_map *)_mapping_list; +#define MAPPING_ENCONLY(enc) \ + st->mapping_list[idx++] = (struct dbcs_map){#enc, (void*)enc##_encmap, NULL}; +#define MAPPING_DECONLY(enc) \ + st->mapping_list[idx++] = (struct dbcs_map){#enc, NULL, (void*)enc##_decmap}; +#define MAPPING_ENCDEC(enc) \ + st->mapping_list[idx++] = (struct dbcs_map){#enc, (void*)enc##_encmap, (void*)enc##_decmap}; + +#define END_MAPPINGS_LIST \ + assert(st->num_mappings == idx); \ + return 0; \ +} + +#define BEGIN_CODECS_LIST(NUM) \ +static int \ +add_codecs(cjkcodecs_module_state *st) \ +{ \ + int idx = 0; \ + (void)idx; \ + st->num_codecs = NUM; \ + st->codec_list = PyMem_Calloc(NUM, sizeof(MultibyteCodec)); \ + if (st->codec_list == NULL) { \ + return -1; \ + } -#define BEGIN_CODECS_LIST static const MultibyteCodec _codec_list[] = { #define _STATEFUL_METHODS(enc) \ enc##_encode, \ enc##_encode_init, \ @@ -222,23 +271,24 @@ static const struct dbcs_map *mapping_list; #define _STATELESS_METHODS(enc) \ enc##_encode, NULL, NULL, \ enc##_decode, NULL, NULL, -#define CODEC_STATEFUL(enc) { \ - #enc, NULL, NULL, \ - _STATEFUL_METHODS(enc) \ -}, -#define CODEC_STATELESS(enc) { \ - #enc, NULL, NULL, \ - _STATELESS_METHODS(enc) \ -}, -#define CODEC_STATELESS_WINIT(enc) { \ - #enc, NULL, \ - enc##_codec_init, \ - _STATELESS_METHODS(enc) \ -}, -#define END_CODECS_LIST \ - {"", NULL,} }; \ - static const MultibyteCodec *codec_list = \ - (const MultibyteCodec *)_codec_list; + +#define NEXT_CODEC \ + st->codec_list[idx++] + +#define CODEC_STATEFUL(enc) \ + NEXT_CODEC = (MultibyteCodec){#enc, NULL, NULL, _STATEFUL_METHODS(enc)}; +#define CODEC_STATELESS(enc) \ + NEXT_CODEC = (MultibyteCodec){#enc, NULL, NULL, _STATELESS_METHODS(enc)}; +#define CODEC_STATELESS_WINIT(enc) \ + NEXT_CODEC = (MultibyteCodec){#enc, NULL, enc##_codec_init, _STATELESS_METHODS(enc)}; + +#define END_CODECS_LIST \ + assert(st->num_codecs == idx); \ + for (int i = 0; i < st->num_codecs; i++) { \ + st->codec_list[i].modstate = st; \ + } \ + return 0; \ +} @@ -248,59 +298,102 @@ getmultibytecodec(void) return _PyImport_GetModuleAttrString("_multibytecodec", "__create_codec"); } -static PyObject * -getcodec(PyObject *self, PyObject *encoding) +static void +destroy_codec_capsule(PyObject *capsule) { - PyObject *codecobj, *r, *cofunc; - const MultibyteCodec *codec; - const char *enc; + void *ptr = PyCapsule_GetPointer(capsule, CODEC_CAPSULE); + codec_capsule *data = (codec_capsule *)ptr; + Py_DECREF(data->cjk_module); + PyMem_Free(ptr); +} - if (!PyUnicode_Check(encoding)) { - PyErr_SetString(PyExc_TypeError, - "encoding name must be a string."); +static codec_capsule * +capsulate_codec(PyObject *mod, const MultibyteCodec *codec) +{ + codec_capsule *data = PyMem_Malloc(sizeof(codec_capsule)); + if (data == NULL) { + PyErr_NoMemory(); return NULL; } - enc = PyUnicode_AsUTF8(encoding); - if (enc == NULL) - return NULL; + data->codec = codec; + data->cjk_module = Py_NewRef(mod); + return data; +} - cofunc = getmultibytecodec(); - if (cofunc == NULL) +static PyObject * +_getcodec(PyObject *self, const MultibyteCodec *codec) +{ + PyObject *cofunc = getmultibytecodec(); + if (cofunc == NULL) { return NULL; + } - for (codec = codec_list; codec->encoding[0]; codec++) - if (strcmp(codec->encoding, enc) == 0) - break; - - if 
(codec->encoding[0] == '\0') { - PyErr_SetString(PyExc_LookupError, - "no such codec is supported."); + codec_capsule *data = capsulate_codec(self, codec); + if (data == NULL) { + Py_DECREF(cofunc); return NULL; } - - codecobj = PyCapsule_New((void *)codec, PyMultibyteCodec_CAPSULE_NAME, NULL); - if (codecobj == NULL) + PyObject *codecobj = PyCapsule_New(data, CODEC_CAPSULE, + destroy_codec_capsule); + if (codecobj == NULL) { + PyMem_Free(data); + Py_DECREF(cofunc); return NULL; + } - r = PyObject_CallOneArg(cofunc, codecobj); + PyObject *res = PyObject_CallOneArg(cofunc, codecobj); Py_DECREF(codecobj); Py_DECREF(cofunc); + return res; +} + +static PyObject * +getcodec(PyObject *self, PyObject *encoding) +{ + if (!PyUnicode_Check(encoding)) { + PyErr_SetString(PyExc_TypeError, + "encoding name must be a string."); + return NULL; + } + const char *enc = PyUnicode_AsUTF8(encoding); + if (enc == NULL) { + return NULL; + } + + cjkcodecs_module_state *st = get_module_state(self); + for (int i = 0; i < st->num_codecs; i++) { + const MultibyteCodec *codec = &st->codec_list[i]; + if (strcmp(codec->encoding, enc) == 0) { + return _getcodec(self, codec); + } + } - return r; + PyErr_SetString(PyExc_LookupError, + "no such codec is supported."); + return NULL; } +static int add_mappings(cjkcodecs_module_state *); +static int add_codecs(cjkcodecs_module_state *); static int register_maps(PyObject *module) { - const struct dbcs_map *h; + // Init module state. + cjkcodecs_module_state *st = get_module_state(module); + if (add_mappings(st) < 0) { + return -1; + } + if (add_codecs(st) < 0) { + return -1; + } - for (h = mapping_list; h->charset[0] != '\0'; h++) { + for (int i = 0; i < st->num_mappings; i++) { + const struct dbcs_map *h = &st->mapping_list[i]; char mhname[256] = "__map_"; strcpy(mhname + sizeof("__map_") - 1, h->charset); - PyObject *capsule = PyCapsule_New((void *)h, - PyMultibyteCodec_CAPSULE_NAME, NULL); + PyObject *capsule = PyCapsule_New((void *)h, MAP_CAPSULE, NULL); if (capsule == NULL) { return -1; } @@ -364,14 +457,14 @@ importmap(const char *modname, const char *symbol, o = PyObject_GetAttrString(mod, symbol); if (o == NULL) goto errorexit; - else if (!PyCapsule_IsValid(o, PyMultibyteCodec_CAPSULE_NAME)) { + else if (!PyCapsule_IsValid(o, MAP_CAPSULE)) { PyErr_SetString(PyExc_ValueError, "map data must be a Capsule."); goto errorexit; } else { struct dbcs_map *map; - map = PyCapsule_GetPointer(o, PyMultibyteCodec_CAPSULE_NAME); + map = PyCapsule_GetPointer(o, MAP_CAPSULE); if (encmap != NULL) *encmap = map->encmap; if (decmap != NULL) @@ -394,6 +487,13 @@ _cjk_exec(PyObject *module) return register_maps(module); } +static void +_cjk_free(void *mod) +{ + cjkcodecs_module_state *st = get_module_state((PyObject *)mod); + PyMem_Free(st->mapping_list); + PyMem_Free(st->codec_list); +} static struct PyMethodDef _cjk_methods[] = { {"getcodec", (PyCFunction)getcodec, METH_O, ""}, @@ -409,9 +509,10 @@ static PyModuleDef_Slot _cjk_slots[] = { static struct PyModuleDef _cjk_module = { \ PyModuleDef_HEAD_INIT, \ .m_name = "_codecs_"#loc, \ - .m_size = 0, \ + .m_size = sizeof(cjkcodecs_module_state), \ .m_methods = _cjk_methods, \ .m_slots = _cjk_slots, \ + .m_free = _cjk_free, \ }; \ \ PyMODINIT_FUNC \ diff --git a/Modules/cjkcodecs/emu_jisx0213_2000.h b/Modules/cjkcodecs/emu_jisx0213_2000.h index a5d5a7063d37e6..c30c948a2b1279 100644 --- a/Modules/cjkcodecs/emu_jisx0213_2000.h +++ b/Modules/cjkcodecs/emu_jisx0213_2000.h @@ -5,8 +5,8 @@ # define EMULATE_JISX0213_2000_ENCODE_INVALID 1 #endif 
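(Illustrative aside, not part of the patch.) In the getcodec()/_getcodec() rewrite above, a codec is no longer exposed as a bare pointer into a static table; it is wrapped in a capsule whose payload also holds a strong reference to the defining module, and the capsule destructor releases that reference. A hedged, self-contained sketch of the same ownership pattern follows; DEMO_CAPSULE_NAME, demo_capsule_data, demo_make_capsule and demo_capsule_destructor are invented names, not the module's real identifiers.

#include <Python.h>

#define DEMO_CAPSULE_NAME "demo.codec"

typedef struct {
    const void *payload;     /* borrowed pointer into the module's state */
    PyObject *owner_module;  /* strong reference keeping that state alive */
} demo_capsule_data;

static void
demo_capsule_destructor(PyObject *capsule)
{
    demo_capsule_data *data = PyCapsule_GetPointer(capsule, DEMO_CAPSULE_NAME);
    Py_DECREF(data->owner_module);
    PyMem_Free(data);
}

static PyObject *
demo_make_capsule(PyObject *module, const void *payload)
{
    demo_capsule_data *data = PyMem_Malloc(sizeof(demo_capsule_data));
    if (data == NULL) {
        return PyErr_NoMemory();
    }
    data->payload = payload;
    data->owner_module = Py_NewRef(module);

    PyObject *capsule = PyCapsule_New(data, DEMO_CAPSULE_NAME,
                                      demo_capsule_destructor);
    if (capsule == NULL) {
        Py_DECREF(data->owner_module);
        PyMem_Free(data);
        return NULL;
    }
    return capsule;
}

Holding the module reference from inside the capsule is the design point: the per-module codec and mapping arrays stay alive for as long as any capsule created from them is still reachable, even if the module itself is otherwise unreferenced.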
-#define EMULATE_JISX0213_2000_ENCODE_BMP(assi, c) \ - if (config == (void *)2000 && ( \ +#define EMULATE_JISX0213_2000_ENCODE_BMP(config, assi, c) \ + if ((config) == (void *)2000 && ( \ (c) == 0x9B1C || (c) == 0x4FF1 || \ (c) == 0x525D || (c) == 0x541E || \ (c) == 0x5653 || (c) == 0x59F8 || \ @@ -14,12 +14,12 @@ (c) == 0x7626 || (c) == 0x7E6B)) { \ return EMULATE_JISX0213_2000_ENCODE_INVALID; \ } \ - else if (config == (void *)2000 && (c) == 0x9B1D) { \ + else if ((config) == (void *)2000 && (c) == 0x9B1D) { \ (assi) = 0x8000 | 0x7d3b; \ } -#define EMULATE_JISX0213_2000_ENCODE_EMP(assi, c) \ - if (config == (void *)2000 && (c) == 0x20B9F) { \ +#define EMULATE_JISX0213_2000_ENCODE_EMP(config, assi, c) \ + if ((config) == (void *)2000 && (c) == 0x20B9F) { \ return EMULATE_JISX0213_2000_ENCODE_INVALID; \ } @@ -27,8 +27,8 @@ # define EMULATE_JISX0213_2000_DECODE_INVALID 2 #endif -#define EMULATE_JISX0213_2000_DECODE_PLANE1(assi, c1, c2) \ - if (config == (void *)2000 && \ +#define EMULATE_JISX0213_2000_DECODE_PLANE1(config, assi, c1, c2) \ + if ((config) == (void *)2000 && \ (((c1) == 0x2E && (c2) == 0x21) || \ ((c1) == 0x2F && (c2) == 0x7E) || \ ((c1) == 0x4F && (c2) == 0x54) || \ @@ -42,13 +42,13 @@ return EMULATE_JISX0213_2000_DECODE_INVALID; \ } -#define EMULATE_JISX0213_2000_DECODE_PLANE2(writer, c1, c2) \ - if (config == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ +#define EMULATE_JISX0213_2000_DECODE_PLANE2(config, writer, c1, c2) \ + if ((config) == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ OUTCHAR(0x9B1D); \ } -#define EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(assi, c1, c2) \ - if (config == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ +#define EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(config, assi, c1, c2) \ + if ((config) == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ (assi) = 0x9B1D; \ } diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 8564494f6262fb..233fc3020fd6a8 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -19,26 +19,27 @@ typedef struct { PyTypeObject *writer_type; PyTypeObject *multibytecodec_type; PyObject *str_write; -} _multibytecodec_state; +} module_state; -static _multibytecodec_state * -_multibytecodec_get_state(PyObject *module) +static module_state * +get_module_state(PyObject *module) { - _multibytecodec_state *state = PyModule_GetState(module); + module_state *state = PyModule_GetState(module); assert(state != NULL); return state; } static struct PyModuleDef _multibytecodecmodule; -static _multibytecodec_state * -_multibyte_codec_find_state_by_type(PyTypeObject *type) + +static module_state * +find_state_by_def(PyTypeObject *type) { PyObject *module = PyType_GetModuleByDef(type, &_multibytecodecmodule); assert(module != NULL); - return _multibytecodec_get_state(module); + return get_module_state(module); } -#define clinic_get_state() _multibyte_codec_find_state_by_type(type) +#define clinic_get_state() find_state_by_def(type) /*[clinic input] module _multibytecodec class _multibytecodec.MultibyteCodec "MultibyteCodecObject *" "clinic_get_state()->multibytecodec_type" @@ -66,7 +67,7 @@ typedef struct { static char *incnewkwarglist[] = {"errors", NULL}; static char *streamkwarglist[] = {"stream", "errors", NULL}; -static PyObject *multibytecodec_encode(MultibyteCodec *, +static PyObject *multibytecodec_encode(const MultibyteCodec *, MultibyteCodec_State *, PyObject *, Py_ssize_t *, PyObject *, int); @@ -220,7 +221,7 @@ 
expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize) */ static int -multibytecodec_encerror(MultibyteCodec *codec, +multibytecodec_encerror(const MultibyteCodec *codec, MultibyteCodec_State *state, MultibyteEncodeBuffer *buf, PyObject *errors, Py_ssize_t e) @@ -271,7 +272,7 @@ multibytecodec_encerror(MultibyteCodec *codec, for (;;) { Py_ssize_t outleft = (Py_ssize_t)(buf->outbuf_end - buf->outbuf); - r = codec->encode(state, codec->config, + r = codec->encode(state, codec, kind, data, &inpos, 1, &buf->outbuf, outleft, 0); if (r == MBERR_TOOSMALL) { @@ -374,7 +375,7 @@ multibytecodec_encerror(MultibyteCodec *codec, } static int -multibytecodec_decerror(MultibyteCodec *codec, +multibytecodec_decerror(const MultibyteCodec *codec, MultibyteCodec_State *state, MultibyteDecodeBuffer *buf, PyObject *errors, Py_ssize_t e) @@ -478,7 +479,7 @@ multibytecodec_decerror(MultibyteCodec *codec, } static PyObject * -multibytecodec_encode(MultibyteCodec *codec, +multibytecodec_encode(const MultibyteCodec *codec, MultibyteCodec_State *state, PyObject *text, Py_ssize_t *inpos_t, PyObject *errors, int flags) @@ -520,7 +521,7 @@ multibytecodec_encode(MultibyteCodec *codec, * error callbacks can relocate the cursor anywhere on buffer*/ Py_ssize_t outleft = (Py_ssize_t)(buf.outbuf_end - buf.outbuf); - r = codec->encode(state, codec->config, + r = codec->encode(state, codec, kind, data, &buf.inpos, buf.inlen, &buf.outbuf, outleft, flags); @@ -537,7 +538,7 @@ multibytecodec_encode(MultibyteCodec *codec, Py_ssize_t outleft; outleft = (Py_ssize_t)(buf.outbuf_end - buf.outbuf); - r = codec->encreset(state, codec->config, &buf.outbuf, + r = codec->encreset(state, codec, &buf.outbuf, outleft); if (r == 0) break; @@ -615,7 +616,7 @@ _multibytecodec_MultibyteCodec_encode_impl(MultibyteCodecObject *self, } if (self->codec->encinit != NULL && - self->codec->encinit(&state, self->codec->config) != 0) + self->codec->encinit(&state, self->codec) != 0) goto errorexit; r = multibytecodec_encode(self->codec, &state, input, NULL, errorcb, @@ -679,7 +680,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self, buf.inbuf_end = buf.inbuf_top + datalen; if (self->codec->decinit != NULL && - self->codec->decinit(&state, self->codec->config) != 0) + self->codec->decinit(&state, self->codec) != 0) goto errorexit; while (buf.inbuf < buf.inbuf_end) { @@ -687,7 +688,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self, inleft = (Py_ssize_t)(buf.inbuf_end - buf.inbuf); - r = self->codec->decode(&state, self->codec->config, + r = self->codec->decode(&state, self->codec, &buf.inbuf, inleft, &buf.writer); if (r == 0) break; @@ -719,9 +720,17 @@ static struct PyMethodDef multibytecodec_methods[] = { }; static int -multibytecodec_traverse(PyObject *self, visitproc visit, void *arg) +multibytecodec_clear(MultibyteCodecObject *self) +{ + Py_CLEAR(self->cjk_module); + return 0; +} + +static int +multibytecodec_traverse(MultibyteCodecObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->cjk_module); return 0; } @@ -730,6 +739,7 @@ multibytecodec_dealloc(MultibyteCodecObject *self) { PyObject_GC_UnTrack(self); PyTypeObject *tp = Py_TYPE(self); + (void)multibytecodec_clear(self); tp->tp_free(self); Py_DECREF(tp); } @@ -739,6 +749,7 @@ static PyType_Slot multibytecodec_slots[] = { {Py_tp_getattro, PyObject_GenericGetAttr}, {Py_tp_methods, multibytecodec_methods}, {Py_tp_traverse, multibytecodec_traverse}, + {Py_tp_clear, multibytecodec_clear}, {0, NULL}, }; @@ -877,7 
+888,7 @@ decoder_feed_buffer(MultibyteStatefulDecoderContext *ctx, inleft = (Py_ssize_t)(buf->inbuf_end - buf->inbuf); - r = ctx->codec->decode(&ctx->state, ctx->codec->config, + r = ctx->codec->decode(&ctx->state, ctx->codec, &buf->inbuf, inleft, &buf->writer); if (r == 0 || r == MBERR_TOOFEW) break; @@ -1004,7 +1015,7 @@ _multibytecodec_MultibyteIncrementalEncoder_reset_impl(MultibyteIncrementalEncod Py_ssize_t r; if (self->codec->encreset != NULL) { outbuf = buffer; - r = self->codec->encreset(&self->state, self->codec->config, + r = self->codec->encreset(&self->state, self->codec, &outbuf, sizeof(buffer)); if (r != 0) return NULL; @@ -1040,7 +1051,7 @@ mbiencoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (codec == NULL) goto errorexit; - _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type); + module_state *state = find_state_by_def(type); if (!MultibyteCodec_Check(state, codec)) { PyErr_SetString(PyExc_TypeError, "codec is unexpected type"); goto errorexit; @@ -1052,7 +1063,7 @@ mbiencoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->encinit != NULL && - self->codec->encinit(&self->state, self->codec->config) != 0) + self->codec->encinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1281,7 +1292,7 @@ _multibytecodec_MultibyteIncrementalDecoder_reset_impl(MultibyteIncrementalDecod /*[clinic end generated code: output=da423b1782c23ed1 input=3b63b3be85b2fb45]*/ { if (self->codec->decreset != NULL && - self->codec->decreset(&self->state, self->codec->config) != 0) + self->codec->decreset(&self->state, self->codec) != 0) return NULL; self->pendingsize = 0; @@ -1315,7 +1326,7 @@ mbidecoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (codec == NULL) goto errorexit; - _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type); + module_state *state = find_state_by_def(type); if (!MultibyteCodec_Check(state, codec)) { PyErr_SetString(PyExc_TypeError, "codec is unexpected type"); goto errorexit; @@ -1327,7 +1338,7 @@ mbidecoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->decinit != NULL && - self->codec->decinit(&self->state, self->codec->config) != 0) + self->codec->decinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1589,7 +1600,7 @@ _multibytecodec_MultibyteStreamReader_reset_impl(MultibyteStreamReaderObject *se /*[clinic end generated code: output=138490370a680abc input=5d4140db84b5e1e2]*/ { if (self->codec->decreset != NULL && - self->codec->decreset(&self->state, self->codec->config) != 0) + self->codec->decreset(&self->state, self->codec) != 0) return NULL; self->pendingsize = 0; @@ -1630,7 +1641,7 @@ mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (codec == NULL) goto errorexit; - _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type); + module_state *state = find_state_by_def(type); if (!MultibyteCodec_Check(state, codec)) { PyErr_SetString(PyExc_TypeError, "codec is unexpected type"); goto errorexit; @@ -1643,7 +1654,7 @@ mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->decinit != NULL && - self->codec->decinit(&self->state, self->codec->config) != 0) + self->codec->decinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1735,7 +1746,7 @@ 
_multibytecodec_MultibyteStreamWriter_write_impl(MultibyteStreamWriterObject *se PyObject *strobj) /*[clinic end generated code: output=68ade3aea26410ac input=199f26f68bd8425a]*/ { - _multibytecodec_state *state = PyType_GetModuleState(cls); + module_state *state = PyType_GetModuleState(cls); assert(state != NULL); if (mbstreamwriter_iwrite(self, strobj, state->str_write)) { return NULL; @@ -1766,7 +1777,7 @@ _multibytecodec_MultibyteStreamWriter_writelines_impl(MultibyteStreamWriterObjec return NULL; } - _multibytecodec_state *state = PyType_GetModuleState(cls); + module_state *state = PyType_GetModuleState(cls); assert(state != NULL); for (i = 0; i < PySequence_Length(lines); i++) { /* length can be changed even within this loop */ @@ -1817,7 +1828,7 @@ _multibytecodec_MultibyteStreamWriter_reset_impl(MultibyteStreamWriterObject *se assert(PyBytes_Check(pwrt)); - _multibytecodec_state *state = PyType_GetModuleState(cls); + module_state *state = PyType_GetModuleState(cls); assert(state != NULL); if (PyBytes_Size(pwrt) > 0) { @@ -1853,7 +1864,7 @@ mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (codec == NULL) goto errorexit; - _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type); + module_state *state = find_state_by_def(type); if (!MultibyteCodec_Check(state, codec)) { PyErr_SetString(PyExc_TypeError, "codec is unexpected type"); goto errorexit; @@ -1866,7 +1877,7 @@ mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->encinit != NULL && - self->codec->encinit(&self->state, self->codec->config) != 0) + self->codec->encinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1952,22 +1963,23 @@ _multibytecodec___create_codec(PyObject *module, PyObject *arg) /*[clinic end generated code: output=cfa3dce8260e809d input=6840b2a6b183fcfa]*/ { MultibyteCodecObject *self; - MultibyteCodec *codec; - if (!PyCapsule_IsValid(arg, PyMultibyteCodec_CAPSULE_NAME)) { + if (!PyCapsule_IsValid(arg, CODEC_CAPSULE)) { PyErr_SetString(PyExc_ValueError, "argument type invalid"); return NULL; } - codec = PyCapsule_GetPointer(arg, PyMultibyteCodec_CAPSULE_NAME); - if (codec->codecinit != NULL && codec->codecinit(codec->config) != 0) + codec_capsule *data = PyCapsule_GetPointer(arg, CODEC_CAPSULE); + const MultibyteCodec *codec = data->codec; + if (codec->codecinit != NULL && codec->codecinit(codec) != 0) return NULL; - _multibytecodec_state *state = _multibytecodec_get_state(module); + module_state *state = get_module_state(module); self = PyObject_GC_New(MultibyteCodecObject, state->multibytecodec_type); if (self == NULL) return NULL; self->codec = codec; + self->cjk_module = Py_NewRef(data->cjk_module); PyObject_GC_Track(self); return (PyObject *)self; @@ -1976,7 +1988,7 @@ _multibytecodec___create_codec(PyObject *module, PyObject *arg) static int _multibytecodec_traverse(PyObject *mod, visitproc visit, void *arg) { - _multibytecodec_state *state = _multibytecodec_get_state(mod); + module_state *state = get_module_state(mod); Py_VISIT(state->multibytecodec_type); Py_VISIT(state->encoder_type); Py_VISIT(state->decoder_type); @@ -1988,7 +2000,7 @@ _multibytecodec_traverse(PyObject *mod, visitproc visit, void *arg) static int _multibytecodec_clear(PyObject *mod) { - _multibytecodec_state *state = _multibytecodec_get_state(mod); + module_state *state = get_module_state(mod); Py_CLEAR(state->multibytecodec_type); Py_CLEAR(state->encoder_type); Py_CLEAR(state->decoder_type); 
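(Illustrative aside, not part of the patch.) The _multibytecodec changes above consistently reach module state in one of two ways: from a type object via PyType_GetModuleByDef (find_state_by_def), or from the defining class that METH_METHOD functions receive, via PyType_GetModuleState. A rough sketch of both lookups under assumed names (demo_module_def, demo_state, demo_method); it is illustrative only and omits error handling beyond asserts.

#include <Python.h>
#include <assert.h>

typedef struct {
    PyTypeObject *demo_type;    /* heap type stored in module state */
} demo_state;

/* The sketch assumes types are created against this def with
 * PyType_FromModuleAndSpec(), so PyType_GetModuleByDef can find it. */
static struct PyModuleDef demo_module_def;

static demo_state *
find_state_by_def(PyTypeObject *type)
{
    /* Walk from any type created for this module back to the module,
     * then fetch its state block. */
    PyObject *mod = PyType_GetModuleByDef(type, &demo_module_def);
    assert(mod != NULL);
    return (demo_state *)PyModule_GetState(mod);
}

/* METH_METHOD functions are handed the defining class directly, so the
 * state can be reached without touching any global. */
static PyObject *
demo_method(PyObject *self, PyTypeObject *defining_class,
            PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
    (void)self; (void)args; (void)nargs; (void)kwnames;
    demo_state *st = PyType_GetModuleState(defining_class);
    assert(st != NULL);
    (void)st;
    Py_RETURN_NONE;
}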
@@ -2022,7 +2034,7 @@ _multibytecodec_free(void *mod) static int _multibytecodec_exec(PyObject *mod) { - _multibytecodec_state *state = _multibytecodec_get_state(mod); + module_state *state = get_module_state(mod); state->str_write = PyUnicode_InternFromString("write"); if (state->str_write == NULL) { return -1; @@ -2056,7 +2068,7 @@ static PyModuleDef_Slot _multibytecodec_slots[] = { static struct PyModuleDef _multibytecodecmodule = { .m_base = PyModuleDef_HEAD_INIT, .m_name = "_multibytecodec", - .m_size = sizeof(_multibytecodec_state), + .m_size = sizeof(module_state), .m_methods = _multibytecodec_methods, .m_slots = _multibytecodec_slots, .m_traverse = _multibytecodec_traverse, diff --git a/Modules/cjkcodecs/multibytecodec.h b/Modules/cjkcodecs/multibytecodec.h index 69404ba96aa1f0..f59362205d26fc 100644 --- a/Modules/cjkcodecs/multibytecodec.h +++ b/Modules/cjkcodecs/multibytecodec.h @@ -27,28 +27,31 @@ typedef struct { unsigned char c[8]; } MultibyteCodec_State; -typedef int (*mbcodec_init)(const void *config); +struct _cjk_mod_state; +struct _multibyte_codec; + +typedef int (*mbcodec_init)(const struct _multibyte_codec *codec); typedef Py_ssize_t (*mbencode_func)(MultibyteCodec_State *state, - const void *config, + const struct _multibyte_codec *codec, int kind, const void *data, Py_ssize_t *inpos, Py_ssize_t inlen, unsigned char **outbuf, Py_ssize_t outleft, int flags); typedef int (*mbencodeinit_func)(MultibyteCodec_State *state, - const void *config); + const struct _multibyte_codec *codec); typedef Py_ssize_t (*mbencodereset_func)(MultibyteCodec_State *state, - const void *config, + const struct _multibyte_codec *codec, unsigned char **outbuf, Py_ssize_t outleft); typedef Py_ssize_t (*mbdecode_func)(MultibyteCodec_State *state, - const void *config, + const struct _multibyte_codec *codec, const unsigned char **inbuf, Py_ssize_t inleft, _PyUnicodeWriter *writer); typedef int (*mbdecodeinit_func)(MultibyteCodec_State *state, - const void *config); + const struct _multibyte_codec *codec); typedef Py_ssize_t (*mbdecodereset_func)(MultibyteCodec_State *state, - const void *config); + const struct _multibyte_codec *codec); -typedef struct { +typedef struct _multibyte_codec { const char *encoding; const void *config; mbcodec_init codecinit; @@ -58,18 +61,20 @@ typedef struct { mbdecode_func decode; mbdecodeinit_func decinit; mbdecodereset_func decreset; + struct _cjk_mod_state *modstate; } MultibyteCodec; typedef struct { PyObject_HEAD - MultibyteCodec *codec; + const MultibyteCodec *codec; + PyObject *cjk_module; } MultibyteCodecObject; #define MultibyteCodec_Check(state, op) Py_IS_TYPE((op), state->multibytecodec_type) #define _MultibyteStatefulCodec_HEAD \ PyObject_HEAD \ - MultibyteCodec *codec; \ + const MultibyteCodec *codec; \ MultibyteCodec_State state; \ PyObject *errors; typedef struct { @@ -130,7 +135,13 @@ typedef struct { #define MBENC_FLUSH 0x0001 /* encode all characters encodable */ #define MBENC_MAX MBENC_FLUSH -#define PyMultibyteCodec_CAPSULE_NAME "multibytecodec.__map_*" +typedef struct { + const MultibyteCodec *codec; + PyObject *cjk_module; +} codec_capsule; + +#define MAP_CAPSULE "multibytecodec.map" +#define CODEC_CAPSULE "multibytecodec.codec" #ifdef __cplusplus diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h index 43c5d771798634..6a780a80cd0bc4 100644 --- a/Modules/clinic/_asynciomodule.c.h +++ b/Modules/clinic/_asynciomodule.c.h @@ -482,14 +482,15 @@ _asyncio_Future__make_cancelled_error(FutureObj *self, PyObject 
*Py_UNUSED(ignor } PyDoc_STRVAR(_asyncio_Task___init____doc__, -"Task(coro, *, loop=None, name=None, context=None)\n" +"Task(coro, *, loop=None, name=None, context=None, eager_start=False)\n" "--\n" "\n" "A coroutine wrapped in a Future."); static int _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, - PyObject *name, PyObject *context); + PyObject *name, PyObject *context, + int eager_start); static int _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) @@ -497,14 +498,14 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) int return_value = -1; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - #define NUM_KEYWORDS 4 + #define NUM_KEYWORDS 5 static struct { PyGC_Head _this_is_not_used; PyObject_VAR_HEAD PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(coro), &_Py_ID(loop), &_Py_ID(name), &_Py_ID(context), }, + .ob_item = { &_Py_ID(coro), &_Py_ID(loop), &_Py_ID(name), &_Py_ID(context), &_Py_ID(eager_start), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -513,14 +514,14 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"coro", "loop", "name", "context", NULL}; + static const char * const _keywords[] = {"coro", "loop", "name", "context", "eager_start", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "Task", .kwtuple = KWTUPLE, }; #undef KWTUPLE - PyObject *argsbuf[4]; + PyObject *argsbuf[5]; PyObject * const *fastargs; Py_ssize_t nargs = PyTuple_GET_SIZE(args); Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 1; @@ -528,6 +529,7 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) PyObject *loop = Py_None; PyObject *name = Py_None; PyObject *context = Py_None; + int eager_start = 0; fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 0, argsbuf); if (!fastargs) { @@ -549,9 +551,18 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) goto skip_optional_kwonly; } } - context = fastargs[3]; + if (fastargs[3]) { + context = fastargs[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + eager_start = PyObject_IsTrue(fastargs[4]); + if (eager_start < 0) { + goto exit; + } skip_optional_kwonly: - return_value = _asyncio_Task___init___impl((TaskObj *)self, coro, loop, name, context); + return_value = _asyncio_Task___init___impl((TaskObj *)self, coro, loop, name, context, eager_start); exit: return return_value; @@ -1064,6 +1075,63 @@ _asyncio__register_task(PyObject *module, PyObject *const *args, Py_ssize_t narg return return_value; } +PyDoc_STRVAR(_asyncio__register_eager_task__doc__, +"_register_eager_task($module, /, task)\n" +"--\n" +"\n" +"Register a new task in asyncio as executed by loop.\n" +"\n" +"Returns None."); + +#define _ASYNCIO__REGISTER_EAGER_TASK_METHODDEF \ + {"_register_eager_task", _PyCFunction_CAST(_asyncio__register_eager_task), METH_FASTCALL|METH_KEYWORDS, _asyncio__register_eager_task__doc__}, + +static PyObject * +_asyncio__register_eager_task_impl(PyObject *module, PyObject *task); + +static PyObject * +_asyncio__register_eager_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define 
NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(task), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"task", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "_register_eager_task", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *task; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + task = args[0]; + return_value = _asyncio__register_eager_task_impl(module, task); + +exit: + return return_value; +} + PyDoc_STRVAR(_asyncio__unregister_task__doc__, "_unregister_task($module, /, task)\n" "--\n" @@ -1121,6 +1189,63 @@ _asyncio__unregister_task(PyObject *module, PyObject *const *args, Py_ssize_t na return return_value; } +PyDoc_STRVAR(_asyncio__unregister_eager_task__doc__, +"_unregister_eager_task($module, /, task)\n" +"--\n" +"\n" +"Unregister a task.\n" +"\n" +"Returns None."); + +#define _ASYNCIO__UNREGISTER_EAGER_TASK_METHODDEF \ + {"_unregister_eager_task", _PyCFunction_CAST(_asyncio__unregister_eager_task), METH_FASTCALL|METH_KEYWORDS, _asyncio__unregister_eager_task__doc__}, + +static PyObject * +_asyncio__unregister_eager_task_impl(PyObject *module, PyObject *task); + +static PyObject * +_asyncio__unregister_eager_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(task), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"task", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "_unregister_eager_task", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *task; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + task = args[0]; + return_value = _asyncio__unregister_eager_task_impl(module, task); + +exit: + return return_value; +} + PyDoc_STRVAR(_asyncio__enter_task__doc__, "_enter_task($module, /, loop, task)\n" "--\n" @@ -1243,6 +1368,66 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, return return_value; } +PyDoc_STRVAR(_asyncio__swap_current_task__doc__, +"_swap_current_task($module, /, loop, task)\n" +"--\n" +"\n" +"Temporarily swap in the supplied task and return the original one (or None).\n" +"\n" +"This is intended for use during eager coroutine execution."); + +#define _ASYNCIO__SWAP_CURRENT_TASK_METHODDEF \ + {"_swap_current_task", _PyCFunction_CAST(_asyncio__swap_current_task), METH_FASTCALL|METH_KEYWORDS, _asyncio__swap_current_task__doc__}, + +static PyObject * +_asyncio__swap_current_task_impl(PyObject *module, PyObject *loop, + PyObject *task); + +static PyObject * +_asyncio__swap_current_task(PyObject *module, PyObject *const *args, Py_ssize_t 
nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(loop), &_Py_ID(task), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"loop", "task", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "_swap_current_task", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *loop; + PyObject *task; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + loop = args[0]; + task = args[1]; + return_value = _asyncio__swap_current_task_impl(module, loop, task); + +exit: + return return_value; +} + PyDoc_STRVAR(_asyncio_current_task__doc__, "current_task($module, /, loop=None)\n" "--\n" @@ -1302,4 +1487,4 @@ _asyncio_current_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: return return_value; } -/*[clinic end generated code: output=00f494214f2fd008 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6b0e283177b07639 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_bisectmodule.c.h b/Modules/clinic/_bisectmodule.c.h index bbf456e4b0f411..7944f5219b02a3 100644 --- a/Modules/clinic/_bisectmodule.c.h +++ b/Modules/clinic/_bisectmodule.c.h @@ -19,7 +19,9 @@ PyDoc_STRVAR(_bisect_bisect_right__doc__, "insert just after the rightmost x already there.\n" "\n" "Optional args lo (default 0) and hi (default len(a)) bound the\n" -"slice of a to be searched."); +"slice of a to be searched.\n" +"\n" +"A custom key function can be supplied to customize the sort order."); #define _BISECT_BISECT_RIGHT_METHODDEF \ {"bisect_right", _PyCFunction_CAST(_bisect_bisect_right), METH_FASTCALL|METH_KEYWORDS, _bisect_bisect_right__doc__}, @@ -125,7 +127,9 @@ PyDoc_STRVAR(_bisect_insort_right__doc__, "If x is already in a, insert it to the right of the rightmost x.\n" "\n" "Optional args lo (default 0) and hi (default len(a)) bound the\n" -"slice of a to be searched."); +"slice of a to be searched.\n" +"\n" +"A custom key function can be supplied to customize the sort order."); #define _BISECT_INSORT_RIGHT_METHODDEF \ {"insort_right", _PyCFunction_CAST(_bisect_insort_right), METH_FASTCALL|METH_KEYWORDS, _bisect_insort_right__doc__}, @@ -228,7 +232,9 @@ PyDoc_STRVAR(_bisect_bisect_left__doc__, "insert just before the leftmost x already there.\n" "\n" "Optional args lo (default 0) and hi (default len(a)) bound the\n" -"slice of a to be searched."); +"slice of a to be searched.\n" +"\n" +"A custom key function can be supplied to customize the sort order."); #define _BISECT_BISECT_LEFT_METHODDEF \ {"bisect_left", _PyCFunction_CAST(_bisect_bisect_left), METH_FASTCALL|METH_KEYWORDS, _bisect_bisect_left__doc__}, @@ -334,7 +340,9 @@ PyDoc_STRVAR(_bisect_insort_left__doc__, "If x is already in a, insert it to the left of the leftmost x.\n" "\n" "Optional args lo (default 0) and hi (default len(a)) bound the\n" -"slice of a to be searched."); +"slice of a to be searched.\n" +"\n" +"A custom key function can be supplied to customize the sort order."); #define _BISECT_INSORT_LEFT_METHODDEF \ {"insort_left", 
_PyCFunction_CAST(_bisect_insort_left), METH_FASTCALL|METH_KEYWORDS, _bisect_insort_left__doc__}, @@ -425,4 +433,4 @@ _bisect_insort_left(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P exit: return return_value; } -/*[clinic end generated code: output=7dc87f7af75275a1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=5a7fa64bf9b262f3 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_collectionsmodule.c.h b/Modules/clinic/_collectionsmodule.c.h index 8ea0255b061070..3882d069216e28 100644 --- a/Modules/clinic/_collectionsmodule.c.h +++ b/Modules/clinic/_collectionsmodule.c.h @@ -46,7 +46,7 @@ static PyObject * tuplegetter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) { PyObject *return_value = NULL; - PyTypeObject *base_tp = &tuplegetter_type; + PyTypeObject *base_tp = clinic_state()->tuplegetter_type; Py_ssize_t index; PyObject *doc; @@ -75,4 +75,4 @@ tuplegetter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=91a0f221c7b1f96c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=00e516317d2b8bed input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h index adb3abc5eb2372..539acc34a05cc1 100644 --- a/Modules/clinic/_pickle.c.h +++ b/Modules/clinic/_pickle.c.h @@ -38,7 +38,42 @@ PyDoc_STRVAR(_pickle_Pickler_dump__doc__, "Write a pickled representation of the given object to the open file."); #define _PICKLE_PICKLER_DUMP_METHODDEF \ - {"dump", (PyCFunction)_pickle_Pickler_dump, METH_O, _pickle_Pickler_dump__doc__}, + {"dump", _PyCFunction_CAST(_pickle_Pickler_dump), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _pickle_Pickler_dump__doc__}, + +static PyObject * +_pickle_Pickler_dump_impl(PicklerObject *self, PyTypeObject *cls, + PyObject *obj); + +static PyObject * +_pickle_Pickler_dump(PicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty) + #else + # define KWTUPLE NULL + #endif + + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "dump", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *obj; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + obj = args[0]; + return_value = _pickle_Pickler_dump_impl(self, cls, obj); + +exit: + return return_value; +} PyDoc_STRVAR(_pickle_Pickler___sizeof____doc__, "__sizeof__($self, /)\n" @@ -242,15 +277,19 @@ PyDoc_STRVAR(_pickle_Unpickler_load__doc__, "specified therein."); #define _PICKLE_UNPICKLER_LOAD_METHODDEF \ - {"load", (PyCFunction)_pickle_Unpickler_load, METH_NOARGS, _pickle_Unpickler_load__doc__}, + {"load", _PyCFunction_CAST(_pickle_Unpickler_load), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _pickle_Unpickler_load__doc__}, static PyObject * -_pickle_Unpickler_load_impl(UnpicklerObject *self); +_pickle_Unpickler_load_impl(UnpicklerObject *self, PyTypeObject *cls); static PyObject * -_pickle_Unpickler_load(UnpicklerObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_Unpickler_load(UnpicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _pickle_Unpickler_load_impl(self); + if (nargs) { + PyErr_SetString(PyExc_TypeError, "load() takes no arguments"); + 
return NULL; + } + return _pickle_Unpickler_load_impl(self, cls); } PyDoc_STRVAR(_pickle_Unpickler_find_class__doc__, @@ -267,26 +306,41 @@ PyDoc_STRVAR(_pickle_Unpickler_find_class__doc__, "needed. Both arguments passed are str objects."); #define _PICKLE_UNPICKLER_FIND_CLASS_METHODDEF \ - {"find_class", _PyCFunction_CAST(_pickle_Unpickler_find_class), METH_FASTCALL, _pickle_Unpickler_find_class__doc__}, + {"find_class", _PyCFunction_CAST(_pickle_Unpickler_find_class), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _pickle_Unpickler_find_class__doc__}, static PyObject * -_pickle_Unpickler_find_class_impl(UnpicklerObject *self, +_pickle_Unpickler_find_class_impl(UnpicklerObject *self, PyTypeObject *cls, PyObject *module_name, PyObject *global_name); static PyObject * -_pickle_Unpickler_find_class(UnpicklerObject *self, PyObject *const *args, Py_ssize_t nargs) +_pickle_Unpickler_find_class(UnpicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty) + #else + # define KWTUPLE NULL + #endif + + static const char * const _keywords[] = {"", "", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "find_class", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; PyObject *module_name; PyObject *global_name; - if (!_PyArg_CheckPositional("find_class", nargs, 2, 2)) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { goto exit; } module_name = args[0]; global_name = args[1]; - return_value = _pickle_Unpickler_find_class_impl(self, module_name, global_name); + return_value = _pickle_Unpickler_find_class_impl(self, cls, module_name, global_name); exit: return return_value; @@ -980,4 +1034,4 @@ _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=730dc26938561313 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a0e04b85e7bae626 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_posixsubprocess.c.h b/Modules/clinic/_posixsubprocess.c.h new file mode 100644 index 00000000000000..f08878cf668908 --- /dev/null +++ b/Modules/clinic/_posixsubprocess.c.h @@ -0,0 +1,162 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(subprocess_fork_exec__doc__, +"fork_exec($module, args, executable_list, close_fds, pass_fds, cwd,\n" +" env, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite,\n" +" errpipe_read, errpipe_write, restore_signals, call_setsid,\n" +" pgid_to_set, gid, extra_groups, uid, child_umask, preexec_fn,\n" +" allow_vfork, /)\n" +"--\n" +"\n" +"Spawn a fresh new child process.\n" +"\n" +"Fork a child process, close parent file descriptors as appropriate in the\n" +"child and duplicate the few that are needed before calling exec() in the\n" +"child process.\n" +"\n" +"If close_fds is True, close file descriptors 3 and higher, except those listed\n" +"in the sorted tuple pass_fds.\n" +"\n" +"The preexec_fn, if supplied, will be called immediately before closing file\n" +"descriptors and exec.\n" +"\n" +"WARNING: preexec_fn is NOT SAFE if your application uses threads.\n" +" It may trigger infrequent, difficult to debug 
deadlocks.\n" +"\n" +"If an error occurs in the child process before the exec, it is\n" +"serialized and written to the errpipe_write fd per subprocess.py.\n" +"\n" +"Returns: the child process\'s PID.\n" +"\n" +"Raises: Only on an error in the parent process."); + +#define SUBPROCESS_FORK_EXEC_METHODDEF \ + {"fork_exec", _PyCFunction_CAST(subprocess_fork_exec), METH_FASTCALL, subprocess_fork_exec__doc__}, + +static PyObject * +subprocess_fork_exec_impl(PyObject *module, PyObject *process_args, + PyObject *executable_list, int close_fds, + PyObject *py_fds_to_keep, PyObject *cwd_obj, + PyObject *env_list, int p2cread, int p2cwrite, + int c2pread, int c2pwrite, int errread, + int errwrite, int errpipe_read, int errpipe_write, + int restore_signals, int call_setsid, + pid_t pgid_to_set, PyObject *gid_object, + PyObject *extra_groups_packed, + PyObject *uid_object, int child_umask, + PyObject *preexec_fn, int allow_vfork); + +static PyObject * +subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *process_args; + PyObject *executable_list; + int close_fds; + PyObject *py_fds_to_keep; + PyObject *cwd_obj; + PyObject *env_list; + int p2cread; + int p2cwrite; + int c2pread; + int c2pwrite; + int errread; + int errwrite; + int errpipe_read; + int errpipe_write; + int restore_signals; + int call_setsid; + pid_t pgid_to_set; + PyObject *gid_object; + PyObject *extra_groups_packed; + PyObject *uid_object; + int child_umask; + PyObject *preexec_fn; + int allow_vfork; + + if (!_PyArg_CheckPositional("fork_exec", nargs, 23, 23)) { + goto exit; + } + process_args = args[0]; + executable_list = args[1]; + close_fds = PyObject_IsTrue(args[2]); + if (close_fds < 0) { + goto exit; + } + if (!PyTuple_Check(args[3])) { + _PyArg_BadArgument("fork_exec", "argument 4", "tuple", args[3]); + goto exit; + } + py_fds_to_keep = args[3]; + cwd_obj = args[4]; + env_list = args[5]; + p2cread = _PyLong_AsInt(args[6]); + if (p2cread == -1 && PyErr_Occurred()) { + goto exit; + } + p2cwrite = _PyLong_AsInt(args[7]); + if (p2cwrite == -1 && PyErr_Occurred()) { + goto exit; + } + c2pread = _PyLong_AsInt(args[8]); + if (c2pread == -1 && PyErr_Occurred()) { + goto exit; + } + c2pwrite = _PyLong_AsInt(args[9]); + if (c2pwrite == -1 && PyErr_Occurred()) { + goto exit; + } + errread = _PyLong_AsInt(args[10]); + if (errread == -1 && PyErr_Occurred()) { + goto exit; + } + errwrite = _PyLong_AsInt(args[11]); + if (errwrite == -1 && PyErr_Occurred()) { + goto exit; + } + errpipe_read = _PyLong_AsInt(args[12]); + if (errpipe_read == -1 && PyErr_Occurred()) { + goto exit; + } + errpipe_write = _PyLong_AsInt(args[13]); + if (errpipe_write == -1 && PyErr_Occurred()) { + goto exit; + } + restore_signals = PyObject_IsTrue(args[14]); + if (restore_signals < 0) { + goto exit; + } + call_setsid = PyObject_IsTrue(args[15]); + if (call_setsid < 0) { + goto exit; + } + pgid_to_set = PyLong_AsPid(args[16]); + if (pgid_to_set == -1 && PyErr_Occurred()) { + goto exit; + } + gid_object = args[17]; + extra_groups_packed = args[18]; + uid_object = args[19]; + child_umask = _PyLong_AsInt(args[20]); + if (child_umask == -1 && PyErr_Occurred()) { + goto exit; + } + preexec_fn = args[21]; + allow_vfork = PyObject_IsTrue(args[22]); + if (allow_vfork < 0) { + goto exit; + } + return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, 
restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn, allow_vfork); + +exit: + return return_value; +} +/*[clinic end generated code: output=46d71e86845c93d7 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testinternalcapi.c.h b/Modules/clinic/_testinternalcapi.c.h index e8d5681b194916..89573222572594 100644 --- a/Modules/clinic/_testinternalcapi.c.h +++ b/Modules/clinic/_testinternalcapi.c.h @@ -128,4 +128,66 @@ _testinternalcapi_optimize_cfg(PyObject *module, PyObject *const *args, Py_ssize exit: return return_value; } -/*[clinic end generated code: output=efe95836482fd542 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_testinternalcapi_assemble_code_object__doc__, +"assemble_code_object($module, /, filename, instructions, metadata)\n" +"--\n" +"\n" +"Create a code object for the given instructions."); + +#define _TESTINTERNALCAPI_ASSEMBLE_CODE_OBJECT_METHODDEF \ + {"assemble_code_object", _PyCFunction_CAST(_testinternalcapi_assemble_code_object), METH_FASTCALL|METH_KEYWORDS, _testinternalcapi_assemble_code_object__doc__}, + +static PyObject * +_testinternalcapi_assemble_code_object_impl(PyObject *module, + PyObject *filename, + PyObject *instructions, + PyObject *metadata); + +static PyObject * +_testinternalcapi_assemble_code_object(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(filename), &_Py_ID(instructions), &_Py_ID(metadata), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"filename", "instructions", "metadata", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "assemble_code_object", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *filename; + PyObject *instructions; + PyObject *metadata; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 0, argsbuf); + if (!args) { + goto exit; + } + filename = args[0]; + instructions = args[1]; + metadata = args[2]; + return_value = _testinternalcapi_assemble_code_object_impl(module, filename, instructions, metadata); + +exit: + return return_value; +} +/*[clinic end generated code: output=d5e08c9d67f9721f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index 891b3f851d1243..7bc63e612be348 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -1371,4 +1371,44 @@ _winapi__mimetypes_read_windows_registry(PyObject *module, PyObject *const *args exit: return return_value; } -/*[clinic end generated code: output=edb1a9d1bbfd6394 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_winapi_NeedCurrentDirectoryForExePath__doc__, +"NeedCurrentDirectoryForExePath($module, exe_name, /)\n" +"--\n" +"\n"); + +#define _WINAPI_NEEDCURRENTDIRECTORYFOREXEPATH_METHODDEF \ + {"NeedCurrentDirectoryForExePath", (PyCFunction)_winapi_NeedCurrentDirectoryForExePath, METH_O, _winapi_NeedCurrentDirectoryForExePath__doc__}, + +static int +_winapi_NeedCurrentDirectoryForExePath_impl(PyObject *module, + LPCWSTR exe_name); + +static PyObject * 
+_winapi_NeedCurrentDirectoryForExePath(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + LPCWSTR exe_name = NULL; + int _return_value; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("NeedCurrentDirectoryForExePath", "argument", "str", arg); + goto exit; + } + exe_name = PyUnicode_AsWideCharString(arg, NULL); + if (exe_name == NULL) { + goto exit; + } + _return_value = _winapi_NeedCurrentDirectoryForExePath_impl(module, exe_name); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + /* Cleanup for exe_name */ + PyMem_Free((void *)exe_name); + + return return_value; +} +/*[clinic end generated code: output=96ea65ece7912d0a input=a9049054013a1b77]*/ diff --git a/Modules/clinic/mathmodule.c.h b/Modules/clinic/mathmodule.c.h index 1f9725883b9820..bc5bbceb4c92b6 100644 --- a/Modules/clinic/mathmodule.c.h +++ b/Modules/clinic/mathmodule.c.h @@ -186,49 +186,6 @@ math_modf(PyObject *module, PyObject *arg) return return_value; } -PyDoc_STRVAR(math_log__doc__, -"log(x, [base=math.e])\n" -"Return the logarithm of x to the given base.\n" -"\n" -"If the base not specified, returns the natural logarithm (base e) of x."); - -#define MATH_LOG_METHODDEF \ - {"log", (PyCFunction)math_log, METH_VARARGS, math_log__doc__}, - -static PyObject * -math_log_impl(PyObject *module, PyObject *x, int group_right_1, - PyObject *base); - -static PyObject * -math_log(PyObject *module, PyObject *args) -{ - PyObject *return_value = NULL; - PyObject *x; - int group_right_1 = 0; - PyObject *base = NULL; - - switch (PyTuple_GET_SIZE(args)) { - case 1: - if (!PyArg_ParseTuple(args, "O:log", &x)) { - goto exit; - } - break; - case 2: - if (!PyArg_ParseTuple(args, "OO:log", &x, &base)) { - goto exit; - } - group_right_1 = 1; - break; - default: - PyErr_SetString(PyExc_TypeError, "math.log requires 1 to 2 arguments"); - goto exit; - } - return_value = math_log_impl(module, x, group_right_1, base); - -exit: - return return_value; -} - PyDoc_STRVAR(math_log2__doc__, "log2($module, x, /)\n" "--\n" @@ -954,4 +911,4 @@ math_ulp(PyObject *module, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=899211ec70e4506c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a6437a3ba18c486a input=a9049054013a1b77]*/ diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index bfe35fed7a450a..9b4e4199cdc20a 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -120,7 +120,7 @@ faulthandler_get_fileno(PyObject **file_ptr) return -1; if (fd < 0) { PyErr_SetString(PyExc_ValueError, - "file is not a valid file descripter"); + "file is not a valid file descriptor"); return -1; } *file_ptr = NULL; diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 4eaa5490b6134c..3fd5f4cd70e832 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -418,8 +418,20 @@ validate_list(PyGC_Head *head, enum flagstates flags) static void update_refs(PyGC_Head *containers) { + PyGC_Head *next; PyGC_Head *gc = GC_NEXT(containers); - for (; gc != containers; gc = GC_NEXT(gc)) { + + while (gc != containers) { + next = GC_NEXT(gc); + /* Move any object that might have become immortal to the + * permanent generation as the reference count is not accurately + * reflecting the actual number of live references to this object + */ + if (_Py_IsImmortal(FROM_GC(gc))) { + gc_list_move(gc, &get_gc_state()->permanent_generation.head); + gc = next; + continue; + } gc_reset_refs(gc, Py_REFCNT(FROM_GC(gc))); /* 
Python's cyclic gc should never see an incoming refcount * of 0: if something decref'ed to 0, it should have been @@ -440,6 +452,7 @@ update_refs(PyGC_Head *containers) * check instead of an assert? */ _PyObject_ASSERT(FROM_GC(gc), gc_get_refs(gc) != 0); + gc = next; } } @@ -1375,10 +1388,19 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, return; } } + + PyObject *phase_obj = PyUnicode_FromString(phase); + if (phase_obj == NULL) { + Py_XDECREF(info); + PyErr_WriteUnraisable(NULL); + return; + } + + PyObject *stack[] = {phase_obj, info}; for (Py_ssize_t i=0; i<PyList_GET_SIZE(gcstate->callbacks); i++) { PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i); Py_INCREF(cb); /* make sure cb doesn't go away */ - r = PyObject_CallFunction(cb, "sO", phase, info); + r = PyObject_Vectorcall(cb, stack, 2, NULL); if (r == NULL) { PyErr_WriteUnraisable(cb); } @@ -1387,6 +1409,7 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, } Py_DECREF(cb); } + Py_DECREF(phase_obj); Py_XDECREF(info); assert(!_PyErr_Occurred(tstate)); } @@ -2348,16 +2371,17 @@ PyVarObject * _PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems) { const size_t basicsize = _PyObject_VAR_SIZE(Py_TYPE(op), nitems); + const size_t presize = _PyType_PreHeaderSize(((PyObject *)op)->ob_type); _PyObject_ASSERT((PyObject *)op, !_PyObject_GC_IS_TRACKED(op)); - if (basicsize > (size_t)PY_SSIZE_T_MAX - sizeof(PyGC_Head)) { + if (basicsize > (size_t)PY_SSIZE_T_MAX - presize) { return (PyVarObject *)PyErr_NoMemory(); } - - PyGC_Head *g = AS_GC(op); - g = (PyGC_Head *)PyObject_Realloc(g, sizeof(PyGC_Head) + basicsize); - if (g == NULL) + char *mem = (char *)op - presize; + mem = (char *)PyObject_Realloc(mem, presize + basicsize); + if (mem == NULL) { return (PyVarObject *)PyErr_NoMemory(); - op = (PyVarObject *) FROM_GC(g); + } + op = (PyVarObject *) (mem + presize); Py_SET_SIZE(op, nitems); return op; } diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index ae9e3211c072d8..4a2381d9611776 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -836,7 +836,7 @@ long_lcm(PyObject *a, PyObject *b) { PyObject *g, *m, *f, *ab; - if (Py_SIZE(a) == 0 || Py_SIZE(b) == 0) { + if (_PyLong_IsZero((PyLongObject *)a) || _PyLong_IsZero((PyLongObject *)b)) { return PyLong_FromLong(0); } g = _PyLong_GCD(a, b); @@ -1726,13 +1726,13 @@ math_isqrt(PyObject *module, PyObject *n) return NULL; } - if (_PyLong_Sign(n) < 0) { + if (_PyLong_IsNegative((PyLongObject *)n)) { PyErr_SetString( PyExc_ValueError, "isqrt() argument must be nonnegative"); goto error; } - if (_PyLong_Sign(n) == 0) { + if (_PyLong_IsZero((PyLongObject *)n)) { Py_DECREF(n); return PyLong_FromLong(0); } @@ -2096,7 +2096,7 @@ math_trunc(PyObject *module, PyObject *x) return PyFloat_Type.tp_as_number->nb_int(x); } - if (Py_TYPE(x)->tp_dict == NULL) { + if (!_PyType_IsReady(Py_TYPE(x))) { if (PyType_Ready(Py_TYPE(x)) < 0) return NULL; } @@ -2254,7 +2254,7 @@ loghelper(PyObject* arg, double (*func)(double)) Py_ssize_t e; /* Negative or zero inputs give a ValueError. */ - if (Py_SIZE(arg) <= 0) { + if (!_PyLong_IsPositive((PyLongObject *)arg)) { PyErr_SetString(PyExc_ValueError, "math domain error"); return NULL; @@ -2284,33 +2284,22 @@ loghelper(PyObject* arg, double (*func)(double)) } -/*[clinic input] -math.log - - x: object - [ - base: object(c_default="NULL") = math.e - ] - / - -Return the logarithm of x to the given base. - -If the base not specified, returns the natural logarithm (base e) of x. 
-[clinic start generated code]*/ - +/* AC: cannot convert yet, see gh-102839 and gh-89381, waiting + for support of multiple signatures */ static PyObject * -math_log_impl(PyObject *module, PyObject *x, int group_right_1, - PyObject *base) -/*[clinic end generated code: output=7b5a39e526b73fc9 input=0f62d5726cbfebbd]*/ +math_log(PyObject *module, PyObject * const *args, Py_ssize_t nargs) { PyObject *num, *den; PyObject *ans; - num = loghelper(x, m_log); - if (num == NULL || base == NULL) + if (!_PyArg_CheckPositional("log", nargs, 1, 2)) + return NULL; + + num = loghelper(args[0], m_log); + if (num == NULL || nargs == 1) return num; - den = loghelper(base, m_log); + den = loghelper(args[1], m_log); if (den == NULL) { Py_DECREF(num); return NULL; @@ -2322,6 +2311,10 @@ math_log_impl(PyObject *module, PyObject *x, int group_right_1, return ans; } +PyDoc_STRVAR(math_log_doc, +"log(x, [base=math.e])\n\ +Return the logarithm of x to the given base.\n\n\ +If the base is not specified, returns the natural logarithm (base e) of x."); /*[clinic input] math.log2 @@ -2447,9 +2440,8 @@ Since lo**2 is less than 1/2 ulp(csum), we have csum+lo*lo == csum. To minimize loss of information during the accumulation of fractional values, each term has a separate accumulator. This also breaks up sequential dependencies in the inner loop so the CPU can maximize -floating point throughput. [4] On a 2.6 GHz Haswell, adding one -dimension has an incremental cost of only 5ns -- for example when -moving from hypot(x,y) to hypot(x,y,z). +floating point throughput. [4] On an Apple M1 Max, hypot(*vec) +takes only 3.33 µsec when len(vec) == 1000. The square root differential correction is needed because a correctly rounded square root of a correctly rounded sum of @@ -2473,7 +2465,7 @@ step is exact. The Neumaier summation computes as if in doubled precision (106 bits) and has the advantage that its input squares are non-negative so that the condition number of the sum is one. The square root with a differential correction is likewise computed -as if in double precision. +as if in doubled precision. For n <= 1000, prior to the final addition that rounds the overall result, the internal accuracy of "h" together with its correction of @@ -2498,7 +2490,7 @@ verified for 1 billion random inputs with n=5. [7] static inline double vector_norm(Py_ssize_t n, double *vec, double max, int found_nan) { - double x, h, scale, oldcsum, csum = 1.0, frac1 = 0.0, frac2 = 0.0; + double x, h, scale, csum = 1.0, frac1 = 0.0, frac2 = 0.0; DoubleLength pr, sm; int max_e; Py_ssize_t i; @@ -2513,49 +2505,37 @@ vector_norm(Py_ssize_t n, double *vec, double max, int found_nan) return max; } frexp(max, &max_e); - if (max_e >= -1023) { - scale = ldexp(1.0, -max_e); - assert(max * scale >= 0.5); - assert(max * scale < 1.0); + if (max_e < -1023) { + /* When max_e < -1023, ldexp(1.0, -max_e) would overflow. 
*/ for (i=0 ; i < n ; i++) { - x = vec[i]; - assert(Py_IS_FINITE(x) && fabs(x) <= max); - - x *= scale; - assert(fabs(x) < 1.0); - - pr = dl_mul(x, x); - assert(pr.hi <= 1.0); - - sm = dl_fast_sum(csum, pr.hi); - csum = sm.hi; - frac1 += pr.lo; - frac2 += sm.lo; + vec[i] /= DBL_MIN; // convert subnormals to normals } - h = sqrt(csum - 1.0 + (frac1 + frac2)); - pr = dl_mul(-h, h); - sm = dl_fast_sum(csum, pr.hi); - csum = sm.hi; - frac1 += pr.lo; - frac2 += sm.lo; - x = csum - 1.0 + (frac1 + frac2); - return (h + x / (2.0 * h)) / scale; + return DBL_MIN * vector_norm(n, vec, max / DBL_MIN, found_nan); } - /* When max_e < -1023, ldexp(1.0, -max_e) overflows. - So instead of multiplying by a scale, we just divide by *max*. - */ + scale = ldexp(1.0, -max_e); + assert(max * scale >= 0.5); + assert(max * scale < 1.0); for (i=0 ; i < n ; i++) { x = vec[i]; assert(Py_IS_FINITE(x) && fabs(x) <= max); - x /= max; - x = x*x; - assert(x <= 1.0); - assert(fabs(csum) >= fabs(x)); - oldcsum = csum; - csum += x; - frac1 += (oldcsum - csum) + x; - } - return max * sqrt(csum - 1.0 + frac1); + x *= scale; // lossless scaling + assert(fabs(x) < 1.0); + pr = dl_mul(x, x); // lossless squaring + assert(pr.hi <= 1.0); + sm = dl_fast_sum(csum, pr.hi); // lossless addition + csum = sm.hi; + frac1 += pr.lo; // lossy addition + frac2 += sm.lo; // lossy addition + } + h = sqrt(csum - 1.0 + (frac1 + frac2)); + pr = dl_mul(-h, h); + sm = dl_fast_sum(csum, pr.hi); + csum = sm.hi; + frac1 += pr.lo; + frac2 += sm.lo; + x = csum - 1.0 + (frac1 + frac2); + h += x / (2.0 * h); // differential correction + return h / scale; } #define NUM_STACK_ELEMS 16 @@ -3736,12 +3716,12 @@ math_perm_impl(PyObject *module, PyObject *n, PyObject *k) } assert(PyLong_CheckExact(n) && PyLong_CheckExact(k)); - if (Py_SIZE(n) < 0) { + if (_PyLong_IsNegative((PyLongObject *)n)) { PyErr_SetString(PyExc_ValueError, "n must be a non-negative integer"); goto error; } - if (Py_SIZE(k) < 0) { + if (_PyLong_IsNegative((PyLongObject *)k)) { PyErr_SetString(PyExc_ValueError, "k must be a non-negative integer"); goto error; @@ -3828,12 +3808,12 @@ math_comb_impl(PyObject *module, PyObject *n, PyObject *k) } assert(PyLong_CheckExact(n) && PyLong_CheckExact(k)); - if (Py_SIZE(n) < 0) { + if (_PyLong_IsNegative((PyLongObject *)n)) { PyErr_SetString(PyExc_ValueError, "n must be a non-negative integer"); goto error; } - if (Py_SIZE(k) < 0) { + if (_PyLong_IsNegative((PyLongObject *)k)) { PyErr_SetString(PyExc_ValueError, "k must be a non-negative integer"); goto error; @@ -3865,7 +3845,8 @@ math_comb_impl(PyObject *module, PyObject *n, PyObject *k) if (temp == NULL) { goto error; } - if (Py_SIZE(temp) < 0) { + assert(PyLong_Check(temp)); + if (_PyLong_IsNegative((PyLongObject *)temp)) { Py_DECREF(temp); result = PyLong_FromLong(0); goto done; @@ -4058,7 +4039,7 @@ static PyMethodDef math_methods[] = { {"lcm", _PyCFunction_CAST(math_lcm), METH_FASTCALL, math_lcm_doc}, MATH_LDEXP_METHODDEF {"lgamma", math_lgamma, METH_O, math_lgamma_doc}, - MATH_LOG_METHODDEF + {"log", _PyCFunction_CAST(math_log), METH_FASTCALL, math_log_doc}, {"log1p", math_log1p, METH_O, math_log1p_doc}, MATH_LOG10_METHODDEF MATH_LOG2_METHODDEF diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index e38caf7cc0abee..dcb5e7a0e0408c 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4789,6 +4789,8 @@ os__path_isdir_impl(PyObject *module, PyObject *path) FILE_BASIC_INFO info; path_t _path = PATH_T_INITIALIZE("isdir", "path", 0, 1); int result; + BOOL slow_path = TRUE; + 
FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { path_cleanup(&_path); @@ -4800,43 +4802,60 @@ os__path_isdir_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + if (_path.wide) { + if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { + slow_path = FALSE; + result = statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; + } else if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY)) { + slow_path = FALSE; + result = 0; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; + } } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, - sizeof(info))) - { - result = info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; } else { - result = 0; + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); } - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; + if (hfile != INVALID_HANDLE_VALUE) { + if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, + sizeof(info))) + { + result = info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; } else { - result = S_ISDIR(st.st_mode); + result = 0; + } + if (close_file) { + CloseHandle(hfile); + } + } + else { + STRUCT_STAT st; + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (STAT(_path.wide, &st)) { + result = 0; + } + else { + result = S_ISDIR(st.st_mode); + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS @@ -4867,6 +4886,8 @@ os__path_isfile_impl(PyObject *module, PyObject *path) FILE_BASIC_INFO info; path_t _path = PATH_T_INITIALIZE("isfile", "path", 0, 1); int result; + BOOL slow_path = TRUE; + FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { path_cleanup(&_path); @@ -4878,43 +4899,60 @@ os__path_isfile_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + if (_path.wide) { + if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { + slow_path = FALSE; + result = !(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); + } else if (statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY) { + slow_path = FALSE; + result = 0; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; + } } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, - sizeof(info))) - { - 
result = !(info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; } else { - result = 0; - } - if (close_file) { - CloseHandle(hfile); + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; + if (hfile != INVALID_HANDLE_VALUE) { + if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, + sizeof(info))) + { + result = !(info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); } else { - result = S_ISREG(st.st_mode); + result = 0; + } + if (close_file) { + CloseHandle(hfile); + } + } + else { + STRUCT_STAT st; + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (STAT(_path.wide, &st)) { + result = 0; + } + else { + result = S_ISREG(st.st_mode); + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS @@ -4944,6 +4982,8 @@ os__path_exists_impl(PyObject *module, PyObject *path) BOOL close_file = TRUE; path_t _path = PATH_T_INITIALIZE("exists", "path", 0, 1); int result; + BOOL slow_path = TRUE; + FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { path_cleanup(&_path); @@ -4955,36 +4995,50 @@ os__path_exists_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); - } - if (hfile != INVALID_HANDLE_VALUE) { - result = 1; - if (close_file) { - CloseHandle(hfile); + if (_path.wide) { + if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { + slow_path = FALSE; + result = 1; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; } } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; + } + else { + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + } + if (hfile != INVALID_HANDLE_VALUE) { + result = 1; + if (close_file) { + CloseHandle(hfile); } - else { - result = 1; + } + else { + STRUCT_STAT st; + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (STAT(_path.wide, &st)) { + result = 0; + } + else { + result = 1; + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS @@ -5015,6 +5069,8 @@ os__path_islink_impl(PyObject *module, PyObject *path) FILE_ATTRIBUTE_TAG_INFO info; path_t _path = PATH_T_INITIALIZE("islink", "path", 0, 1); int result; + BOOL slow_path = TRUE; + FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { 
path_cleanup(&_path); @@ -5026,45 +5082,62 @@ os__path_islink_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; + if (_path.wide) { + if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + slow_path = FALSE; + if (statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) { + result = (statInfo.ReparseTag == IO_REPARSE_TAG_SYMLINK); + } + else { + result = 0; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; + } } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, - FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, - NULL); - } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info, - sizeof(info))) - { - result = (info.ReparseTag == IO_REPARSE_TAG_SYMLINK); + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; } else { - result = 0; + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, + FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, + NULL); } - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (LSTAT(_path.wide, &st)) { - result = 0; + if (hfile != INVALID_HANDLE_VALUE) { + if (GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info, + sizeof(info))) + { + result = (info.ReparseTag == IO_REPARSE_TAG_SYMLINK); } else { - result = S_ISLNK(st.st_mode); + result = 0; + } + if (close_file) { + CloseHandle(hfile); + } + } + else { + STRUCT_STAT st; + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (LSTAT(_path.wide, &st)) { + result = 0; + } + else { + result = S_ISLNK(st.st_mode); + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS @@ -8546,7 +8619,7 @@ os_setpgrp_impl(PyObject *module) #include <processsnapshot.h> static PyObject* -win32_getppid() +win32_getppid(void) { DWORD error; PyObject* result = NULL; @@ -13330,7 +13403,7 @@ static int has_ShellExecute = -1; static HINSTANCE (CALLBACK *Py_ShellExecuteW)(HWND, LPCWSTR, LPCWSTR, LPCWSTR, LPCWSTR, INT); static int -check_ShellExecute() +check_ShellExecute(void) { HINSTANCE hShell32; diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index b7927750e334b7..f11d4b1a6e0591 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -108,6 +108,7 @@ Local naming conventions: #define PY_SSIZE_T_CLEAN #include "Python.h" #include "pycore_fileutils.h" // _Py_set_inheritable() +#include "pycore_moduleobject.h" // _PyModule_GetState #include "structmember.h" // PyMemberDef #ifdef _Py_MEMORY_SANITIZER @@ -337,9 +338,9 @@ static FlagRuntimeInfo win_runtime_flags[] = { /*[clinic input] module _socket -class _socket.socket "PySocketSockObject *" "&sock_type" +class _socket.socket "PySocketSockObject *" "clinic_state()->sock_type" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=7a8313d9b7f51988]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=2db2489bd2219fd8]*/ static int remove_unusable_flags(PyObject *m) @@ -353,7 +354,7 
@@ remove_unusable_flags(PyObject *m) } #ifndef MS_WINDOWS_DESKTOP info.dwOSVersionInfoSize = sizeof(info); - if (!GetVersionExW((OSVERSIONINFOW*) &info)) { + if (!GetVersionEx((OSVERSIONINFO*) &info)) { PyErr_SetFromWindowsErr(0); return -1; } @@ -541,22 +542,59 @@ remove_unusable_flags(PyObject *m) #define INADDR_NONE (-1) #endif +typedef struct _socket_state { + /* The sock_type variable contains pointers to various functions, + some of which call new_sockobject(), which uses sock_type, so + there has to be a circular reference. */ + PyTypeObject *sock_type; + + /* Global variable holding the exception type for errors detected + by this module (but not argument type or memory errors, etc.). */ + PyObject *socket_herror; + PyObject *socket_gaierror; + + /* Default timeout for new sockets */ + _PyTime_t defaulttimeout; + +#if defined(HAVE_ACCEPT) || defined(HAVE_ACCEPT4) +#if defined(HAVE_ACCEPT4) && defined(SOCK_CLOEXEC) + /* accept4() is available on Linux 2.6.28+ and glibc 2.10 */ + int accept4_works; +#endif +#endif + +#ifdef SOCK_CLOEXEC + /* socket() and socketpair() fail with EINVAL on Linux kernel older + * than 2.6.27 if SOCK_CLOEXEC flag is set in the socket type. */ + int sock_cloexec_works; +#endif +} socket_state; + +static inline socket_state * +get_module_state(PyObject *mod) +{ + void *state = _PyModule_GetState(mod); + assert(state != NULL); + return (socket_state *)state; +} + +static struct PyModuleDef socketmodule; + +static inline socket_state * +find_module_state_by_def(PyTypeObject *type) +{ + PyObject *mod = PyType_GetModuleByDef(type, &socketmodule); + assert(mod != NULL); + return get_module_state(mod); +} + +#define clinic_state() (find_module_state_by_def(type)) #include "clinic/socketmodule.c.h" +#undef clinic_state /* XXX There's a problem here: *static* functions are not supposed to have a Py prefix (or use CapitalizedWords). Later... */ -/* Global variable holding the exception type for errors detected - by this module (but not argument type or memory errors, etc.). */ -static PyObject *socket_herror; -static PyObject *socket_gaierror; - -/* A forward reference to the socket type object. - The sock_type variable contains pointers to various functions, - some of which call new_sockobject(), which uses sock_type, so - there has to be a circular reference. */ -static PyTypeObject sock_type; - #if defined(HAVE_POLL_H) #include <poll.h> #elif defined(HAVE_SYS_POLL_H) @@ -641,7 +679,7 @@ set_error(void) #if defined(HAVE_GETHOSTBYNAME_R) || defined (HAVE_GETHOSTBYNAME) || defined (HAVE_GETHOSTBYADDR) static PyObject * -set_herror(int h_error) +set_herror(socket_state *state, int h_error) { PyObject *v; @@ -651,7 +689,7 @@ set_herror(int h_error) v = Py_BuildValue("(is)", h_error, "host not found"); #endif if (v != NULL) { - PyErr_SetObject(socket_herror, v); + PyErr_SetObject(state->socket_herror, v); Py_DECREF(v); } @@ -662,7 +700,7 @@ set_herror(int h_error) #ifdef HAVE_GETADDRINFO static PyObject * -set_gaierror(int error) +set_gaierror(socket_state *state, int error) { PyObject *v; @@ -678,7 +716,7 @@ set_gaierror(int error) v = Py_BuildValue("(is)", error, "getaddrinfo failed"); #endif if (v != NULL) { - PyErr_SetObject(socket_gaierror, v); + PyErr_SetObject(state->socket_gaierror, v); Py_DECREF(v); } @@ -991,11 +1029,8 @@ sock_call(PySocketSockObject *s, /* Initialize a new socket object. 
*/ -/* Default timeout for new sockets */ -static _PyTime_t defaulttimeout = _PYTIME_FROMSECONDS(-1); - static int -init_sockobject(PySocketSockObject *s, +init_sockobject(socket_state *state, PySocketSockObject *s, SOCKET_T fd, int family, int type, int proto) { s->sock_fd = fd; @@ -1025,13 +1060,14 @@ init_sockobject(PySocketSockObject *s, else #endif { - s->sock_timeout = defaulttimeout; - if (defaulttimeout >= 0) { + s->sock_timeout = state->defaulttimeout; + if (state->defaulttimeout >= 0) { if (internal_setblocking(s, 0) == -1) { return -1; } } } + s->state = state; return 0; } @@ -1043,14 +1079,15 @@ init_sockobject(PySocketSockObject *s, in NEWOBJ()). */ static PySocketSockObject * -new_sockobject(SOCKET_T fd, int family, int type, int proto) +new_sockobject(socket_state *state, SOCKET_T fd, int family, int type, + int proto) { - PySocketSockObject *s; - s = (PySocketSockObject *) - PyType_GenericNew(&sock_type, NULL, NULL); - if (s == NULL) + PyTypeObject *tp = state->sock_type; + PySocketSockObject *s = (PySocketSockObject *)tp->tp_alloc(tp, 0); + if (s == NULL) { return NULL; - if (init_sockobject(s, fd, family, type, proto) == -1) { + } + if (init_sockobject(state, s, fd, family, type, proto) == -1) { Py_DECREF(s); return NULL; } @@ -1074,7 +1111,8 @@ static PyThread_type_lock netdb_lock; an error occurred; then an exception is raised. */ static int -setipaddr(const char *name, struct sockaddr *addr_ret, size_t addr_ret_size, int af) +setipaddr(socket_state *state, const char *name, struct sockaddr *addr_ret, + size_t addr_ret_size, int af) { struct addrinfo hints, *res; int error; @@ -1095,7 +1133,7 @@ setipaddr(const char *name, struct sockaddr *addr_ret, size_t addr_ret_size, int outcome of the first call. */ if (error) { res = NULL; // no-op, remind us that it is invalid; gh-100795 - set_gaierror(error); + set_gaierror(state, error); return -1; } switch (res->ai_family) { @@ -1206,7 +1244,7 @@ setipaddr(const char *name, struct sockaddr *addr_ret, size_t addr_ret_size, int Py_END_ALLOW_THREADS if (error) { res = NULL; // no-op, remind us that it is invalid; gh-100795 - set_gaierror(error); + set_gaierror(state, error); return -1; } if (res->ai_addrlen < addr_ret_size) @@ -1889,7 +1927,7 @@ getsockaddrarg(PySocketSockObject *s, PyObject *args, return 0; } struct sockaddr_in* addr = &addrbuf->in; - result = setipaddr(host.buf, (struct sockaddr *)addr, + result = setipaddr(s->state, host.buf, (struct sockaddr *)addr, sizeof(*addr), AF_INET); idna_cleanup(&host); if (result < 0) @@ -1934,7 +1972,7 @@ getsockaddrarg(PySocketSockObject *s, PyObject *args, return 0; } struct sockaddr_in6* addr = &addrbuf->in6; - result = setipaddr(host.buf, (struct sockaddr *)addr, + result = setipaddr(s->state, host.buf, (struct sockaddr *)addr, sizeof(*addr), AF_INET6); idna_cleanup(&host); if (result < 0) @@ -2813,10 +2851,6 @@ struct sock_accept { }; #if defined(HAVE_ACCEPT) || defined(HAVE_ACCEPT4) -#if defined(HAVE_ACCEPT4) && defined(SOCK_CLOEXEC) -/* accept4() is available on Linux 2.6.28+ and glibc 2.10 */ -static int accept4_works = -1; -#endif static int sock_accept_impl(PySocketSockObject *s, void *data) @@ -2835,15 +2869,16 @@ sock_accept_impl(PySocketSockObject *s, void *data) #endif #if defined(HAVE_ACCEPT4) && defined(SOCK_CLOEXEC) - if (accept4_works != 0) { + socket_state *state = s->state; + if (state->accept4_works != 0) { ctx->result = accept4(s->sock_fd, addr, paddrlen, SOCK_CLOEXEC); - if (ctx->result == INVALID_SOCKET && accept4_works == -1) { + if (ctx->result == 
INVALID_SOCKET && state->accept4_works == -1) { /* On Linux older than 2.6.28, accept4() fails with ENOSYS */ - accept4_works = (errno != ENOSYS); + state->accept4_works = (errno != ENOSYS); } } - if (accept4_works == 0) + if (state->accept4_works == 0) ctx->result = accept(s->sock_fd, addr, paddrlen); #else ctx->result = accept(s->sock_fd, addr, paddrlen); @@ -2896,7 +2931,8 @@ sock_accept(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) #else #if defined(HAVE_ACCEPT4) && defined(SOCK_CLOEXEC) - if (!accept4_works) + socket_state *state = s->state; + if (!state->accept4_works) #endif { if (_Py_set_inheritable(newfd, 0, NULL) < 0) { @@ -3538,7 +3574,8 @@ PyDoc_STRVAR(getsockname_doc, \n\ Return the address of the local endpoint. The format depends on the\n\ address family. For IPv4 sockets, the address info is a pair\n\ -(hostaddr, port)."); +(hostaddr, port). For IPv6 sockets, the address info is a 4-tuple\n\ +(hostaddr, port, flowinfo, scope_id)."); #endif @@ -5218,13 +5255,23 @@ sock_finalize(PySocketSockObject *s) PyErr_SetRaisedException(exc); } +static int +sock_traverse(PySocketSockObject *s, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(s)); + return 0; +} + static void sock_dealloc(PySocketSockObject *s) { - if (PyObject_CallFinalizerFromDealloc((PyObject *)s) < 0) + if (PyObject_CallFinalizerFromDealloc((PyObject *)s) < 0) { return; - - Py_TYPE(s)->tp_free((PyObject *)s); + } + PyTypeObject *tp = Py_TYPE(s); + PyObject_GC_UnTrack(s); + tp->tp_free((PyObject *)s); + Py_DECREF(tp); } @@ -5276,12 +5323,6 @@ sock_new(PyTypeObject *type, PyObject *args, PyObject *kwds) /* Initialize a new socket object. */ -#ifdef SOCK_CLOEXEC -/* socket() and socketpair() fail with EINVAL on Linux kernel older - * than 2.6.27 if SOCK_CLOEXEC flag is set in the socket type. */ -static int sock_cloexec_works = -1; -#endif - /*ARGSUSED*/ #ifndef HAVE_SOCKET @@ -5309,10 +5350,11 @@ sock_initobj_impl(PySocketSockObject *self, int family, int type, int proto, { SOCKET_T fd = INVALID_SOCKET; + socket_state *state = find_module_state_by_def(Py_TYPE(self)); #ifndef MS_WINDOWS #ifdef SOCK_CLOEXEC - int *atomic_flag_works = &sock_cloexec_works; + int *atomic_flag_works = &state->sock_cloexec_works; #else int *atomic_flag_works = NULL; #endif @@ -5467,15 +5509,15 @@ sock_initobj_impl(PySocketSockObject *self, int family, int type, int proto, /* UNIX */ Py_BEGIN_ALLOW_THREADS #ifdef SOCK_CLOEXEC - if (sock_cloexec_works != 0) { + if (state->sock_cloexec_works != 0) { fd = socket(family, type | SOCK_CLOEXEC, proto); - if (sock_cloexec_works == -1) { + if (state->sock_cloexec_works == -1) { if (fd >= 0) { - sock_cloexec_works = 1; + state->sock_cloexec_works = 1; } else if (errno == EINVAL) { /* Linux older than 2.6.27 does not support SOCK_CLOEXEC */ - sock_cloexec_works = 0; + state->sock_cloexec_works = 0; fd = socket(family, type, proto); } } @@ -5498,7 +5540,7 @@ sock_initobj_impl(PySocketSockObject *self, int family, int type, int proto, } #endif } - if (init_sockobject(self, fd, family, type, proto) == -1) { + if (init_sockobject(state, self, fd, family, type, proto) == -1) { SOCKETCLOSE(fd); return -1; } @@ -5510,55 +5552,26 @@ sock_initobj_impl(PySocketSockObject *self, int family, int type, int proto, /* Type object for socket objects. 
*/ -static PyTypeObject sock_type = { - PyVarObject_HEAD_INIT(0, 0) /* Must fill in type value later */ - "_socket.socket", /* tp_name */ - sizeof(PySocketSockObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)sock_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)sock_repr, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - sock_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - sock_methods, /* tp_methods */ - sock_memberlist, /* tp_members */ - sock_getsetlist, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - sock_initobj, /* tp_init */ - PyType_GenericAlloc, /* tp_alloc */ - sock_new, /* tp_new */ - PyObject_Del, /* tp_free */ - 0, /* tp_is_gc */ - 0, /* tp_bases */ - 0, /* tp_mro */ - 0, /* tp_cache */ - 0, /* tp_subclasses */ - 0, /* tp_weaklist */ - 0, /* tp_del */ - 0, /* tp_version_tag */ - (destructor)sock_finalize, /* tp_finalize */ +static PyType_Slot sock_slots[] = { + {Py_tp_dealloc, sock_dealloc}, + {Py_tp_traverse, sock_traverse}, + {Py_tp_repr, sock_repr}, + {Py_tp_doc, (void *)sock_doc}, + {Py_tp_methods, sock_methods}, + {Py_tp_members, sock_memberlist}, + {Py_tp_getset, sock_getsetlist}, + {Py_tp_init, sock_initobj}, + {Py_tp_new, sock_new}, + {Py_tp_finalize, sock_finalize}, + {0, NULL}, +}; + +static PyType_Spec sock_spec = { + .name = "_socket.socket", + .basicsize = sizeof(PySocketSockObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = sock_slots, }; @@ -5686,8 +5699,12 @@ socket_gethostbyname(PyObject *self, PyObject *args) if (PySys_Audit("socket.gethostbyname", "O", args) < 0) { goto finally; } - if (setipaddr(name, (struct sockaddr *)&addrbuf, sizeof(addrbuf), AF_INET) < 0) + socket_state *state = get_module_state(self); + int rc = setipaddr(state, name, (struct sockaddr *)&addrbuf, + sizeof(addrbuf), AF_INET); + if (rc < 0) { goto finally; + } ret = make_ipv4_addr(&addrbuf); finally: PyMem_Free(name); @@ -5718,7 +5735,8 @@ sock_decode_hostname(const char *name) /* Convenience function common to gethostbyname_ex and gethostbyaddr */ static PyObject * -gethost_common(struct hostent *h, struct sockaddr *addr, size_t alen, int af) +gethost_common(socket_state *state, struct hostent *h, struct sockaddr *addr, + size_t alen, int af) { char **pch; PyObject *rtn_tuple = (PyObject *)NULL; @@ -5729,7 +5747,7 @@ gethost_common(struct hostent *h, struct sockaddr *addr, size_t alen, int af) if (h == NULL) { /* Let's get real error message to return */ - set_herror(h_errno); + set_herror(state, h_errno); return NULL; } @@ -5876,8 +5894,10 @@ socket_gethostbyname_ex(PyObject *self, PyObject *args) if (PySys_Audit("socket.gethostbyname", "O", args) < 0) { goto finally; } - if (setipaddr(name, SAS2SA(&addr), sizeof(addr), AF_INET) < 0) + socket_state *state = get_module_state(self); + if (setipaddr(state, name, SAS2SA(&addr), sizeof(addr), AF_INET) < 0) { goto finally; + } Py_BEGIN_ALLOW_THREADS #ifdef HAVE_GETHOSTBYNAME_R #if defined(HAVE_GETHOSTBYNAME_R_6_ARG) @@ -5903,7 
+5923,7 @@ socket_gethostbyname_ex(PyObject *self, PyObject *args) Therefore, we cast the sockaddr_storage into sockaddr to access sa_family. */ sa = SAS2SA(&addr); - ret = gethost_common(h, SAS2SA(&addr), sizeof(addr), + ret = gethost_common(state, h, SAS2SA(&addr), sizeof(addr), sa->sa_family); #ifdef USE_GETHOSTBYNAME_LOCK PyThread_release_lock(netdb_lock); @@ -5959,8 +5979,10 @@ socket_gethostbyaddr(PyObject *self, PyObject *args) goto finally; } af = AF_UNSPEC; - if (setipaddr(ip_num, sa, sizeof(addr), af) < 0) + socket_state *state = get_module_state(self); + if (setipaddr(state, ip_num, sa, sizeof(addr), af) < 0) { goto finally; + } af = sa->sa_family; ap = NULL; /* al = 0; */ @@ -6001,7 +6023,7 @@ socket_gethostbyaddr(PyObject *self, PyObject *args) h = gethostbyaddr(ap, al, af); #endif /* HAVE_GETHOSTBYNAME_R */ Py_END_ALLOW_THREADS - ret = gethost_common(h, SAS2SA(&addr), sizeof(addr), af); + ret = gethost_common(state, h, SAS2SA(&addr), sizeof(addr), af); #ifdef USE_GETHOSTBYNAME_LOCK PyThread_release_lock(netdb_lock); #endif @@ -6220,8 +6242,9 @@ socket_socketpair(PyObject *self, PyObject *args) SOCKET_T sv[2]; int family, type = SOCK_STREAM, proto = 0; PyObject *res = NULL; + socket_state *state = get_module_state(self); #ifdef SOCK_CLOEXEC - int *atomic_flag_works = &sock_cloexec_works; + int *atomic_flag_works = &state->sock_cloexec_works; #else int *atomic_flag_works = NULL; #endif @@ -6239,15 +6262,15 @@ socket_socketpair(PyObject *self, PyObject *args) /* Create a pair of socket fds */ Py_BEGIN_ALLOW_THREADS #ifdef SOCK_CLOEXEC - if (sock_cloexec_works != 0) { + if (state->sock_cloexec_works != 0) { ret = socketpair(family, type | SOCK_CLOEXEC, proto, sv); - if (sock_cloexec_works == -1) { + if (state->sock_cloexec_works == -1) { if (ret >= 0) { - sock_cloexec_works = 1; + state->sock_cloexec_works = 1; } else if (errno == EINVAL) { /* Linux older than 2.6.27 does not support SOCK_CLOEXEC */ - sock_cloexec_works = 0; + state->sock_cloexec_works = 0; ret = socketpair(family, type, proto, sv); } } @@ -6267,10 +6290,10 @@ socket_socketpair(PyObject *self, PyObject *args) if (_Py_set_inheritable(sv[1], 0, atomic_flag_works) < 0) goto finally; - s0 = new_sockobject(sv[0], family, type, proto); + s0 = new_sockobject(state, sv[0], family, type, proto); if (s0 == NULL) goto finally; - s1 = new_sockobject(sv[1], family, type, proto); + s1 = new_sockobject(state, sv[1], family, type, proto); if (s1 == NULL) goto finally; res = PyTuple_Pack(2, s0, s1); @@ -6725,7 +6748,8 @@ socket_getaddrinfo(PyObject *self, PyObject *args, PyObject* kwargs) Py_END_ALLOW_THREADS if (error) { res0 = NULL; // gh-100795 - set_gaierror(error); + socket_state *state = get_module_state(self); + set_gaierror(state, error); goto err; } @@ -6824,7 +6848,8 @@ socket_getnameinfo(PyObject *self, PyObject *args) Py_END_ALLOW_THREADS if (error) { res = NULL; // gh-100795 - set_gaierror(error); + socket_state *state = get_module_state(self); + set_gaierror(state, error); goto fail; } if (res->ai_next) { @@ -6856,7 +6881,8 @@ socket_getnameinfo(PyObject *self, PyObject *args) error = getnameinfo(res->ai_addr, (socklen_t) res->ai_addrlen, hbuf, sizeof(hbuf), pbuf, sizeof(pbuf), flags); if (error) { - set_gaierror(error); + socket_state *state = get_module_state(self); + set_gaierror(state, error); goto fail; } @@ -6882,11 +6908,12 @@ Get host and port for a sockaddr."); static PyObject * socket_getdefaulttimeout(PyObject *self, PyObject *Py_UNUSED(ignored)) { - if (defaulttimeout < 0) { + socket_state *state = 
get_module_state(self); + if (state->defaulttimeout < 0) { Py_RETURN_NONE; } else { - double seconds = _PyTime_AsSecondsDouble(defaulttimeout); + double seconds = _PyTime_AsSecondsDouble(state->defaulttimeout); return PyFloat_FromDouble(seconds); } } @@ -6906,7 +6933,8 @@ socket_setdefaulttimeout(PyObject *self, PyObject *arg) if (socket_parse_timeout(&timeout, arg) < 0) return NULL; - defaulttimeout = timeout; + socket_state *state = get_module_state(self); + state->defaulttimeout = timeout; Py_RETURN_NONE; } @@ -7292,7 +7320,7 @@ sock_destroy_api(PyObject *capsule) } static PySocketModule_APIObject * -sock_get_api(void) +sock_get_api(socket_state *state) { PySocketModule_APIObject *capi = PyMem_Malloc(sizeof(PySocketModule_APIObject)); if (capi == NULL) { @@ -7300,7 +7328,7 @@ sock_get_api(void) return NULL; } - capi->Sock_Type = (PyTypeObject *)Py_NewRef(&sock_type); + capi->Sock_Type = (PyTypeObject *)Py_NewRef(state->sock_type); capi->error = Py_NewRef(PyExc_OSError); capi->timeout_error = Py_NewRef(PyExc_TimeoutError); return capi; @@ -7322,1409 +7350,1451 @@ PyDoc_STRVAR(socket_doc, \n\ See the socket module for documentation."); -static struct PyModuleDef socketmodule = { - PyModuleDef_HEAD_INIT, - PySocket_MODULE_NAME, - socket_doc, - -1, - socket_methods, - NULL, - NULL, - NULL, - NULL -}; - -PyMODINIT_FUNC -PyInit__socket(void) +static int +socket_exec(PyObject *m) { - PyObject *m, *has_ipv6; + if (!os_init()) { + goto error; + } - if (!os_init()) - return NULL; + socket_state *state = get_module_state(m); + state->defaulttimeout = _PYTIME_FROMSECONDS(-1); - Py_SET_TYPE(&sock_type, &PyType_Type); - m = PyModule_Create(&socketmodule); - if (m == NULL) - return NULL; +#if defined(HAVE_ACCEPT) || defined(HAVE_ACCEPT4) +#if defined(HAVE_ACCEPT4) && defined(SOCK_CLOEXEC) + state->accept4_works = -1; +#endif +#endif - PyModule_AddObject(m, "error", Py_NewRef(PyExc_OSError)); - socket_herror = PyErr_NewException("socket.herror", - PyExc_OSError, NULL); - if (socket_herror == NULL) - return NULL; - PyModule_AddObject(m, "herror", Py_NewRef(socket_herror)); - socket_gaierror = PyErr_NewException("socket.gaierror", PyExc_OSError, - NULL); - if (socket_gaierror == NULL) - return NULL; - PyModule_AddObject(m, "gaierror", Py_NewRef(socket_gaierror)); - PyModule_AddObjectRef(m, "timeout", PyExc_TimeoutError); +#ifdef SOCK_CLOEXEC + state->sock_cloexec_works = -1; +#endif - if (PyModule_AddObject(m, "SocketType", Py_NewRef(&sock_type)) != 0) - return NULL; - if (PyModule_AddObject(m, "socket", Py_NewRef(&sock_type)) != 0) - return NULL; +#define ADD_EXC(MOD, NAME, VAR, BASE) do { \ + VAR = PyErr_NewException("socket." 
NAME, BASE, NULL); \ + if (VAR == NULL) { \ + goto error; \ + } \ + if (PyModule_AddObjectRef(MOD, NAME, VAR) < 0) { \ + goto error; \ + } \ +} while (0) + + ADD_EXC(m, "herror", state->socket_herror, PyExc_OSError); + ADD_EXC(m, "gaierror", state->socket_gaierror, PyExc_OSError); + +#undef ADD_EXC + + if (PyModule_AddObjectRef(m, "error", PyExc_OSError) < 0) { + goto error; + } + if (PyModule_AddObjectRef(m, "timeout", PyExc_TimeoutError) < 0) { + goto error; + } + PyObject *sock_type = PyType_FromMetaclass(NULL, m, &sock_spec, NULL); + if (sock_type == NULL) { + goto error; + } + state->sock_type = (PyTypeObject *)sock_type; + if (PyModule_AddObjectRef(m, "SocketType", sock_type) < 0) { + goto error; + } + if (PyModule_AddType(m, state->sock_type) < 0) { + goto error; + } + + PyObject *has_ipv6; #ifdef ENABLE_IPV6 has_ipv6 = Py_True; #else has_ipv6 = Py_False; #endif - PyModule_AddObject(m, "has_ipv6", Py_NewRef(has_ipv6)); + if (PyModule_AddObjectRef(m, "has_ipv6", has_ipv6) < 0) { + goto error; + } /* Export C API */ - PySocketModule_APIObject *capi = sock_get_api(); + PySocketModule_APIObject *capi = sock_get_api(state); if (capi == NULL) { - Py_DECREF(m); - return NULL; + goto error; } PyObject *capsule = PyCapsule_New(capi, PySocket_CAPSULE_NAME, sock_destroy_api); if (capsule == NULL) { sock_free_api(capi); - Py_DECREF(m); - return NULL; + goto error; } - if (PyModule_AddObject(m, PySocket_CAPI_NAME, capsule) < 0) { - Py_DECREF(capsule); - Py_DECREF(m); - return NULL; + int rc = PyModule_AddObjectRef(m, PySocket_CAPI_NAME, capsule); + Py_DECREF(capsule); + if (rc < 0) { + goto error; } +#define ADD_INT_MACRO(MOD, INT) do { \ + if (PyModule_AddIntConstant(MOD, #INT, INT) < 0) { \ + goto error; \ + } \ +} while (0) + +#define ADD_INT_CONST(MOD, NAME, INT) do { \ + if (PyModule_AddIntConstant(MOD, NAME, INT) < 0) { \ + goto error; \ + } \ +} while (0) + +#define ADD_STR_CONST(MOD, NAME, STR) do { \ + if (PyModule_AddStringConstant(MOD, NAME, STR) < 0) { \ + goto error; \ + } \ +} while (0) + /* Address families (we only support AF_INET and AF_UNIX) */ #ifdef AF_UNSPEC - PyModule_AddIntMacro(m, AF_UNSPEC); + ADD_INT_MACRO(m, AF_UNSPEC); #endif - PyModule_AddIntMacro(m, AF_INET); + ADD_INT_MACRO(m, AF_INET); #if defined(AF_UNIX) - PyModule_AddIntMacro(m, AF_UNIX); + ADD_INT_MACRO(m, AF_UNIX); #endif /* AF_UNIX */ #ifdef AF_AX25 /* Amateur Radio AX.25 */ - PyModule_AddIntMacro(m, AF_AX25); + ADD_INT_MACRO(m, AF_AX25); #endif #ifdef AF_IPX - PyModule_AddIntMacro(m, AF_IPX); /* Novell IPX */ + ADD_INT_MACRO(m, AF_IPX); /* Novell IPX */ #endif #ifdef AF_APPLETALK /* Appletalk DDP */ - PyModule_AddIntMacro(m, AF_APPLETALK); + ADD_INT_MACRO(m, AF_APPLETALK); #endif #ifdef AF_NETROM /* Amateur radio NetROM */ - PyModule_AddIntMacro(m, AF_NETROM); + ADD_INT_MACRO(m, AF_NETROM); #endif #ifdef AF_BRIDGE /* Multiprotocol bridge */ - PyModule_AddIntMacro(m, AF_BRIDGE); + ADD_INT_MACRO(m, AF_BRIDGE); #endif #ifdef AF_ATMPVC /* ATM PVCs */ - PyModule_AddIntMacro(m, AF_ATMPVC); + ADD_INT_MACRO(m, AF_ATMPVC); #endif #ifdef AF_AAL5 /* Reserved for Werner's ATM */ - PyModule_AddIntMacro(m, AF_AAL5); + ADD_INT_MACRO(m, AF_AAL5); #endif #ifdef HAVE_SOCKADDR_ALG - PyModule_AddIntMacro(m, AF_ALG); /* Linux crypto */ + ADD_INT_MACRO(m, AF_ALG); /* Linux crypto */ #endif #ifdef AF_X25 /* Reserved for X.25 project */ - PyModule_AddIntMacro(m, AF_X25); + ADD_INT_MACRO(m, AF_X25); #endif #ifdef AF_INET6 - PyModule_AddIntMacro(m, AF_INET6); /* IP version 6 */ + ADD_INT_MACRO(m, AF_INET6); /* IP version 6 */ #endif 
#ifdef AF_ROSE /* Amateur Radio X.25 PLP */ - PyModule_AddIntMacro(m, AF_ROSE); + ADD_INT_MACRO(m, AF_ROSE); #endif #ifdef AF_DECnet /* Reserved for DECnet project */ - PyModule_AddIntMacro(m, AF_DECnet); + ADD_INT_MACRO(m, AF_DECnet); #endif #ifdef AF_NETBEUI /* Reserved for 802.2LLC project */ - PyModule_AddIntMacro(m, AF_NETBEUI); + ADD_INT_MACRO(m, AF_NETBEUI); #endif #ifdef AF_SECURITY /* Security callback pseudo AF */ - PyModule_AddIntMacro(m, AF_SECURITY); + ADD_INT_MACRO(m, AF_SECURITY); #endif #ifdef AF_KEY /* PF_KEY key management API */ - PyModule_AddIntMacro(m, AF_KEY); + ADD_INT_MACRO(m, AF_KEY); #endif #ifdef AF_NETLINK /* */ - PyModule_AddIntMacro(m, AF_NETLINK); - PyModule_AddIntMacro(m, NETLINK_ROUTE); + ADD_INT_MACRO(m, AF_NETLINK); + ADD_INT_MACRO(m, NETLINK_ROUTE); #ifdef NETLINK_SKIP - PyModule_AddIntMacro(m, NETLINK_SKIP); + ADD_INT_MACRO(m, NETLINK_SKIP); #endif #ifdef NETLINK_W1 - PyModule_AddIntMacro(m, NETLINK_W1); + ADD_INT_MACRO(m, NETLINK_W1); #endif - PyModule_AddIntMacro(m, NETLINK_USERSOCK); - PyModule_AddIntMacro(m, NETLINK_FIREWALL); + ADD_INT_MACRO(m, NETLINK_USERSOCK); + ADD_INT_MACRO(m, NETLINK_FIREWALL); #ifdef NETLINK_TCPDIAG - PyModule_AddIntMacro(m, NETLINK_TCPDIAG); + ADD_INT_MACRO(m, NETLINK_TCPDIAG); #endif #ifdef NETLINK_NFLOG - PyModule_AddIntMacro(m, NETLINK_NFLOG); + ADD_INT_MACRO(m, NETLINK_NFLOG); #endif #ifdef NETLINK_XFRM - PyModule_AddIntMacro(m, NETLINK_XFRM); + ADD_INT_MACRO(m, NETLINK_XFRM); #endif #ifdef NETLINK_ARPD - PyModule_AddIntMacro(m, NETLINK_ARPD); + ADD_INT_MACRO(m, NETLINK_ARPD); #endif #ifdef NETLINK_ROUTE6 - PyModule_AddIntMacro(m, NETLINK_ROUTE6); + ADD_INT_MACRO(m, NETLINK_ROUTE6); #endif - PyModule_AddIntMacro(m, NETLINK_IP6_FW); + ADD_INT_MACRO(m, NETLINK_IP6_FW); #ifdef NETLINK_DNRTMSG - PyModule_AddIntMacro(m, NETLINK_DNRTMSG); + ADD_INT_MACRO(m, NETLINK_DNRTMSG); #endif #ifdef NETLINK_TAPBASE - PyModule_AddIntMacro(m, NETLINK_TAPBASE); + ADD_INT_MACRO(m, NETLINK_TAPBASE); #endif #ifdef NETLINK_CRYPTO - PyModule_AddIntMacro(m, NETLINK_CRYPTO); + ADD_INT_MACRO(m, NETLINK_CRYPTO); #endif #endif /* AF_NETLINK */ #ifdef AF_QIPCRTR /* Qualcomm IPCROUTER */ - PyModule_AddIntMacro(m, AF_QIPCRTR); + ADD_INT_MACRO(m, AF_QIPCRTR); #endif #ifdef AF_VSOCK - PyModule_AddIntConstant(m, "AF_VSOCK", AF_VSOCK); - PyModule_AddIntConstant(m, "SO_VM_SOCKETS_BUFFER_SIZE", 0); - PyModule_AddIntConstant(m, "SO_VM_SOCKETS_BUFFER_MIN_SIZE", 1); - PyModule_AddIntConstant(m, "SO_VM_SOCKETS_BUFFER_MAX_SIZE", 2); - PyModule_AddIntConstant(m, "VMADDR_CID_ANY", 0xffffffff); - PyModule_AddIntConstant(m, "VMADDR_PORT_ANY", 0xffffffff); - PyModule_AddIntConstant(m, "VMADDR_CID_HOST", 2); - PyModule_AddIntConstant(m, "VM_SOCKETS_INVALID_VERSION", 0xffffffff); - PyModule_AddIntConstant(m, "IOCTL_VM_SOCKETS_GET_LOCAL_CID", _IO(7, 0xb9)); + ADD_INT_CONST(m, "AF_VSOCK", AF_VSOCK); + ADD_INT_CONST(m, "SO_VM_SOCKETS_BUFFER_SIZE", 0); + ADD_INT_CONST(m, "SO_VM_SOCKETS_BUFFER_MIN_SIZE", 1); + ADD_INT_CONST(m, "SO_VM_SOCKETS_BUFFER_MAX_SIZE", 2); + ADD_INT_CONST(m, "VMADDR_CID_ANY", 0xffffffff); + ADD_INT_CONST(m, "VMADDR_PORT_ANY", 0xffffffff); + ADD_INT_CONST(m, "VMADDR_CID_HOST", 2); + ADD_INT_CONST(m, "VM_SOCKETS_INVALID_VERSION", 0xffffffff); + ADD_INT_CONST(m, "IOCTL_VM_SOCKETS_GET_LOCAL_CID", _IO(7, 0xb9)); #endif #ifdef AF_ROUTE /* Alias to emulate 4.4BSD */ - PyModule_AddIntMacro(m, AF_ROUTE); + ADD_INT_MACRO(m, AF_ROUTE); #endif #ifdef AF_LINK - PyModule_AddIntMacro(m, AF_LINK); + ADD_INT_MACRO(m, AF_LINK); #endif #ifdef AF_ASH /* Ash */ - 
PyModule_AddIntMacro(m, AF_ASH); + ADD_INT_MACRO(m, AF_ASH); #endif #ifdef AF_ECONET /* Acorn Econet */ - PyModule_AddIntMacro(m, AF_ECONET); + ADD_INT_MACRO(m, AF_ECONET); #endif #ifdef AF_ATMSVC /* ATM SVCs */ - PyModule_AddIntMacro(m, AF_ATMSVC); + ADD_INT_MACRO(m, AF_ATMSVC); #endif #ifdef AF_SNA /* Linux SNA Project (nutters!) */ - PyModule_AddIntMacro(m, AF_SNA); + ADD_INT_MACRO(m, AF_SNA); #endif #ifdef AF_IRDA /* IRDA sockets */ - PyModule_AddIntMacro(m, AF_IRDA); + ADD_INT_MACRO(m, AF_IRDA); #endif #ifdef AF_PPPOX /* PPPoX sockets */ - PyModule_AddIntMacro(m, AF_PPPOX); + ADD_INT_MACRO(m, AF_PPPOX); #endif #ifdef AF_WANPIPE /* Wanpipe API Sockets */ - PyModule_AddIntMacro(m, AF_WANPIPE); + ADD_INT_MACRO(m, AF_WANPIPE); #endif #ifdef AF_LLC /* Linux LLC */ - PyModule_AddIntMacro(m, AF_LLC); + ADD_INT_MACRO(m, AF_LLC); #endif #ifdef HAVE_AF_HYPERV /* Hyper-V sockets */ - PyModule_AddIntMacro(m, AF_HYPERV); + ADD_INT_MACRO(m, AF_HYPERV); /* for proto */ - PyModule_AddIntMacro(m, HV_PROTOCOL_RAW); + ADD_INT_MACRO(m, HV_PROTOCOL_RAW); /* for setsockopt() */ - PyModule_AddIntMacro(m, HVSOCKET_CONNECT_TIMEOUT); - PyModule_AddIntMacro(m, HVSOCKET_CONNECT_TIMEOUT_MAX); - PyModule_AddIntMacro(m, HVSOCKET_CONNECTED_SUSPEND); - PyModule_AddIntMacro(m, HVSOCKET_ADDRESS_FLAG_PASSTHRU); + ADD_INT_MACRO(m, HVSOCKET_CONNECT_TIMEOUT); + ADD_INT_MACRO(m, HVSOCKET_CONNECT_TIMEOUT_MAX); + ADD_INT_MACRO(m, HVSOCKET_CONNECTED_SUSPEND); + ADD_INT_MACRO(m, HVSOCKET_ADDRESS_FLAG_PASSTHRU); /* for bind() or connect() */ - PyModule_AddStringConstant(m, "HV_GUID_ZERO", "00000000-0000-0000-0000-000000000000"); - PyModule_AddStringConstant(m, "HV_GUID_WILDCARD", "00000000-0000-0000-0000-000000000000"); - PyModule_AddStringConstant(m, "HV_GUID_BROADCAST", "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF"); - PyModule_AddStringConstant(m, "HV_GUID_CHILDREN", "90DB8B89-0D35-4F79-8CE9-49EA0AC8B7CD"); - PyModule_AddStringConstant(m, "HV_GUID_LOOPBACK", "E0E16197-DD56-4A10-9195-5EE7A155A838"); - PyModule_AddStringConstant(m, "HV_GUID_PARENT", "A42E7CDA-D03F-480C-9CC2-A4DE20ABB878"); + ADD_STR_CONST(m, "HV_GUID_ZERO", "00000000-0000-0000-0000-000000000000"); + ADD_STR_CONST(m, "HV_GUID_WILDCARD", "00000000-0000-0000-0000-000000000000"); + ADD_STR_CONST(m, "HV_GUID_BROADCAST", "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF"); + ADD_STR_CONST(m, "HV_GUID_CHILDREN", "90DB8B89-0D35-4F79-8CE9-49EA0AC8B7CD"); + ADD_STR_CONST(m, "HV_GUID_LOOPBACK", "E0E16197-DD56-4A10-9195-5EE7A155A838"); + ADD_STR_CONST(m, "HV_GUID_PARENT", "A42E7CDA-D03F-480C-9CC2-A4DE20ABB878"); #endif /* HAVE_AF_HYPERV */ #ifdef USE_BLUETOOTH - PyModule_AddIntMacro(m, AF_BLUETOOTH); + ADD_INT_MACRO(m, AF_BLUETOOTH); #ifdef BTPROTO_L2CAP - PyModule_AddIntMacro(m, BTPROTO_L2CAP); + ADD_INT_MACRO(m, BTPROTO_L2CAP); #endif /* BTPROTO_L2CAP */ #ifdef BTPROTO_HCI - PyModule_AddIntMacro(m, BTPROTO_HCI); - PyModule_AddIntMacro(m, SOL_HCI); + ADD_INT_MACRO(m, BTPROTO_HCI); + ADD_INT_MACRO(m, SOL_HCI); #if !defined(__NetBSD__) && !defined(__DragonFly__) - PyModule_AddIntMacro(m, HCI_FILTER); + ADD_INT_MACRO(m, HCI_FILTER); #if !defined(__FreeBSD__) - PyModule_AddIntMacro(m, HCI_TIME_STAMP); - PyModule_AddIntMacro(m, HCI_DATA_DIR); + ADD_INT_MACRO(m, HCI_TIME_STAMP); + ADD_INT_MACRO(m, HCI_DATA_DIR); #endif /* !__FreeBSD__ */ #endif /* !__NetBSD__ && !__DragonFly__ */ #endif /* BTPROTO_HCI */ #ifdef BTPROTO_RFCOMM - PyModule_AddIntMacro(m, BTPROTO_RFCOMM); + ADD_INT_MACRO(m, BTPROTO_RFCOMM); #endif /* BTPROTO_RFCOMM */ - PyModule_AddStringConstant(m, "BDADDR_ANY", 
"00:00:00:00:00:00"); - PyModule_AddStringConstant(m, "BDADDR_LOCAL", "00:00:00:FF:FF:FF"); + ADD_STR_CONST(m, "BDADDR_ANY", "00:00:00:00:00:00"); + ADD_STR_CONST(m, "BDADDR_LOCAL", "00:00:00:FF:FF:FF"); #ifdef BTPROTO_SCO - PyModule_AddIntMacro(m, BTPROTO_SCO); + ADD_INT_MACRO(m, BTPROTO_SCO); #endif /* BTPROTO_SCO */ #endif /* USE_BLUETOOTH */ #ifdef AF_CAN /* Controller Area Network */ - PyModule_AddIntMacro(m, AF_CAN); + ADD_INT_MACRO(m, AF_CAN); #endif #ifdef PF_CAN /* Controller Area Network */ - PyModule_AddIntMacro(m, PF_CAN); + ADD_INT_MACRO(m, PF_CAN); #endif /* Reliable Datagram Sockets */ #ifdef AF_RDS - PyModule_AddIntMacro(m, AF_RDS); + ADD_INT_MACRO(m, AF_RDS); #endif #ifdef PF_RDS - PyModule_AddIntMacro(m, PF_RDS); + ADD_INT_MACRO(m, PF_RDS); #endif /* Kernel event messages */ #ifdef PF_SYSTEM - PyModule_AddIntMacro(m, PF_SYSTEM); + ADD_INT_MACRO(m, PF_SYSTEM); #endif #ifdef AF_SYSTEM - PyModule_AddIntMacro(m, AF_SYSTEM); + ADD_INT_MACRO(m, AF_SYSTEM); #endif #ifdef AF_PACKET - PyModule_AddIntMacro(m, AF_PACKET); + ADD_INT_MACRO(m, AF_PACKET); #endif #ifdef PF_PACKET - PyModule_AddIntMacro(m, PF_PACKET); + ADD_INT_MACRO(m, PF_PACKET); #endif #ifdef PACKET_HOST - PyModule_AddIntMacro(m, PACKET_HOST); + ADD_INT_MACRO(m, PACKET_HOST); #endif #ifdef PACKET_BROADCAST - PyModule_AddIntMacro(m, PACKET_BROADCAST); + ADD_INT_MACRO(m, PACKET_BROADCAST); #endif #ifdef PACKET_MULTICAST - PyModule_AddIntMacro(m, PACKET_MULTICAST); + ADD_INT_MACRO(m, PACKET_MULTICAST); #endif #ifdef PACKET_OTHERHOST - PyModule_AddIntMacro(m, PACKET_OTHERHOST); + ADD_INT_MACRO(m, PACKET_OTHERHOST); #endif #ifdef PACKET_OUTGOING - PyModule_AddIntMacro(m, PACKET_OUTGOING); + ADD_INT_MACRO(m, PACKET_OUTGOING); #endif #ifdef PACKET_LOOPBACK - PyModule_AddIntMacro(m, PACKET_LOOPBACK); + ADD_INT_MACRO(m, PACKET_LOOPBACK); #endif #ifdef PACKET_FASTROUTE - PyModule_AddIntMacro(m, PACKET_FASTROUTE); + ADD_INT_MACRO(m, PACKET_FASTROUTE); #endif #ifdef HAVE_LINUX_TIPC_H - PyModule_AddIntMacro(m, AF_TIPC); + ADD_INT_MACRO(m, AF_TIPC); /* for addresses */ - PyModule_AddIntMacro(m, TIPC_ADDR_NAMESEQ); - PyModule_AddIntMacro(m, TIPC_ADDR_NAME); - PyModule_AddIntMacro(m, TIPC_ADDR_ID); + ADD_INT_MACRO(m, TIPC_ADDR_NAMESEQ); + ADD_INT_MACRO(m, TIPC_ADDR_NAME); + ADD_INT_MACRO(m, TIPC_ADDR_ID); - PyModule_AddIntMacro(m, TIPC_ZONE_SCOPE); - PyModule_AddIntMacro(m, TIPC_CLUSTER_SCOPE); - PyModule_AddIntMacro(m, TIPC_NODE_SCOPE); + ADD_INT_MACRO(m, TIPC_ZONE_SCOPE); + ADD_INT_MACRO(m, TIPC_CLUSTER_SCOPE); + ADD_INT_MACRO(m, TIPC_NODE_SCOPE); /* for setsockopt() */ - PyModule_AddIntMacro(m, SOL_TIPC); - PyModule_AddIntMacro(m, TIPC_IMPORTANCE); - PyModule_AddIntMacro(m, TIPC_SRC_DROPPABLE); - PyModule_AddIntMacro(m, TIPC_DEST_DROPPABLE); - PyModule_AddIntMacro(m, TIPC_CONN_TIMEOUT); + ADD_INT_MACRO(m, SOL_TIPC); + ADD_INT_MACRO(m, TIPC_IMPORTANCE); + ADD_INT_MACRO(m, TIPC_SRC_DROPPABLE); + ADD_INT_MACRO(m, TIPC_DEST_DROPPABLE); + ADD_INT_MACRO(m, TIPC_CONN_TIMEOUT); - PyModule_AddIntMacro(m, TIPC_LOW_IMPORTANCE); - PyModule_AddIntMacro(m, TIPC_MEDIUM_IMPORTANCE); - PyModule_AddIntMacro(m, TIPC_HIGH_IMPORTANCE); - PyModule_AddIntMacro(m, TIPC_CRITICAL_IMPORTANCE); + ADD_INT_MACRO(m, TIPC_LOW_IMPORTANCE); + ADD_INT_MACRO(m, TIPC_MEDIUM_IMPORTANCE); + ADD_INT_MACRO(m, TIPC_HIGH_IMPORTANCE); + ADD_INT_MACRO(m, TIPC_CRITICAL_IMPORTANCE); /* for subscriptions */ - PyModule_AddIntMacro(m, TIPC_SUB_PORTS); - PyModule_AddIntMacro(m, TIPC_SUB_SERVICE); + ADD_INT_MACRO(m, TIPC_SUB_PORTS); + ADD_INT_MACRO(m, TIPC_SUB_SERVICE); #ifdef 
TIPC_SUB_CANCEL /* doesn't seem to be available everywhere */ - PyModule_AddIntMacro(m, TIPC_SUB_CANCEL); + ADD_INT_MACRO(m, TIPC_SUB_CANCEL); #endif - PyModule_AddIntMacro(m, TIPC_WAIT_FOREVER); - PyModule_AddIntMacro(m, TIPC_PUBLISHED); - PyModule_AddIntMacro(m, TIPC_WITHDRAWN); - PyModule_AddIntMacro(m, TIPC_SUBSCR_TIMEOUT); - PyModule_AddIntMacro(m, TIPC_CFG_SRV); - PyModule_AddIntMacro(m, TIPC_TOP_SRV); + ADD_INT_MACRO(m, TIPC_WAIT_FOREVER); + ADD_INT_MACRO(m, TIPC_PUBLISHED); + ADD_INT_MACRO(m, TIPC_WITHDRAWN); + ADD_INT_MACRO(m, TIPC_SUBSCR_TIMEOUT); + ADD_INT_MACRO(m, TIPC_CFG_SRV); + ADD_INT_MACRO(m, TIPC_TOP_SRV); #endif #ifdef HAVE_SOCKADDR_ALG /* Socket options */ - PyModule_AddIntMacro(m, ALG_SET_KEY); - PyModule_AddIntMacro(m, ALG_SET_IV); - PyModule_AddIntMacro(m, ALG_SET_OP); - PyModule_AddIntMacro(m, ALG_SET_AEAD_ASSOCLEN); - PyModule_AddIntMacro(m, ALG_SET_AEAD_AUTHSIZE); - PyModule_AddIntMacro(m, ALG_SET_PUBKEY); + ADD_INT_MACRO(m, ALG_SET_KEY); + ADD_INT_MACRO(m, ALG_SET_IV); + ADD_INT_MACRO(m, ALG_SET_OP); + ADD_INT_MACRO(m, ALG_SET_AEAD_ASSOCLEN); + ADD_INT_MACRO(m, ALG_SET_AEAD_AUTHSIZE); + ADD_INT_MACRO(m, ALG_SET_PUBKEY); /* Operations */ - PyModule_AddIntMacro(m, ALG_OP_DECRYPT); - PyModule_AddIntMacro(m, ALG_OP_ENCRYPT); - PyModule_AddIntMacro(m, ALG_OP_SIGN); - PyModule_AddIntMacro(m, ALG_OP_VERIFY); + ADD_INT_MACRO(m, ALG_OP_DECRYPT); + ADD_INT_MACRO(m, ALG_OP_ENCRYPT); + ADD_INT_MACRO(m, ALG_OP_SIGN); + ADD_INT_MACRO(m, ALG_OP_VERIFY); #endif /* IEEE 802.3 protocol numbers required for a standard TCP/IP network stack */ #ifdef ETHERTYPE_ARP - PyModule_AddIntMacro(m, ETHERTYPE_ARP); + ADD_INT_MACRO(m, ETHERTYPE_ARP); #endif #ifdef ETHERTYPE_IP - PyModule_AddIntMacro(m, ETHERTYPE_IP); + ADD_INT_MACRO(m, ETHERTYPE_IP); #endif #ifdef ETHERTYPE_IPV6 - PyModule_AddIntMacro(m, ETHERTYPE_IPV6); + ADD_INT_MACRO(m, ETHERTYPE_IPV6); #endif #ifdef ETHERTYPE_VLAN - PyModule_AddIntMacro(m, ETHERTYPE_VLAN); + ADD_INT_MACRO(m, ETHERTYPE_VLAN); #endif /* Linux pseudo-protocol for sniffing every packet */ #ifdef ETH_P_ALL - PyModule_AddIntMacro(m, ETH_P_ALL); + ADD_INT_MACRO(m, ETH_P_ALL); #endif /* Socket types */ - PyModule_AddIntMacro(m, SOCK_STREAM); - PyModule_AddIntMacro(m, SOCK_DGRAM); + ADD_INT_MACRO(m, SOCK_STREAM); + ADD_INT_MACRO(m, SOCK_DGRAM); /* We have incomplete socket support. 
*/ #ifdef SOCK_RAW /* SOCK_RAW is marked as optional in the POSIX specification */ - PyModule_AddIntMacro(m, SOCK_RAW); + ADD_INT_MACRO(m, SOCK_RAW); #endif #ifdef SOCK_SEQPACKET - PyModule_AddIntMacro(m, SOCK_SEQPACKET); + ADD_INT_MACRO(m, SOCK_SEQPACKET); #endif #if defined(SOCK_RDM) - PyModule_AddIntMacro(m, SOCK_RDM); + ADD_INT_MACRO(m, SOCK_RDM); #endif #ifdef SOCK_CLOEXEC - PyModule_AddIntMacro(m, SOCK_CLOEXEC); + ADD_INT_MACRO(m, SOCK_CLOEXEC); #endif #ifdef SOCK_NONBLOCK - PyModule_AddIntMacro(m, SOCK_NONBLOCK); + ADD_INT_MACRO(m, SOCK_NONBLOCK); #endif #ifdef SO_DEBUG - PyModule_AddIntMacro(m, SO_DEBUG); + ADD_INT_MACRO(m, SO_DEBUG); #endif #ifdef SO_ACCEPTCONN - PyModule_AddIntMacro(m, SO_ACCEPTCONN); + ADD_INT_MACRO(m, SO_ACCEPTCONN); #endif #ifdef SO_REUSEADDR - PyModule_AddIntMacro(m, SO_REUSEADDR); + ADD_INT_MACRO(m, SO_REUSEADDR); #endif #ifdef SO_EXCLUSIVEADDRUSE - PyModule_AddIntMacro(m, SO_EXCLUSIVEADDRUSE); + ADD_INT_MACRO(m, SO_EXCLUSIVEADDRUSE); #endif #ifdef SO_INCOMING_CPU - PyModule_AddIntMacro(m, SO_INCOMING_CPU); + ADD_INT_MACRO(m, SO_INCOMING_CPU); #endif #ifdef SO_KEEPALIVE - PyModule_AddIntMacro(m, SO_KEEPALIVE); + ADD_INT_MACRO(m, SO_KEEPALIVE); #endif #ifdef SO_DONTROUTE - PyModule_AddIntMacro(m, SO_DONTROUTE); + ADD_INT_MACRO(m, SO_DONTROUTE); #endif #ifdef SO_BROADCAST - PyModule_AddIntMacro(m, SO_BROADCAST); + ADD_INT_MACRO(m, SO_BROADCAST); #endif #ifdef SO_USELOOPBACK - PyModule_AddIntMacro(m, SO_USELOOPBACK); + ADD_INT_MACRO(m, SO_USELOOPBACK); #endif #ifdef SO_LINGER - PyModule_AddIntMacro(m, SO_LINGER); + ADD_INT_MACRO(m, SO_LINGER); #endif #ifdef SO_OOBINLINE - PyModule_AddIntMacro(m, SO_OOBINLINE); + ADD_INT_MACRO(m, SO_OOBINLINE); #endif #ifndef __GNU__ #ifdef SO_REUSEPORT - PyModule_AddIntMacro(m, SO_REUSEPORT); + ADD_INT_MACRO(m, SO_REUSEPORT); #endif #endif #ifdef SO_SNDBUF - PyModule_AddIntMacro(m, SO_SNDBUF); + ADD_INT_MACRO(m, SO_SNDBUF); #endif #ifdef SO_RCVBUF - PyModule_AddIntMacro(m, SO_RCVBUF); + ADD_INT_MACRO(m, SO_RCVBUF); #endif #ifdef SO_SNDLOWAT - PyModule_AddIntMacro(m, SO_SNDLOWAT); + ADD_INT_MACRO(m, SO_SNDLOWAT); #endif #ifdef SO_RCVLOWAT - PyModule_AddIntMacro(m, SO_RCVLOWAT); + ADD_INT_MACRO(m, SO_RCVLOWAT); #endif #ifdef SO_SNDTIMEO - PyModule_AddIntMacro(m, SO_SNDTIMEO); + ADD_INT_MACRO(m, SO_SNDTIMEO); #endif #ifdef SO_RCVTIMEO - PyModule_AddIntMacro(m, SO_RCVTIMEO); + ADD_INT_MACRO(m, SO_RCVTIMEO); #endif #ifdef SO_ERROR - PyModule_AddIntMacro(m, SO_ERROR); + ADD_INT_MACRO(m, SO_ERROR); #endif #ifdef SO_TYPE - PyModule_AddIntMacro(m, SO_TYPE); + ADD_INT_MACRO(m, SO_TYPE); #endif #ifdef SO_SETFIB - PyModule_AddIntMacro(m, SO_SETFIB); + ADD_INT_MACRO(m, SO_SETFIB); #endif #ifdef SO_PASSCRED - PyModule_AddIntMacro(m, SO_PASSCRED); + ADD_INT_MACRO(m, SO_PASSCRED); #endif #ifdef SO_PEERCRED - PyModule_AddIntMacro(m, SO_PEERCRED); + ADD_INT_MACRO(m, SO_PEERCRED); #endif #ifdef LOCAL_PEERCRED - PyModule_AddIntMacro(m, LOCAL_PEERCRED); + ADD_INT_MACRO(m, LOCAL_PEERCRED); #endif #ifdef SO_PASSSEC - PyModule_AddIntMacro(m, SO_PASSSEC); + ADD_INT_MACRO(m, SO_PASSSEC); #endif #ifdef SO_PEERSEC - PyModule_AddIntMacro(m, SO_PEERSEC); + ADD_INT_MACRO(m, SO_PEERSEC); #endif #ifdef SO_BINDTODEVICE - PyModule_AddIntMacro(m, SO_BINDTODEVICE); + ADD_INT_MACRO(m, SO_BINDTODEVICE); #endif #ifdef SO_PRIORITY - PyModule_AddIntMacro(m, SO_PRIORITY); + ADD_INT_MACRO(m, SO_PRIORITY); #endif #ifdef SO_MARK - PyModule_AddIntMacro(m, SO_MARK); + ADD_INT_MACRO(m, SO_MARK); #endif #ifdef SO_USER_COOKIE - PyModule_AddIntMacro(m, SO_USER_COOKIE); + 
ADD_INT_MACRO(m, SO_USER_COOKIE); #endif #ifdef SO_RTABLE - PyModule_AddIntMacro(m, SO_RTABLE); + ADD_INT_MACRO(m, SO_RTABLE); #endif #ifdef SO_DOMAIN - PyModule_AddIntMacro(m, SO_DOMAIN); + ADD_INT_MACRO(m, SO_DOMAIN); #endif #ifdef SO_PROTOCOL - PyModule_AddIntMacro(m, SO_PROTOCOL); + ADD_INT_MACRO(m, SO_PROTOCOL); #endif #ifdef LOCAL_CREDS - PyModule_AddIntMacro(m, LOCAL_CREDS); + ADD_INT_MACRO(m, LOCAL_CREDS); #endif #ifdef LOCAL_CREDS_PERSISTENT - PyModule_AddIntMacro(m, LOCAL_CREDS_PERSISTENT); + ADD_INT_MACRO(m, LOCAL_CREDS_PERSISTENT); #endif /* Maximum number of connections for "listen" */ #ifdef SOMAXCONN - PyModule_AddIntMacro(m, SOMAXCONN); + ADD_INT_MACRO(m, SOMAXCONN); #else - PyModule_AddIntConstant(m, "SOMAXCONN", 5); /* Common value */ + ADD_INT_CONST(m, "SOMAXCONN", 5); /* Common value */ #endif /* Ancillary message types */ #ifdef SCM_RIGHTS - PyModule_AddIntMacro(m, SCM_RIGHTS); + ADD_INT_MACRO(m, SCM_RIGHTS); #endif #ifdef SCM_CREDENTIALS - PyModule_AddIntMacro(m, SCM_CREDENTIALS); + ADD_INT_MACRO(m, SCM_CREDENTIALS); #endif #ifdef SCM_CREDS - PyModule_AddIntMacro(m, SCM_CREDS); + ADD_INT_MACRO(m, SCM_CREDS); #endif #ifdef SCM_CREDS2 - PyModule_AddIntMacro(m, SCM_CREDS2); + ADD_INT_MACRO(m, SCM_CREDS2); #endif /* Flags for send, recv */ #ifdef MSG_OOB - PyModule_AddIntMacro(m, MSG_OOB); + ADD_INT_MACRO(m, MSG_OOB); #endif #ifdef MSG_PEEK - PyModule_AddIntMacro(m, MSG_PEEK); + ADD_INT_MACRO(m, MSG_PEEK); #endif #ifdef MSG_DONTROUTE - PyModule_AddIntMacro(m, MSG_DONTROUTE); + ADD_INT_MACRO(m, MSG_DONTROUTE); #endif #ifdef MSG_DONTWAIT - PyModule_AddIntMacro(m, MSG_DONTWAIT); + ADD_INT_MACRO(m, MSG_DONTWAIT); #endif #ifdef MSG_EOR - PyModule_AddIntMacro(m, MSG_EOR); + ADD_INT_MACRO(m, MSG_EOR); #endif #ifdef MSG_TRUNC // workaround for https://github.com/WebAssembly/wasi-libc/issues/305 #if defined(__wasi__) && !defined(__WASI_RIFLAGS_RECV_DATA_TRUNCATED) # define __WASI_RIFLAGS_RECV_DATA_TRUNCATED 2 #endif - PyModule_AddIntMacro(m, MSG_TRUNC); + ADD_INT_MACRO(m, MSG_TRUNC); #endif #ifdef MSG_CTRUNC - PyModule_AddIntMacro(m, MSG_CTRUNC); + ADD_INT_MACRO(m, MSG_CTRUNC); #endif #ifdef MSG_WAITALL - PyModule_AddIntMacro(m, MSG_WAITALL); + ADD_INT_MACRO(m, MSG_WAITALL); #endif #ifdef MSG_BTAG - PyModule_AddIntMacro(m, MSG_BTAG); + ADD_INT_MACRO(m, MSG_BTAG); #endif #ifdef MSG_ETAG - PyModule_AddIntMacro(m, MSG_ETAG); + ADD_INT_MACRO(m, MSG_ETAG); #endif #ifdef MSG_NOSIGNAL - PyModule_AddIntMacro(m, MSG_NOSIGNAL); + ADD_INT_MACRO(m, MSG_NOSIGNAL); #endif #ifdef MSG_NOTIFICATION - PyModule_AddIntMacro(m, MSG_NOTIFICATION); + ADD_INT_MACRO(m, MSG_NOTIFICATION); #endif #ifdef MSG_CMSG_CLOEXEC - PyModule_AddIntMacro(m, MSG_CMSG_CLOEXEC); + ADD_INT_MACRO(m, MSG_CMSG_CLOEXEC); #endif #ifdef MSG_ERRQUEUE - PyModule_AddIntMacro(m, MSG_ERRQUEUE); + ADD_INT_MACRO(m, MSG_ERRQUEUE); #endif #ifdef MSG_CONFIRM - PyModule_AddIntMacro(m, MSG_CONFIRM); + ADD_INT_MACRO(m, MSG_CONFIRM); #endif #ifdef MSG_MORE - PyModule_AddIntMacro(m, MSG_MORE); + ADD_INT_MACRO(m, MSG_MORE); #endif #ifdef MSG_EOF - PyModule_AddIntMacro(m, MSG_EOF); + ADD_INT_MACRO(m, MSG_EOF); #endif #ifdef MSG_BCAST - PyModule_AddIntMacro(m, MSG_BCAST); + ADD_INT_MACRO(m, MSG_BCAST); #endif #ifdef MSG_MCAST - PyModule_AddIntMacro(m, MSG_MCAST); + ADD_INT_MACRO(m, MSG_MCAST); #endif #ifdef MSG_FASTOPEN - PyModule_AddIntMacro(m, MSG_FASTOPEN); + ADD_INT_MACRO(m, MSG_FASTOPEN); #endif /* Protocol level and numbers, usable for [gs]etsockopt */ #ifdef SOL_SOCKET - PyModule_AddIntMacro(m, SOL_SOCKET); + ADD_INT_MACRO(m, 
SOL_SOCKET); #endif #ifdef SOL_IP - PyModule_AddIntMacro(m, SOL_IP); + ADD_INT_MACRO(m, SOL_IP); #else - PyModule_AddIntConstant(m, "SOL_IP", 0); + ADD_INT_CONST(m, "SOL_IP", 0); #endif #ifdef SOL_IPX - PyModule_AddIntMacro(m, SOL_IPX); + ADD_INT_MACRO(m, SOL_IPX); #endif #ifdef SOL_AX25 - PyModule_AddIntMacro(m, SOL_AX25); + ADD_INT_MACRO(m, SOL_AX25); #endif #ifdef SOL_ATALK - PyModule_AddIntMacro(m, SOL_ATALK); + ADD_INT_MACRO(m, SOL_ATALK); #endif #ifdef SOL_NETROM - PyModule_AddIntMacro(m, SOL_NETROM); + ADD_INT_MACRO(m, SOL_NETROM); #endif #ifdef SOL_ROSE - PyModule_AddIntMacro(m, SOL_ROSE); + ADD_INT_MACRO(m, SOL_ROSE); #endif #ifdef SOL_TCP - PyModule_AddIntMacro(m, SOL_TCP); + ADD_INT_MACRO(m, SOL_TCP); #else - PyModule_AddIntConstant(m, "SOL_TCP", 6); + ADD_INT_CONST(m, "SOL_TCP", 6); #endif #ifdef SOL_UDP - PyModule_AddIntMacro(m, SOL_UDP); + ADD_INT_MACRO(m, SOL_UDP); #else - PyModule_AddIntConstant(m, "SOL_UDP", 17); + ADD_INT_CONST(m, "SOL_UDP", 17); #endif #ifdef SOL_CAN_BASE - PyModule_AddIntMacro(m, SOL_CAN_BASE); + ADD_INT_MACRO(m, SOL_CAN_BASE); #endif #ifdef SOL_CAN_RAW - PyModule_AddIntMacro(m, SOL_CAN_RAW); - PyModule_AddIntMacro(m, CAN_RAW); + ADD_INT_MACRO(m, SOL_CAN_RAW); + ADD_INT_MACRO(m, CAN_RAW); #endif #if defined(HAVE_LINUX_CAN_H) || defined(HAVE_NETCAN_CAN_H) - PyModule_AddIntMacro(m, CAN_EFF_FLAG); - PyModule_AddIntMacro(m, CAN_RTR_FLAG); - PyModule_AddIntMacro(m, CAN_ERR_FLAG); + ADD_INT_MACRO(m, CAN_EFF_FLAG); + ADD_INT_MACRO(m, CAN_RTR_FLAG); + ADD_INT_MACRO(m, CAN_ERR_FLAG); - PyModule_AddIntMacro(m, CAN_SFF_MASK); - PyModule_AddIntMacro(m, CAN_EFF_MASK); - PyModule_AddIntMacro(m, CAN_ERR_MASK); + ADD_INT_MACRO(m, CAN_SFF_MASK); + ADD_INT_MACRO(m, CAN_EFF_MASK); + ADD_INT_MACRO(m, CAN_ERR_MASK); #ifdef CAN_ISOTP - PyModule_AddIntMacro(m, CAN_ISOTP); + ADD_INT_MACRO(m, CAN_ISOTP); #endif #ifdef CAN_J1939 - PyModule_AddIntMacro(m, CAN_J1939); + ADD_INT_MACRO(m, CAN_J1939); #endif #endif #if defined(HAVE_LINUX_CAN_RAW_H) || defined(HAVE_NETCAN_CAN_H) - PyModule_AddIntMacro(m, CAN_RAW_FILTER); + ADD_INT_MACRO(m, CAN_RAW_FILTER); #ifdef CAN_RAW_ERR_FILTER - PyModule_AddIntMacro(m, CAN_RAW_ERR_FILTER); + ADD_INT_MACRO(m, CAN_RAW_ERR_FILTER); #endif - PyModule_AddIntMacro(m, CAN_RAW_LOOPBACK); - PyModule_AddIntMacro(m, CAN_RAW_RECV_OWN_MSGS); + ADD_INT_MACRO(m, CAN_RAW_LOOPBACK); + ADD_INT_MACRO(m, CAN_RAW_RECV_OWN_MSGS); #endif #ifdef HAVE_LINUX_CAN_RAW_FD_FRAMES - PyModule_AddIntMacro(m, CAN_RAW_FD_FRAMES); + ADD_INT_MACRO(m, CAN_RAW_FD_FRAMES); #endif #ifdef HAVE_LINUX_CAN_RAW_JOIN_FILTERS - PyModule_AddIntMacro(m, CAN_RAW_JOIN_FILTERS); + ADD_INT_MACRO(m, CAN_RAW_JOIN_FILTERS); #endif #ifdef HAVE_LINUX_CAN_BCM_H - PyModule_AddIntMacro(m, CAN_BCM); + ADD_INT_MACRO(m, CAN_BCM); /* BCM opcodes */ - PyModule_AddIntConstant(m, "CAN_BCM_TX_SETUP", TX_SETUP); - PyModule_AddIntConstant(m, "CAN_BCM_TX_DELETE", TX_DELETE); - PyModule_AddIntConstant(m, "CAN_BCM_TX_READ", TX_READ); - PyModule_AddIntConstant(m, "CAN_BCM_TX_SEND", TX_SEND); - PyModule_AddIntConstant(m, "CAN_BCM_RX_SETUP", RX_SETUP); - PyModule_AddIntConstant(m, "CAN_BCM_RX_DELETE", RX_DELETE); - PyModule_AddIntConstant(m, "CAN_BCM_RX_READ", RX_READ); - PyModule_AddIntConstant(m, "CAN_BCM_TX_STATUS", TX_STATUS); - PyModule_AddIntConstant(m, "CAN_BCM_TX_EXPIRED", TX_EXPIRED); - PyModule_AddIntConstant(m, "CAN_BCM_RX_STATUS", RX_STATUS); - PyModule_AddIntConstant(m, "CAN_BCM_RX_TIMEOUT", RX_TIMEOUT); - PyModule_AddIntConstant(m, "CAN_BCM_RX_CHANGED", RX_CHANGED); + ADD_INT_CONST(m, "CAN_BCM_TX_SETUP", 
TX_SETUP); + ADD_INT_CONST(m, "CAN_BCM_TX_DELETE", TX_DELETE); + ADD_INT_CONST(m, "CAN_BCM_TX_READ", TX_READ); + ADD_INT_CONST(m, "CAN_BCM_TX_SEND", TX_SEND); + ADD_INT_CONST(m, "CAN_BCM_RX_SETUP", RX_SETUP); + ADD_INT_CONST(m, "CAN_BCM_RX_DELETE", RX_DELETE); + ADD_INT_CONST(m, "CAN_BCM_RX_READ", RX_READ); + ADD_INT_CONST(m, "CAN_BCM_TX_STATUS", TX_STATUS); + ADD_INT_CONST(m, "CAN_BCM_TX_EXPIRED", TX_EXPIRED); + ADD_INT_CONST(m, "CAN_BCM_RX_STATUS", RX_STATUS); + ADD_INT_CONST(m, "CAN_BCM_RX_TIMEOUT", RX_TIMEOUT); + ADD_INT_CONST(m, "CAN_BCM_RX_CHANGED", RX_CHANGED); /* BCM flags */ - PyModule_AddIntConstant(m, "CAN_BCM_SETTIMER", SETTIMER); - PyModule_AddIntConstant(m, "CAN_BCM_STARTTIMER", STARTTIMER); - PyModule_AddIntConstant(m, "CAN_BCM_TX_COUNTEVT", TX_COUNTEVT); - PyModule_AddIntConstant(m, "CAN_BCM_TX_ANNOUNCE", TX_ANNOUNCE); - PyModule_AddIntConstant(m, "CAN_BCM_TX_CP_CAN_ID", TX_CP_CAN_ID); - PyModule_AddIntConstant(m, "CAN_BCM_RX_FILTER_ID", RX_FILTER_ID); - PyModule_AddIntConstant(m, "CAN_BCM_RX_CHECK_DLC", RX_CHECK_DLC); - PyModule_AddIntConstant(m, "CAN_BCM_RX_NO_AUTOTIMER", RX_NO_AUTOTIMER); - PyModule_AddIntConstant(m, "CAN_BCM_RX_ANNOUNCE_RESUME", RX_ANNOUNCE_RESUME); - PyModule_AddIntConstant(m, "CAN_BCM_TX_RESET_MULTI_IDX", TX_RESET_MULTI_IDX); - PyModule_AddIntConstant(m, "CAN_BCM_RX_RTR_FRAME", RX_RTR_FRAME); + ADD_INT_CONST(m, "CAN_BCM_SETTIMER", SETTIMER); + ADD_INT_CONST(m, "CAN_BCM_STARTTIMER", STARTTIMER); + ADD_INT_CONST(m, "CAN_BCM_TX_COUNTEVT", TX_COUNTEVT); + ADD_INT_CONST(m, "CAN_BCM_TX_ANNOUNCE", TX_ANNOUNCE); + ADD_INT_CONST(m, "CAN_BCM_TX_CP_CAN_ID", TX_CP_CAN_ID); + ADD_INT_CONST(m, "CAN_BCM_RX_FILTER_ID", RX_FILTER_ID); + ADD_INT_CONST(m, "CAN_BCM_RX_CHECK_DLC", RX_CHECK_DLC); + ADD_INT_CONST(m, "CAN_BCM_RX_NO_AUTOTIMER", RX_NO_AUTOTIMER); + ADD_INT_CONST(m, "CAN_BCM_RX_ANNOUNCE_RESUME", RX_ANNOUNCE_RESUME); + ADD_INT_CONST(m, "CAN_BCM_TX_RESET_MULTI_IDX", TX_RESET_MULTI_IDX); + ADD_INT_CONST(m, "CAN_BCM_RX_RTR_FRAME", RX_RTR_FRAME); #ifdef CAN_FD_FRAME /* CAN_FD_FRAME was only introduced in the 4.8.x kernel series */ - PyModule_AddIntConstant(m, "CAN_BCM_CAN_FD_FRAME", CAN_FD_FRAME); + ADD_INT_CONST(m, "CAN_BCM_CAN_FD_FRAME", CAN_FD_FRAME); #endif #endif #ifdef HAVE_LINUX_CAN_J1939_H - PyModule_AddIntMacro(m, J1939_MAX_UNICAST_ADDR); - PyModule_AddIntMacro(m, J1939_IDLE_ADDR); - PyModule_AddIntMacro(m, J1939_NO_ADDR); - PyModule_AddIntMacro(m, J1939_NO_NAME); - PyModule_AddIntMacro(m, J1939_PGN_REQUEST); - PyModule_AddIntMacro(m, J1939_PGN_ADDRESS_CLAIMED); - PyModule_AddIntMacro(m, J1939_PGN_ADDRESS_COMMANDED); - PyModule_AddIntMacro(m, J1939_PGN_PDU1_MAX); - PyModule_AddIntMacro(m, J1939_PGN_MAX); - PyModule_AddIntMacro(m, J1939_NO_PGN); + ADD_INT_MACRO(m, J1939_MAX_UNICAST_ADDR); + ADD_INT_MACRO(m, J1939_IDLE_ADDR); + ADD_INT_MACRO(m, J1939_NO_ADDR); + ADD_INT_MACRO(m, J1939_NO_NAME); + ADD_INT_MACRO(m, J1939_PGN_REQUEST); + ADD_INT_MACRO(m, J1939_PGN_ADDRESS_CLAIMED); + ADD_INT_MACRO(m, J1939_PGN_ADDRESS_COMMANDED); + ADD_INT_MACRO(m, J1939_PGN_PDU1_MAX); + ADD_INT_MACRO(m, J1939_PGN_MAX); + ADD_INT_MACRO(m, J1939_NO_PGN); /* J1939 socket options */ - PyModule_AddIntMacro(m, SO_J1939_FILTER); - PyModule_AddIntMacro(m, SO_J1939_PROMISC); - PyModule_AddIntMacro(m, SO_J1939_SEND_PRIO); - PyModule_AddIntMacro(m, SO_J1939_ERRQUEUE); + ADD_INT_MACRO(m, SO_J1939_FILTER); + ADD_INT_MACRO(m, SO_J1939_PROMISC); + ADD_INT_MACRO(m, SO_J1939_SEND_PRIO); + ADD_INT_MACRO(m, SO_J1939_ERRQUEUE); - PyModule_AddIntMacro(m, SCM_J1939_DEST_ADDR); - 
PyModule_AddIntMacro(m, SCM_J1939_DEST_NAME); - PyModule_AddIntMacro(m, SCM_J1939_PRIO); - PyModule_AddIntMacro(m, SCM_J1939_ERRQUEUE); + ADD_INT_MACRO(m, SCM_J1939_DEST_ADDR); + ADD_INT_MACRO(m, SCM_J1939_DEST_NAME); + ADD_INT_MACRO(m, SCM_J1939_PRIO); + ADD_INT_MACRO(m, SCM_J1939_ERRQUEUE); - PyModule_AddIntMacro(m, J1939_NLA_PAD); - PyModule_AddIntMacro(m, J1939_NLA_BYTES_ACKED); + ADD_INT_MACRO(m, J1939_NLA_PAD); + ADD_INT_MACRO(m, J1939_NLA_BYTES_ACKED); - PyModule_AddIntMacro(m, J1939_EE_INFO_NONE); - PyModule_AddIntMacro(m, J1939_EE_INFO_TX_ABORT); + ADD_INT_MACRO(m, J1939_EE_INFO_NONE); + ADD_INT_MACRO(m, J1939_EE_INFO_TX_ABORT); - PyModule_AddIntMacro(m, J1939_FILTER_MAX); + ADD_INT_MACRO(m, J1939_FILTER_MAX); #endif #ifdef SOL_RDS - PyModule_AddIntMacro(m, SOL_RDS); + ADD_INT_MACRO(m, SOL_RDS); #endif #ifdef HAVE_SOCKADDR_ALG - PyModule_AddIntMacro(m, SOL_ALG); + ADD_INT_MACRO(m, SOL_ALG); #endif #ifdef RDS_CANCEL_SENT_TO - PyModule_AddIntMacro(m, RDS_CANCEL_SENT_TO); + ADD_INT_MACRO(m, RDS_CANCEL_SENT_TO); #endif #ifdef RDS_GET_MR - PyModule_AddIntMacro(m, RDS_GET_MR); + ADD_INT_MACRO(m, RDS_GET_MR); #endif #ifdef RDS_FREE_MR - PyModule_AddIntMacro(m, RDS_FREE_MR); + ADD_INT_MACRO(m, RDS_FREE_MR); #endif #ifdef RDS_RECVERR - PyModule_AddIntMacro(m, RDS_RECVERR); + ADD_INT_MACRO(m, RDS_RECVERR); #endif #ifdef RDS_CONG_MONITOR - PyModule_AddIntMacro(m, RDS_CONG_MONITOR); + ADD_INT_MACRO(m, RDS_CONG_MONITOR); #endif #ifdef RDS_GET_MR_FOR_DEST - PyModule_AddIntMacro(m, RDS_GET_MR_FOR_DEST); + ADD_INT_MACRO(m, RDS_GET_MR_FOR_DEST); #endif #ifdef IPPROTO_IP - PyModule_AddIntMacro(m, IPPROTO_IP); + ADD_INT_MACRO(m, IPPROTO_IP); #else - PyModule_AddIntConstant(m, "IPPROTO_IP", 0); + ADD_INT_CONST(m, "IPPROTO_IP", 0); #endif #ifdef IPPROTO_HOPOPTS - PyModule_AddIntMacro(m, IPPROTO_HOPOPTS); + ADD_INT_MACRO(m, IPPROTO_HOPOPTS); #endif #ifdef IPPROTO_ICMP - PyModule_AddIntMacro(m, IPPROTO_ICMP); + ADD_INT_MACRO(m, IPPROTO_ICMP); #else - PyModule_AddIntConstant(m, "IPPROTO_ICMP", 1); + ADD_INT_CONST(m, "IPPROTO_ICMP", 1); #endif #ifdef IPPROTO_IGMP - PyModule_AddIntMacro(m, IPPROTO_IGMP); + ADD_INT_MACRO(m, IPPROTO_IGMP); #endif #ifdef IPPROTO_GGP - PyModule_AddIntMacro(m, IPPROTO_GGP); + ADD_INT_MACRO(m, IPPROTO_GGP); #endif #ifdef IPPROTO_IPV4 - PyModule_AddIntMacro(m, IPPROTO_IPV4); + ADD_INT_MACRO(m, IPPROTO_IPV4); #endif #ifdef IPPROTO_IPV6 - PyModule_AddIntMacro(m, IPPROTO_IPV6); + ADD_INT_MACRO(m, IPPROTO_IPV6); #endif #ifdef IPPROTO_IPIP - PyModule_AddIntMacro(m, IPPROTO_IPIP); + ADD_INT_MACRO(m, IPPROTO_IPIP); #endif #ifdef IPPROTO_TCP - PyModule_AddIntMacro(m, IPPROTO_TCP); + ADD_INT_MACRO(m, IPPROTO_TCP); #else - PyModule_AddIntConstant(m, "IPPROTO_TCP", 6); + ADD_INT_CONST(m, "IPPROTO_TCP", 6); #endif #ifdef IPPROTO_EGP - PyModule_AddIntMacro(m, IPPROTO_EGP); + ADD_INT_MACRO(m, IPPROTO_EGP); #endif #ifdef IPPROTO_PUP - PyModule_AddIntMacro(m, IPPROTO_PUP); + ADD_INT_MACRO(m, IPPROTO_PUP); #endif #ifdef IPPROTO_UDP - PyModule_AddIntMacro(m, IPPROTO_UDP); + ADD_INT_MACRO(m, IPPROTO_UDP); #else - PyModule_AddIntConstant(m, "IPPROTO_UDP", 17); + ADD_INT_CONST(m, "IPPROTO_UDP", 17); #endif #ifdef IPPROTO_UDPLITE - PyModule_AddIntMacro(m, IPPROTO_UDPLITE); + ADD_INT_MACRO(m, IPPROTO_UDPLITE); #ifndef UDPLITE_SEND_CSCOV #define UDPLITE_SEND_CSCOV 10 #endif - PyModule_AddIntMacro(m, UDPLITE_SEND_CSCOV); + ADD_INT_MACRO(m, UDPLITE_SEND_CSCOV); #ifndef UDPLITE_RECV_CSCOV #define UDPLITE_RECV_CSCOV 11 #endif - PyModule_AddIntMacro(m, UDPLITE_RECV_CSCOV); + ADD_INT_MACRO(m, 
UDPLITE_RECV_CSCOV); #endif #ifdef IPPROTO_IDP - PyModule_AddIntMacro(m, IPPROTO_IDP); + ADD_INT_MACRO(m, IPPROTO_IDP); #endif #ifdef IPPROTO_HELLO - PyModule_AddIntMacro(m, IPPROTO_HELLO); + ADD_INT_MACRO(m, IPPROTO_HELLO); #endif #ifdef IPPROTO_ND - PyModule_AddIntMacro(m, IPPROTO_ND); + ADD_INT_MACRO(m, IPPROTO_ND); #endif #ifdef IPPROTO_TP - PyModule_AddIntMacro(m, IPPROTO_TP); + ADD_INT_MACRO(m, IPPROTO_TP); #endif #ifdef IPPROTO_ROUTING - PyModule_AddIntMacro(m, IPPROTO_ROUTING); + ADD_INT_MACRO(m, IPPROTO_ROUTING); #endif #ifdef IPPROTO_FRAGMENT - PyModule_AddIntMacro(m, IPPROTO_FRAGMENT); + ADD_INT_MACRO(m, IPPROTO_FRAGMENT); #endif #ifdef IPPROTO_RSVP - PyModule_AddIntMacro(m, IPPROTO_RSVP); + ADD_INT_MACRO(m, IPPROTO_RSVP); #endif #ifdef IPPROTO_GRE - PyModule_AddIntMacro(m, IPPROTO_GRE); + ADD_INT_MACRO(m, IPPROTO_GRE); #endif #ifdef IPPROTO_ESP - PyModule_AddIntMacro(m, IPPROTO_ESP); + ADD_INT_MACRO(m, IPPROTO_ESP); #endif #ifdef IPPROTO_AH - PyModule_AddIntMacro(m, IPPROTO_AH); + ADD_INT_MACRO(m, IPPROTO_AH); #endif #ifdef IPPROTO_MOBILE - PyModule_AddIntMacro(m, IPPROTO_MOBILE); + ADD_INT_MACRO(m, IPPROTO_MOBILE); #endif #ifdef IPPROTO_ICMPV6 - PyModule_AddIntMacro(m, IPPROTO_ICMPV6); + ADD_INT_MACRO(m, IPPROTO_ICMPV6); #endif #ifdef IPPROTO_NONE - PyModule_AddIntMacro(m, IPPROTO_NONE); + ADD_INT_MACRO(m, IPPROTO_NONE); #endif #ifdef IPPROTO_DSTOPTS - PyModule_AddIntMacro(m, IPPROTO_DSTOPTS); + ADD_INT_MACRO(m, IPPROTO_DSTOPTS); #endif #ifdef IPPROTO_XTP - PyModule_AddIntMacro(m, IPPROTO_XTP); + ADD_INT_MACRO(m, IPPROTO_XTP); #endif #ifdef IPPROTO_EON - PyModule_AddIntMacro(m, IPPROTO_EON); + ADD_INT_MACRO(m, IPPROTO_EON); #endif #ifdef IPPROTO_PIM - PyModule_AddIntMacro(m, IPPROTO_PIM); + ADD_INT_MACRO(m, IPPROTO_PIM); #endif #ifdef IPPROTO_IPCOMP - PyModule_AddIntMacro(m, IPPROTO_IPCOMP); + ADD_INT_MACRO(m, IPPROTO_IPCOMP); #endif #ifdef IPPROTO_VRRP - PyModule_AddIntMacro(m, IPPROTO_VRRP); + ADD_INT_MACRO(m, IPPROTO_VRRP); #endif #ifdef IPPROTO_SCTP - PyModule_AddIntMacro(m, IPPROTO_SCTP); + ADD_INT_MACRO(m, IPPROTO_SCTP); #endif #ifdef IPPROTO_BIP - PyModule_AddIntMacro(m, IPPROTO_BIP); + ADD_INT_MACRO(m, IPPROTO_BIP); #endif #ifdef IPPROTO_MPTCP - PyModule_AddIntMacro(m, IPPROTO_MPTCP); + ADD_INT_MACRO(m, IPPROTO_MPTCP); #endif /**/ #ifdef IPPROTO_RAW - PyModule_AddIntMacro(m, IPPROTO_RAW); + ADD_INT_MACRO(m, IPPROTO_RAW); #else - PyModule_AddIntConstant(m, "IPPROTO_RAW", 255); + ADD_INT_CONST(m, "IPPROTO_RAW", 255); #endif #ifdef IPPROTO_MAX - PyModule_AddIntMacro(m, IPPROTO_MAX); + ADD_INT_MACRO(m, IPPROTO_MAX); #endif #ifdef MS_WINDOWS - PyModule_AddIntMacro(m, IPPROTO_ICLFXBM); - PyModule_AddIntMacro(m, IPPROTO_ST); - PyModule_AddIntMacro(m, IPPROTO_CBT); - PyModule_AddIntMacro(m, IPPROTO_IGP); - PyModule_AddIntMacro(m, IPPROTO_RDP); - PyModule_AddIntMacro(m, IPPROTO_PGM); - PyModule_AddIntMacro(m, IPPROTO_L2TP); - PyModule_AddIntMacro(m, IPPROTO_SCTP); + ADD_INT_MACRO(m, IPPROTO_ICLFXBM); + ADD_INT_MACRO(m, IPPROTO_ST); + ADD_INT_MACRO(m, IPPROTO_CBT); + ADD_INT_MACRO(m, IPPROTO_IGP); + ADD_INT_MACRO(m, IPPROTO_RDP); + ADD_INT_MACRO(m, IPPROTO_PGM); + ADD_INT_MACRO(m, IPPROTO_L2TP); + ADD_INT_MACRO(m, IPPROTO_SCTP); #endif #ifdef SYSPROTO_CONTROL - PyModule_AddIntMacro(m, SYSPROTO_CONTROL); + ADD_INT_MACRO(m, SYSPROTO_CONTROL); #endif /* Some port configuration */ #ifdef IPPORT_RESERVED - PyModule_AddIntMacro(m, IPPORT_RESERVED); + ADD_INT_MACRO(m, IPPORT_RESERVED); #else - PyModule_AddIntConstant(m, "IPPORT_RESERVED", 1024); + ADD_INT_CONST(m, "IPPORT_RESERVED", 
1024); #endif #ifdef IPPORT_USERRESERVED - PyModule_AddIntMacro(m, IPPORT_USERRESERVED); + ADD_INT_MACRO(m, IPPORT_USERRESERVED); #else - PyModule_AddIntConstant(m, "IPPORT_USERRESERVED", 5000); + ADD_INT_CONST(m, "IPPORT_USERRESERVED", 5000); #endif /* Some reserved IP v.4 addresses */ #ifdef INADDR_ANY - PyModule_AddIntMacro(m, INADDR_ANY); + ADD_INT_MACRO(m, INADDR_ANY); #else - PyModule_AddIntConstant(m, "INADDR_ANY", 0x00000000); + ADD_INT_CONST(m, "INADDR_ANY", 0x00000000); #endif #ifdef INADDR_BROADCAST - PyModule_AddIntMacro(m, INADDR_BROADCAST); + ADD_INT_MACRO(m, INADDR_BROADCAST); #else - PyModule_AddIntConstant(m, "INADDR_BROADCAST", 0xffffffff); + ADD_INT_CONST(m, "INADDR_BROADCAST", 0xffffffff); #endif #ifdef INADDR_LOOPBACK - PyModule_AddIntMacro(m, INADDR_LOOPBACK); + ADD_INT_MACRO(m, INADDR_LOOPBACK); #else - PyModule_AddIntConstant(m, "INADDR_LOOPBACK", 0x7F000001); + ADD_INT_CONST(m, "INADDR_LOOPBACK", 0x7F000001); #endif #ifdef INADDR_UNSPEC_GROUP - PyModule_AddIntMacro(m, INADDR_UNSPEC_GROUP); + ADD_INT_MACRO(m, INADDR_UNSPEC_GROUP); #else - PyModule_AddIntConstant(m, "INADDR_UNSPEC_GROUP", 0xe0000000); + ADD_INT_CONST(m, "INADDR_UNSPEC_GROUP", 0xe0000000); #endif #ifdef INADDR_ALLHOSTS_GROUP - PyModule_AddIntConstant(m, "INADDR_ALLHOSTS_GROUP", + ADD_INT_CONST(m, "INADDR_ALLHOSTS_GROUP", INADDR_ALLHOSTS_GROUP); #else - PyModule_AddIntConstant(m, "INADDR_ALLHOSTS_GROUP", 0xe0000001); + ADD_INT_CONST(m, "INADDR_ALLHOSTS_GROUP", 0xe0000001); #endif #ifdef INADDR_MAX_LOCAL_GROUP - PyModule_AddIntMacro(m, INADDR_MAX_LOCAL_GROUP); + ADD_INT_MACRO(m, INADDR_MAX_LOCAL_GROUP); #else - PyModule_AddIntConstant(m, "INADDR_MAX_LOCAL_GROUP", 0xe00000ff); + ADD_INT_CONST(m, "INADDR_MAX_LOCAL_GROUP", 0xe00000ff); #endif #ifdef INADDR_NONE - PyModule_AddIntMacro(m, INADDR_NONE); + ADD_INT_MACRO(m, INADDR_NONE); #else - PyModule_AddIntConstant(m, "INADDR_NONE", 0xffffffff); + ADD_INT_CONST(m, "INADDR_NONE", 0xffffffff); #endif /* IPv4 [gs]etsockopt options */ #ifdef IP_OPTIONS - PyModule_AddIntMacro(m, IP_OPTIONS); + ADD_INT_MACRO(m, IP_OPTIONS); #endif #ifdef IP_HDRINCL - PyModule_AddIntMacro(m, IP_HDRINCL); + ADD_INT_MACRO(m, IP_HDRINCL); #endif #ifdef IP_TOS - PyModule_AddIntMacro(m, IP_TOS); + ADD_INT_MACRO(m, IP_TOS); #endif #ifdef IP_TTL - PyModule_AddIntMacro(m, IP_TTL); + ADD_INT_MACRO(m, IP_TTL); #endif #ifdef IP_RECVOPTS - PyModule_AddIntMacro(m, IP_RECVOPTS); + ADD_INT_MACRO(m, IP_RECVOPTS); #endif #ifdef IP_RECVRETOPTS - PyModule_AddIntMacro(m, IP_RECVRETOPTS); + ADD_INT_MACRO(m, IP_RECVRETOPTS); #endif #ifdef IP_RECVTOS - PyModule_AddIntMacro(m, IP_RECVTOS); + ADD_INT_MACRO(m, IP_RECVTOS); #endif #ifdef IP_RECVDSTADDR - PyModule_AddIntMacro(m, IP_RECVDSTADDR); + ADD_INT_MACRO(m, IP_RECVDSTADDR); #endif #ifdef IP_RETOPTS - PyModule_AddIntMacro(m, IP_RETOPTS); + ADD_INT_MACRO(m, IP_RETOPTS); #endif #ifdef IP_MULTICAST_IF - PyModule_AddIntMacro(m, IP_MULTICAST_IF); + ADD_INT_MACRO(m, IP_MULTICAST_IF); #endif #ifdef IP_MULTICAST_TTL - PyModule_AddIntMacro(m, IP_MULTICAST_TTL); + ADD_INT_MACRO(m, IP_MULTICAST_TTL); #endif #ifdef IP_MULTICAST_LOOP - PyModule_AddIntMacro(m, IP_MULTICAST_LOOP); + ADD_INT_MACRO(m, IP_MULTICAST_LOOP); #endif #ifdef IP_ADD_MEMBERSHIP - PyModule_AddIntMacro(m, IP_ADD_MEMBERSHIP); + ADD_INT_MACRO(m, IP_ADD_MEMBERSHIP); #endif #ifdef IP_DROP_MEMBERSHIP - PyModule_AddIntMacro(m, IP_DROP_MEMBERSHIP); + ADD_INT_MACRO(m, IP_DROP_MEMBERSHIP); #endif #ifdef IP_DEFAULT_MULTICAST_TTL - PyModule_AddIntMacro(m, IP_DEFAULT_MULTICAST_TTL); + ADD_INT_MACRO(m, 
IP_DEFAULT_MULTICAST_TTL); #endif #ifdef IP_DEFAULT_MULTICAST_LOOP - PyModule_AddIntMacro(m, IP_DEFAULT_MULTICAST_LOOP); + ADD_INT_MACRO(m, IP_DEFAULT_MULTICAST_LOOP); #endif #ifdef IP_MAX_MEMBERSHIPS - PyModule_AddIntMacro(m, IP_MAX_MEMBERSHIPS); + ADD_INT_MACRO(m, IP_MAX_MEMBERSHIPS); #endif #ifdef IP_TRANSPARENT - PyModule_AddIntMacro(m, IP_TRANSPARENT); + ADD_INT_MACRO(m, IP_TRANSPARENT); #endif #ifdef IP_PKTINFO - PyModule_AddIntMacro(m, IP_PKTINFO); + ADD_INT_MACRO(m, IP_PKTINFO); #endif #ifdef IP_BIND_ADDRESS_NO_PORT - PyModule_AddIntMacro(m, IP_BIND_ADDRESS_NO_PORT); + ADD_INT_MACRO(m, IP_BIND_ADDRESS_NO_PORT); +#endif +#ifdef IP_UNBLOCK_SOURCE + ADD_INT_MACRO(m, IP_UNBLOCK_SOURCE); +#endif +#ifdef IP_BLOCK_SOURCE + ADD_INT_MACRO(m, IP_BLOCK_SOURCE); +#endif +#ifdef IP_ADD_SOURCE_MEMBERSHIP + ADD_INT_MACRO(m, IP_ADD_SOURCE_MEMBERSHIP); +#endif +#ifdef IP_DROP_SOURCE_MEMBERSHIP + ADD_INT_MACRO(m, IP_DROP_SOURCE_MEMBERSHIP); #endif /* IPv6 [gs]etsockopt options, defined in RFC2553 */ #ifdef IPV6_JOIN_GROUP - PyModule_AddIntMacro(m, IPV6_JOIN_GROUP); + ADD_INT_MACRO(m, IPV6_JOIN_GROUP); #endif #ifdef IPV6_LEAVE_GROUP - PyModule_AddIntMacro(m, IPV6_LEAVE_GROUP); + ADD_INT_MACRO(m, IPV6_LEAVE_GROUP); #endif #ifdef IPV6_MULTICAST_HOPS - PyModule_AddIntMacro(m, IPV6_MULTICAST_HOPS); + ADD_INT_MACRO(m, IPV6_MULTICAST_HOPS); #endif #ifdef IPV6_MULTICAST_IF - PyModule_AddIntMacro(m, IPV6_MULTICAST_IF); + ADD_INT_MACRO(m, IPV6_MULTICAST_IF); #endif #ifdef IPV6_MULTICAST_LOOP - PyModule_AddIntMacro(m, IPV6_MULTICAST_LOOP); + ADD_INT_MACRO(m, IPV6_MULTICAST_LOOP); #endif #ifdef IPV6_UNICAST_HOPS - PyModule_AddIntMacro(m, IPV6_UNICAST_HOPS); + ADD_INT_MACRO(m, IPV6_UNICAST_HOPS); #endif /* Additional IPV6 socket options, defined in RFC 3493 */ #ifdef IPV6_V6ONLY - PyModule_AddIntMacro(m, IPV6_V6ONLY); + ADD_INT_MACRO(m, IPV6_V6ONLY); #endif /* Advanced IPV6 socket options, from RFC 3542 */ #ifdef IPV6_CHECKSUM - PyModule_AddIntMacro(m, IPV6_CHECKSUM); + ADD_INT_MACRO(m, IPV6_CHECKSUM); #endif #ifdef IPV6_DONTFRAG - PyModule_AddIntMacro(m, IPV6_DONTFRAG); + ADD_INT_MACRO(m, IPV6_DONTFRAG); #endif #ifdef IPV6_DSTOPTS - PyModule_AddIntMacro(m, IPV6_DSTOPTS); + ADD_INT_MACRO(m, IPV6_DSTOPTS); #endif #ifdef IPV6_HOPLIMIT - PyModule_AddIntMacro(m, IPV6_HOPLIMIT); + ADD_INT_MACRO(m, IPV6_HOPLIMIT); #endif #ifdef IPV6_HOPOPTS - PyModule_AddIntMacro(m, IPV6_HOPOPTS); + ADD_INT_MACRO(m, IPV6_HOPOPTS); #endif #ifdef IPV6_NEXTHOP - PyModule_AddIntMacro(m, IPV6_NEXTHOP); + ADD_INT_MACRO(m, IPV6_NEXTHOP); #endif #ifdef IPV6_PATHMTU - PyModule_AddIntMacro(m, IPV6_PATHMTU); + ADD_INT_MACRO(m, IPV6_PATHMTU); #endif #ifdef IPV6_PKTINFO - PyModule_AddIntMacro(m, IPV6_PKTINFO); + ADD_INT_MACRO(m, IPV6_PKTINFO); #endif #ifdef IPV6_RECVDSTOPTS - PyModule_AddIntMacro(m, IPV6_RECVDSTOPTS); + ADD_INT_MACRO(m, IPV6_RECVDSTOPTS); #endif #ifdef IPV6_RECVHOPLIMIT - PyModule_AddIntMacro(m, IPV6_RECVHOPLIMIT); + ADD_INT_MACRO(m, IPV6_RECVHOPLIMIT); #endif #ifdef IPV6_RECVHOPOPTS - PyModule_AddIntMacro(m, IPV6_RECVHOPOPTS); + ADD_INT_MACRO(m, IPV6_RECVHOPOPTS); #endif #ifdef IPV6_RECVPKTINFO - PyModule_AddIntMacro(m, IPV6_RECVPKTINFO); + ADD_INT_MACRO(m, IPV6_RECVPKTINFO); #endif #ifdef IPV6_RECVRTHDR - PyModule_AddIntMacro(m, IPV6_RECVRTHDR); + ADD_INT_MACRO(m, IPV6_RECVRTHDR); #endif #ifdef IPV6_RECVTCLASS - PyModule_AddIntMacro(m, IPV6_RECVTCLASS); + ADD_INT_MACRO(m, IPV6_RECVTCLASS); #endif #ifdef IPV6_RTHDR - PyModule_AddIntMacro(m, IPV6_RTHDR); + ADD_INT_MACRO(m, IPV6_RTHDR); #endif #ifdef IPV6_RTHDRDSTOPTS - 
PyModule_AddIntMacro(m, IPV6_RTHDRDSTOPTS); + ADD_INT_MACRO(m, IPV6_RTHDRDSTOPTS); #endif #ifdef IPV6_RTHDR_TYPE_0 - PyModule_AddIntMacro(m, IPV6_RTHDR_TYPE_0); + ADD_INT_MACRO(m, IPV6_RTHDR_TYPE_0); #endif #ifdef IPV6_RECVPATHMTU - PyModule_AddIntMacro(m, IPV6_RECVPATHMTU); + ADD_INT_MACRO(m, IPV6_RECVPATHMTU); #endif #ifdef IPV6_TCLASS - PyModule_AddIntMacro(m, IPV6_TCLASS); + ADD_INT_MACRO(m, IPV6_TCLASS); #endif #ifdef IPV6_USE_MIN_MTU - PyModule_AddIntMacro(m, IPV6_USE_MIN_MTU); + ADD_INT_MACRO(m, IPV6_USE_MIN_MTU); #endif /* TCP options */ #ifdef TCP_NODELAY - PyModule_AddIntMacro(m, TCP_NODELAY); + ADD_INT_MACRO(m, TCP_NODELAY); #endif #ifdef TCP_MAXSEG - PyModule_AddIntMacro(m, TCP_MAXSEG); + ADD_INT_MACRO(m, TCP_MAXSEG); #endif #ifdef TCP_CORK - PyModule_AddIntMacro(m, TCP_CORK); + ADD_INT_MACRO(m, TCP_CORK); #endif #ifdef TCP_KEEPIDLE - PyModule_AddIntMacro(m, TCP_KEEPIDLE); + ADD_INT_MACRO(m, TCP_KEEPIDLE); #endif /* TCP_KEEPALIVE is OSX's TCP_KEEPIDLE equivalent */ #if defined(__APPLE__) && defined(TCP_KEEPALIVE) - PyModule_AddIntMacro(m, TCP_KEEPALIVE); + ADD_INT_MACRO(m, TCP_KEEPALIVE); #endif #ifdef TCP_KEEPINTVL - PyModule_AddIntMacro(m, TCP_KEEPINTVL); + ADD_INT_MACRO(m, TCP_KEEPINTVL); #endif #ifdef TCP_KEEPCNT - PyModule_AddIntMacro(m, TCP_KEEPCNT); + ADD_INT_MACRO(m, TCP_KEEPCNT); #endif #ifdef TCP_SYNCNT - PyModule_AddIntMacro(m, TCP_SYNCNT); + ADD_INT_MACRO(m, TCP_SYNCNT); #endif #ifdef TCP_LINGER2 - PyModule_AddIntMacro(m, TCP_LINGER2); + ADD_INT_MACRO(m, TCP_LINGER2); #endif #ifdef TCP_DEFER_ACCEPT - PyModule_AddIntMacro(m, TCP_DEFER_ACCEPT); + ADD_INT_MACRO(m, TCP_DEFER_ACCEPT); #endif #ifdef TCP_WINDOW_CLAMP - PyModule_AddIntMacro(m, TCP_WINDOW_CLAMP); + ADD_INT_MACRO(m, TCP_WINDOW_CLAMP); #endif #ifdef TCP_INFO - PyModule_AddIntMacro(m, TCP_INFO); + ADD_INT_MACRO(m, TCP_INFO); #endif #ifdef TCP_CONNECTION_INFO - PyModule_AddIntMacro(m, TCP_CONNECTION_INFO); + ADD_INT_MACRO(m, TCP_CONNECTION_INFO); #endif #ifdef TCP_QUICKACK - PyModule_AddIntMacro(m, TCP_QUICKACK); + ADD_INT_MACRO(m, TCP_QUICKACK); #endif #ifdef TCP_CONGESTION - PyModule_AddIntMacro(m, TCP_CONGESTION); + ADD_INT_MACRO(m, TCP_CONGESTION); #endif #ifdef TCP_MD5SIG - PyModule_AddIntMacro(m, TCP_MD5SIG); + ADD_INT_MACRO(m, TCP_MD5SIG); #endif #ifdef TCP_THIN_LINEAR_TIMEOUTS - PyModule_AddIntMacro(m, TCP_THIN_LINEAR_TIMEOUTS); + ADD_INT_MACRO(m, TCP_THIN_LINEAR_TIMEOUTS); #endif #ifdef TCP_THIN_DUPACK - PyModule_AddIntMacro(m, TCP_THIN_DUPACK); + ADD_INT_MACRO(m, TCP_THIN_DUPACK); #endif #ifdef TCP_USER_TIMEOUT - PyModule_AddIntMacro(m, TCP_USER_TIMEOUT); + ADD_INT_MACRO(m, TCP_USER_TIMEOUT); #endif #ifdef TCP_REPAIR - PyModule_AddIntMacro(m, TCP_REPAIR); + ADD_INT_MACRO(m, TCP_REPAIR); #endif #ifdef TCP_REPAIR_QUEUE - PyModule_AddIntMacro(m, TCP_REPAIR_QUEUE); + ADD_INT_MACRO(m, TCP_REPAIR_QUEUE); #endif #ifdef TCP_QUEUE_SEQ - PyModule_AddIntMacro(m, TCP_QUEUE_SEQ); + ADD_INT_MACRO(m, TCP_QUEUE_SEQ); #endif #ifdef TCP_REPAIR_OPTIONS - PyModule_AddIntMacro(m, TCP_REPAIR_OPTIONS); + ADD_INT_MACRO(m, TCP_REPAIR_OPTIONS); #endif #ifdef TCP_FASTOPEN - PyModule_AddIntMacro(m, TCP_FASTOPEN); + ADD_INT_MACRO(m, TCP_FASTOPEN); #endif #ifdef TCP_TIMESTAMP - PyModule_AddIntMacro(m, TCP_TIMESTAMP); + ADD_INT_MACRO(m, TCP_TIMESTAMP); #endif #ifdef TCP_NOTSENT_LOWAT - PyModule_AddIntMacro(m, TCP_NOTSENT_LOWAT); + ADD_INT_MACRO(m, TCP_NOTSENT_LOWAT); #endif #ifdef TCP_CC_INFO - PyModule_AddIntMacro(m, TCP_CC_INFO); + ADD_INT_MACRO(m, TCP_CC_INFO); #endif #ifdef TCP_SAVE_SYN - PyModule_AddIntMacro(m, TCP_SAVE_SYN); + 
ADD_INT_MACRO(m, TCP_SAVE_SYN); #endif #ifdef TCP_SAVED_SYN - PyModule_AddIntMacro(m, TCP_SAVED_SYN); + ADD_INT_MACRO(m, TCP_SAVED_SYN); #endif #ifdef TCP_REPAIR_WINDOW - PyModule_AddIntMacro(m, TCP_REPAIR_WINDOW); + ADD_INT_MACRO(m, TCP_REPAIR_WINDOW); #endif #ifdef TCP_FASTOPEN_CONNECT - PyModule_AddIntMacro(m, TCP_FASTOPEN_CONNECT); + ADD_INT_MACRO(m, TCP_FASTOPEN_CONNECT); #endif #ifdef TCP_ULP - PyModule_AddIntMacro(m, TCP_ULP); + ADD_INT_MACRO(m, TCP_ULP); #endif #ifdef TCP_MD5SIG_EXT - PyModule_AddIntMacro(m, TCP_MD5SIG_EXT); + ADD_INT_MACRO(m, TCP_MD5SIG_EXT); #endif #ifdef TCP_FASTOPEN_KEY - PyModule_AddIntMacro(m, TCP_FASTOPEN_KEY); + ADD_INT_MACRO(m, TCP_FASTOPEN_KEY); #endif #ifdef TCP_FASTOPEN_NO_COOKIE - PyModule_AddIntMacro(m, TCP_FASTOPEN_NO_COOKIE); + ADD_INT_MACRO(m, TCP_FASTOPEN_NO_COOKIE); #endif #ifdef TCP_ZEROCOPY_RECEIVE - PyModule_AddIntMacro(m, TCP_ZEROCOPY_RECEIVE); + ADD_INT_MACRO(m, TCP_ZEROCOPY_RECEIVE); #endif #ifdef TCP_INQ - PyModule_AddIntMacro(m, TCP_INQ); + ADD_INT_MACRO(m, TCP_INQ); #endif #ifdef TCP_TX_DELAY - PyModule_AddIntMacro(m, TCP_TX_DELAY); + ADD_INT_MACRO(m, TCP_TX_DELAY); #endif /* IPX options */ #ifdef IPX_TYPE - PyModule_AddIntMacro(m, IPX_TYPE); + ADD_INT_MACRO(m, IPX_TYPE); #endif /* Reliable Datagram Sockets */ #ifdef RDS_CMSG_RDMA_ARGS - PyModule_AddIntMacro(m, RDS_CMSG_RDMA_ARGS); + ADD_INT_MACRO(m, RDS_CMSG_RDMA_ARGS); #endif #ifdef RDS_CMSG_RDMA_DEST - PyModule_AddIntMacro(m, RDS_CMSG_RDMA_DEST); + ADD_INT_MACRO(m, RDS_CMSG_RDMA_DEST); #endif #ifdef RDS_CMSG_RDMA_MAP - PyModule_AddIntMacro(m, RDS_CMSG_RDMA_MAP); + ADD_INT_MACRO(m, RDS_CMSG_RDMA_MAP); #endif #ifdef RDS_CMSG_RDMA_STATUS - PyModule_AddIntMacro(m, RDS_CMSG_RDMA_STATUS); + ADD_INT_MACRO(m, RDS_CMSG_RDMA_STATUS); #endif #ifdef RDS_CMSG_RDMA_UPDATE - PyModule_AddIntMacro(m, RDS_CMSG_RDMA_UPDATE); + ADD_INT_MACRO(m, RDS_CMSG_RDMA_UPDATE); #endif #ifdef RDS_RDMA_READWRITE - PyModule_AddIntMacro(m, RDS_RDMA_READWRITE); + ADD_INT_MACRO(m, RDS_RDMA_READWRITE); #endif #ifdef RDS_RDMA_FENCE - PyModule_AddIntMacro(m, RDS_RDMA_FENCE); + ADD_INT_MACRO(m, RDS_RDMA_FENCE); #endif #ifdef RDS_RDMA_INVALIDATE - PyModule_AddIntMacro(m, RDS_RDMA_INVALIDATE); + ADD_INT_MACRO(m, RDS_RDMA_INVALIDATE); #endif #ifdef RDS_RDMA_USE_ONCE - PyModule_AddIntMacro(m, RDS_RDMA_USE_ONCE); + ADD_INT_MACRO(m, RDS_RDMA_USE_ONCE); #endif #ifdef RDS_RDMA_DONTWAIT - PyModule_AddIntMacro(m, RDS_RDMA_DONTWAIT); + ADD_INT_MACRO(m, RDS_RDMA_DONTWAIT); #endif #ifdef RDS_RDMA_NOTIFY_ME - PyModule_AddIntMacro(m, RDS_RDMA_NOTIFY_ME); + ADD_INT_MACRO(m, RDS_RDMA_NOTIFY_ME); #endif #ifdef RDS_RDMA_SILENT - PyModule_AddIntMacro(m, RDS_RDMA_SILENT); + ADD_INT_MACRO(m, RDS_RDMA_SILENT); #endif /* get{addr,name}info parameters */ #ifdef EAI_ADDRFAMILY - PyModule_AddIntMacro(m, EAI_ADDRFAMILY); + ADD_INT_MACRO(m, EAI_ADDRFAMILY); #endif #ifdef EAI_AGAIN - PyModule_AddIntMacro(m, EAI_AGAIN); + ADD_INT_MACRO(m, EAI_AGAIN); #endif #ifdef EAI_BADFLAGS - PyModule_AddIntMacro(m, EAI_BADFLAGS); + ADD_INT_MACRO(m, EAI_BADFLAGS); #endif #ifdef EAI_FAIL - PyModule_AddIntMacro(m, EAI_FAIL); + ADD_INT_MACRO(m, EAI_FAIL); #endif #ifdef EAI_FAMILY - PyModule_AddIntMacro(m, EAI_FAMILY); + ADD_INT_MACRO(m, EAI_FAMILY); #endif #ifdef EAI_MEMORY - PyModule_AddIntMacro(m, EAI_MEMORY); + ADD_INT_MACRO(m, EAI_MEMORY); #endif #ifdef EAI_NODATA - PyModule_AddIntMacro(m, EAI_NODATA); + ADD_INT_MACRO(m, EAI_NODATA); #endif #ifdef EAI_NONAME - PyModule_AddIntMacro(m, EAI_NONAME); + ADD_INT_MACRO(m, EAI_NONAME); #endif #ifdef EAI_OVERFLOW - 
PyModule_AddIntMacro(m, EAI_OVERFLOW); + ADD_INT_MACRO(m, EAI_OVERFLOW); #endif #ifdef EAI_SERVICE - PyModule_AddIntMacro(m, EAI_SERVICE); + ADD_INT_MACRO(m, EAI_SERVICE); #endif #ifdef EAI_SOCKTYPE - PyModule_AddIntMacro(m, EAI_SOCKTYPE); + ADD_INT_MACRO(m, EAI_SOCKTYPE); #endif #ifdef EAI_SYSTEM - PyModule_AddIntMacro(m, EAI_SYSTEM); + ADD_INT_MACRO(m, EAI_SYSTEM); #endif #ifdef EAI_BADHINTS - PyModule_AddIntMacro(m, EAI_BADHINTS); + ADD_INT_MACRO(m, EAI_BADHINTS); #endif #ifdef EAI_PROTOCOL - PyModule_AddIntMacro(m, EAI_PROTOCOL); + ADD_INT_MACRO(m, EAI_PROTOCOL); #endif #ifdef EAI_MAX - PyModule_AddIntMacro(m, EAI_MAX); + ADD_INT_MACRO(m, EAI_MAX); #endif #ifdef AI_PASSIVE - PyModule_AddIntMacro(m, AI_PASSIVE); + ADD_INT_MACRO(m, AI_PASSIVE); #endif #ifdef AI_CANONNAME - PyModule_AddIntMacro(m, AI_CANONNAME); + ADD_INT_MACRO(m, AI_CANONNAME); #endif #ifdef AI_NUMERICHOST - PyModule_AddIntMacro(m, AI_NUMERICHOST); + ADD_INT_MACRO(m, AI_NUMERICHOST); #endif #ifdef AI_NUMERICSERV - PyModule_AddIntMacro(m, AI_NUMERICSERV); + ADD_INT_MACRO(m, AI_NUMERICSERV); #endif #ifdef AI_MASK - PyModule_AddIntMacro(m, AI_MASK); + ADD_INT_MACRO(m, AI_MASK); #endif #ifdef AI_ALL - PyModule_AddIntMacro(m, AI_ALL); + ADD_INT_MACRO(m, AI_ALL); #endif #ifdef AI_V4MAPPED_CFG - PyModule_AddIntMacro(m, AI_V4MAPPED_CFG); + ADD_INT_MACRO(m, AI_V4MAPPED_CFG); #endif #ifdef AI_ADDRCONFIG - PyModule_AddIntMacro(m, AI_ADDRCONFIG); + ADD_INT_MACRO(m, AI_ADDRCONFIG); #endif #ifdef AI_V4MAPPED - PyModule_AddIntMacro(m, AI_V4MAPPED); + ADD_INT_MACRO(m, AI_V4MAPPED); #endif #ifdef AI_DEFAULT - PyModule_AddIntMacro(m, AI_DEFAULT); + ADD_INT_MACRO(m, AI_DEFAULT); #endif #ifdef NI_MAXHOST - PyModule_AddIntMacro(m, NI_MAXHOST); + ADD_INT_MACRO(m, NI_MAXHOST); #endif #ifdef NI_MAXSERV - PyModule_AddIntMacro(m, NI_MAXSERV); + ADD_INT_MACRO(m, NI_MAXSERV); #endif #ifdef NI_NOFQDN - PyModule_AddIntMacro(m, NI_NOFQDN); + ADD_INT_MACRO(m, NI_NOFQDN); #endif #ifdef NI_NUMERICHOST - PyModule_AddIntMacro(m, NI_NUMERICHOST); + ADD_INT_MACRO(m, NI_NUMERICHOST); #endif #ifdef NI_NAMEREQD - PyModule_AddIntMacro(m, NI_NAMEREQD); + ADD_INT_MACRO(m, NI_NAMEREQD); #endif #ifdef NI_NUMERICSERV - PyModule_AddIntMacro(m, NI_NUMERICSERV); + ADD_INT_MACRO(m, NI_NUMERICSERV); #endif #ifdef NI_DGRAM - PyModule_AddIntMacro(m, NI_DGRAM); + ADD_INT_MACRO(m, NI_DGRAM); #endif /* shutdown() parameters */ #ifdef SHUT_RD - PyModule_AddIntMacro(m, SHUT_RD); + ADD_INT_MACRO(m, SHUT_RD); #elif defined(SD_RECEIVE) - PyModule_AddIntConstant(m, "SHUT_RD", SD_RECEIVE); + ADD_INT_CONST(m, "SHUT_RD", SD_RECEIVE); #else - PyModule_AddIntConstant(m, "SHUT_RD", 0); + ADD_INT_CONST(m, "SHUT_RD", 0); #endif #ifdef SHUT_WR - PyModule_AddIntMacro(m, SHUT_WR); + ADD_INT_MACRO(m, SHUT_WR); #elif defined(SD_SEND) - PyModule_AddIntConstant(m, "SHUT_WR", SD_SEND); + ADD_INT_CONST(m, "SHUT_WR", SD_SEND); #else - PyModule_AddIntConstant(m, "SHUT_WR", 1); + ADD_INT_CONST(m, "SHUT_WR", 1); #endif #ifdef SHUT_RDWR - PyModule_AddIntMacro(m, SHUT_RDWR); + ADD_INT_MACRO(m, SHUT_RDWR); #elif defined(SD_BOTH) - PyModule_AddIntConstant(m, "SHUT_RDWR", SD_BOTH); + ADD_INT_CONST(m, "SHUT_RDWR", SD_BOTH); #else - PyModule_AddIntConstant(m, "SHUT_RDWR", 2); + ADD_INT_CONST(m, "SHUT_RDWR", 2); #endif #ifdef SIO_RCVALL @@ -8740,22 +8810,26 @@ PyInit__socket(void) #endif }; int i; - for(i = 0; i<Py_ARRAY_LENGTH(codes); ++i) { - PyObject *tmp; - tmp = PyLong_FromUnsignedLong(codes[i]); - if (tmp == NULL) - return NULL; - PyModule_AddObject(m, names[i], tmp); + for (i = 0; i < 
Py_ARRAY_LENGTH(codes); ++i) { + PyObject *tmp = PyLong_FromUnsignedLong(codes[i]); + if (tmp == NULL) { + goto error; + } + int rc = PyModule_AddObjectRef(m, names[i], tmp); + Py_DECREF(tmp); + if (rc < 0) { + goto error; + } } } - PyModule_AddIntMacro(m, RCVALL_OFF); - PyModule_AddIntMacro(m, RCVALL_ON); - PyModule_AddIntMacro(m, RCVALL_SOCKETLEVELONLY); + ADD_INT_MACRO(m, RCVALL_OFF); + ADD_INT_MACRO(m, RCVALL_ON); + ADD_INT_MACRO(m, RCVALL_SOCKETLEVELONLY); #ifdef RCVALL_IPLEVEL - PyModule_AddIntMacro(m, RCVALL_IPLEVEL); + ADD_INT_MACRO(m, RCVALL_IPLEVEL); #endif #ifdef RCVALL_MAX - PyModule_AddIntMacro(m, RCVALL_MAX); + ADD_INT_MACRO(m, RCVALL_MAX); #endif #endif /* _MSTCPIP_ */ @@ -8767,10 +8841,65 @@ PyInit__socket(void) #ifdef MS_WINDOWS /* remove some flags on older version Windows during run-time */ if (remove_unusable_flags(m) < 0) { - Py_DECREF(m); - return NULL; + goto error; } #endif - return m; +#undef ADD_INT_MACRO +#undef ADD_INT_CONST +#undef ADD_STR_CONST + + return 0; + +error: + return -1; +} + +static struct PyModuleDef_Slot socket_slots[] = { + {Py_mod_exec, socket_exec}, + {0, NULL}, +}; + +static int +socket_traverse(PyObject *mod, visitproc visit, void *arg) +{ + socket_state *state = get_module_state(mod); + Py_VISIT(state->sock_type); + Py_VISIT(state->socket_herror); + Py_VISIT(state->socket_gaierror); + return 0; +} + +static int +socket_clear(PyObject *mod) +{ + socket_state *state = get_module_state(mod); + Py_CLEAR(state->sock_type); + Py_CLEAR(state->socket_herror); + Py_CLEAR(state->socket_gaierror); + return 0; +} + +static void +socket_free(void *mod) +{ + (void)socket_clear((PyObject *)mod); +} + +static struct PyModuleDef socketmodule = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = PySocket_MODULE_NAME, + .m_doc = socket_doc, + .m_size = sizeof(socket_state), + .m_methods = socket_methods, + .m_slots = socket_slots, + .m_traverse = socket_traverse, + .m_clear = socket_clear, + .m_free = socket_free, +}; + +PyMODINIT_FUNC +PyInit__socket(void) +{ + return PyModuleDef_Init(&socketmodule); } diff --git a/Modules/socketmodule.h b/Modules/socketmodule.h index f31ba532a6c60d..f5ca00450ee92a 100644 --- a/Modules/socketmodule.h +++ b/Modules/socketmodule.h @@ -322,6 +322,7 @@ typedef struct { sets a Python exception */ _PyTime_t sock_timeout; /* Operation timeout in seconds; 0.0 means non-blocking */ + struct _socket_state *state; } PySocketSockObject; /* --- C API ----------------------------------------------------*/ diff --git a/Modules/symtablemodule.c b/Modules/symtablemodule.c index 4ef1d8cde07db6..91538b4fb15cbd 100644 --- a/Modules/symtablemodule.c +++ b/Modules/symtablemodule.c @@ -66,12 +66,6 @@ static PyMethodDef symtable_methods[] = { {NULL, NULL} /* sentinel */ }; -static int -symtable_init_stentry_type(PyObject *m) -{ - return PyType_Ready(&PySTEntry_Type); -} - static int symtable_init_constants(PyObject *m) { @@ -105,7 +99,6 @@ symtable_init_constants(PyObject *m) } static PyModuleDef_Slot symtable_slots[] = { - {Py_mod_exec, symtable_init_stentry_type}, {Py_mod_exec, symtable_init_constants}, {0, NULL} }; diff --git a/Modules/tkappinit.c b/Modules/tkappinit.c index 7616d9d319d228..67d6250318c616 100644 --- a/Modules/tkappinit.c +++ b/Modules/tkappinit.c @@ -18,18 +18,10 @@ #include "tkinter.h" -#ifdef TKINTER_PROTECT_LOADTK -/* See Tkapp_TkInit in _tkinter.c for the usage of tk_load_faile */ -static int tk_load_failed; -#endif - int Tcl_AppInit(Tcl_Interp *interp) { const char *_tkinter_skip_tk_init; -#ifdef TKINTER_PROTECT_LOADTK - const 
char *_tkinter_tk_failed; -#endif #ifdef TK_AQUA #ifndef MAX_PATH_LEN @@ -90,23 +82,7 @@ Tcl_AppInit(Tcl_Interp *interp) return TCL_OK; } -#ifdef TKINTER_PROTECT_LOADTK - _tkinter_tk_failed = Tcl_GetVar(interp, - "_tkinter_tk_failed", TCL_GLOBAL_ONLY); - - if (tk_load_failed || ( - _tkinter_tk_failed != NULL && - strcmp(_tkinter_tk_failed, "1") == 0)) { - Tcl_SetResult(interp, TKINTER_LOADTK_ERRMSG, TCL_STATIC); - return TCL_ERROR; - } -#endif - if (Tk_Init(interp) == TCL_ERROR) { -#ifdef TKINTER_PROTECT_LOADTK - tk_load_failed = 1; - Tcl_SetVar(interp, "_tkinter_tk_failed", "1", TCL_GLOBAL_ONLY); -#endif return TCL_ERROR; } diff --git a/Modules/tkinter.h b/Modules/tkinter.h index cb5a806b0c4326..40281c21760331 100644 --- a/Modules/tkinter.h +++ b/Modules/tkinter.h @@ -16,12 +16,4 @@ (TK_RELEASE_LEVEL << 8) | \ (TK_RELEASE_SERIAL << 0)) -/* Protect Tk 8.4.13 and older from a deadlock that happens when trying - * to load tk after a failed attempt. */ -#if TK_HEX_VERSION < 0x0804020e -#define TKINTER_PROTECT_LOADTK -#define TKINTER_LOADTK_ERRMSG \ - "Calling Tk_Init again after a previous call failed might deadlock" -#endif - #endif /* !TKINTER_H */ diff --git a/Objects/abstract.c b/Objects/abstract.c index 9dc74fb9c2608c..e95785900c9c5f 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -5,6 +5,7 @@ #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _Py_EnterRecursiveCallTstate() #include "pycore_object.h" // _Py_CheckSlotResult() +#include "pycore_long.h" // _Py_IsNegative #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_unionobject.h" // _PyUnion_Check() @@ -1483,7 +1484,7 @@ PyNumber_AsSsize_t(PyObject *item, PyObject *err) /* Whether or not it is less than or equal to zero is determined by the sign of ob_size */ - if (_PyLong_Sign(value) < 0) + if (_PyLong_IsNegative((PyLongObject *)value)) result = PY_SSIZE_T_MIN; else result = PY_SSIZE_T_MAX; diff --git a/Objects/boolobject.c b/Objects/boolobject.c index a035f463323823..597a76fa5cb162 100644 --- a/Objects/boolobject.c +++ b/Objects/boolobject.c @@ -2,6 +2,7 @@ #include "Python.h" #include "pycore_object.h" // _Py_FatalRefcountError() +#include "pycore_long.h" // FALSE_TAG TRUE_TAG #include "pycore_runtime.h" // _Py_ID() #include <stddef.h> @@ -144,10 +145,14 @@ static PyNumberMethods bool_as_number = { 0, /* nb_index */ }; -static void _Py_NO_RETURN -bool_dealloc(PyObject* Py_UNUSED(ignore)) +static void +bool_dealloc(PyObject *boolean) { - _Py_FatalRefcountError("deallocating True or False"); + /* This should never get called, but we also don't want to SEGV if + * we accidentally decref Booleans out of existence. Instead, + * since bools are immortal, re-set the reference count. + */ + _Py_SetImmortal(boolean); } /* The type object for bool. Note that this cannot be subclassed! 
*/ @@ -198,10 +203,14 @@ PyTypeObject PyBool_Type = { struct _longobject _Py_FalseStruct = { PyObject_HEAD_INIT(&PyBool_Type) - { 0, { 0 } } + { .lv_tag = _PyLong_FALSE_TAG, + { 0 } + } }; struct _longobject _Py_TrueStruct = { PyObject_HEAD_INIT(&PyBool_Type) - { 1, { 1 } } + { .lv_tag = _PyLong_TRUE_TAG, + { 1 } + } }; diff --git a/Objects/bytes_methods.c b/Objects/bytes_methods.c index 6b8166385d375b..33aa9c3db6e805 100644 --- a/Objects/bytes_methods.c +++ b/Objects/bytes_methods.c @@ -258,9 +258,12 @@ _Py_bytes_istitle(const char *cptr, Py_ssize_t len) const unsigned char *e; int cased, previous_is_cased; - /* Shortcut for single character strings */ - if (len == 1) - return PyBool_FromLong(Py_ISUPPER(*p)); + if (len == 1) { + if (Py_ISUPPER(*p)) { + Py_RETURN_TRUE; + } + Py_RETURN_FALSE; + } /* Special case for empty strings */ if (len == 0) @@ -774,7 +777,7 @@ _Py_bytes_tailmatch(const char *str, Py_ssize_t len, { Py_ssize_t start = 0; Py_ssize_t end = PY_SSIZE_T_MAX; - PyObject *subobj; + PyObject *subobj = NULL; int result; if (!stringlib_parse_args_finds(function_name, args, &subobj, &start, &end)) diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 687a654bdae137..e7e85cc19cda75 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -705,7 +705,6 @@ _PyBytes_FormatEx(const char *format, Py_ssize_t format_len, case ' ': flags |= F_BLANK; continue; case '#': flags |= F_ALT; continue; case '0': flags |= F_ZERO; continue; - case 'z': flags |= F_NO_NEG_0; continue; } break; } @@ -3067,7 +3066,7 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) PyObject_Realloc(v, PyBytesObject_SIZE + newsize); if (*pv == NULL) { #ifdef Py_REF_DEBUG - _Py_DecRefTotal(); + _Py_DecRefTotal(_PyInterpreterState_GET()); #endif PyObject_Free(v); PyErr_NoMemory(); @@ -3090,25 +3089,6 @@ _Py_COMP_DIAG_POP } -PyStatus -_PyBytes_InitTypes(PyInterpreterState *interp) -{ - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyBytes_Type) < 0) { - return _PyStatus_ERR("Can't initialize bytes type"); - } - - if (PyType_Ready(&PyBytesIter_Type) < 0) { - return _PyStatus_ERR("Can't initialize bytes iterator type"); - } - - return _PyStatus_OK(); -} - - /*********************** Bytes Iterator ****************************/ typedef struct { diff --git a/Objects/call.c b/Objects/call.c index bd027e41f8a9a5..cf6e357a990441 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -8,16 +8,6 @@ #include "pycore_tuple.h" // _PyTuple_ITEMS() -static PyObject *const * -_PyStack_UnpackDict(PyThreadState *tstate, - PyObject *const *args, Py_ssize_t nargs, - PyObject *kwargs, PyObject **p_kwnames); - -static void -_PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, - PyObject *kwnames); - - static PyObject * null_error(PyThreadState *tstate) { @@ -965,7 +955,7 @@ _PyStack_AsDict(PyObject *const *values, PyObject *kwnames) The newly allocated argument vector supports PY_VECTORCALL_ARGUMENTS_OFFSET. 
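   (That flag means one extra slot is allocated immediately before stack[0],
   so a callee may temporarily prepend an argument there, typically a bound
   "self"; see PEP 590.)

   An illustrative usage sketch -- assumed, not part of this patch; tstate,
   callable, args, nargs and kwargs stand for the caller's own locals:

       PyObject *kwnames;
       PyObject *const *stack = _PyStack_UnpackDict(tstate, args, nargs,
                                                    kwargs, &kwnames);
       if (stack == NULL) {
           return NULL;
       }
       PyObject *res = PyObject_Vectorcall(callable, stack,
                                           nargs | PY_VECTORCALL_ARGUMENTS_OFFSET,
                                           kwnames);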
When done, you must call _PyStack_UnpackDict_Free(stack, nargs, kwnames) */ -static PyObject *const * +PyObject *const * _PyStack_UnpackDict(PyThreadState *tstate, PyObject *const *args, Py_ssize_t nargs, PyObject *kwargs, PyObject **p_kwnames) @@ -1034,7 +1024,7 @@ _PyStack_UnpackDict(PyThreadState *tstate, return stack; } -static void +void _PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, PyObject *kwnames) { @@ -1042,6 +1032,12 @@ _PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, for (Py_ssize_t i = 0; i < n; i++) { Py_DECREF(stack[i]); } + _PyStack_UnpackDict_FreeNoDecRef(stack, kwnames); +} + +void +_PyStack_UnpackDict_FreeNoDecRef(PyObject *const *stack, PyObject *kwnames) +{ PyMem_Free((PyObject **)stack - 1); Py_DECREF(kwnames); } diff --git a/Objects/classobject.c b/Objects/classobject.c index 2cb192e725d40d..71c4a4e5d0f8ab 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -181,7 +181,7 @@ method_getattro(PyObject *obj, PyObject *name) PyObject *descr = NULL; { - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) return NULL; } @@ -395,7 +395,7 @@ instancemethod_getattro(PyObject *self, PyObject *name) PyTypeObject *tp = Py_TYPE(self); PyObject *descr = NULL; - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) return NULL; } diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 65b1d258fb76af..9b54c610581174 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -431,13 +431,13 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) if (_Py_next_func_version != 0) { _Py_next_func_version++; } + co->_co_monitoring = NULL; + co->_co_instrumentation_version = 0; /* not set */ co->co_weakreflist = NULL; co->co_extra = NULL; co->_co_cached = NULL; - co->_co_linearray_entry_size = 0; - co->_co_linearray = NULL; memcpy(_PyCode_CODE(co), PyBytes_AS_STRING(con->code), PyBytes_GET_SIZE(con->code)); int entry_point = 0; @@ -816,54 +816,6 @@ PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno) * source location tracking (co_lines/co_positions) ******************/ -/* Use co_linetable to compute the line number from a bytecode index, addrq. See - lnotab_notes.txt for the details of the lnotab representation. 
-*/ - -int -_PyCode_CreateLineArray(PyCodeObject *co) -{ - assert(co->_co_linearray == NULL); - PyCodeAddressRange bounds; - int size; - int max_line = 0; - _PyCode_InitAddressRange(co, &bounds); - while(_PyLineTable_NextAddressRange(&bounds)) { - if (bounds.ar_line > max_line) { - max_line = bounds.ar_line; - } - } - if (max_line < (1 << 15)) { - size = 2; - } - else { - size = 4; - } - co->_co_linearray = PyMem_Malloc(Py_SIZE(co)*size); - if (co->_co_linearray == NULL) { - PyErr_NoMemory(); - return -1; - } - co->_co_linearray_entry_size = size; - _PyCode_InitAddressRange(co, &bounds); - while(_PyLineTable_NextAddressRange(&bounds)) { - int start = bounds.ar_start / sizeof(_Py_CODEUNIT); - int end = bounds.ar_end / sizeof(_Py_CODEUNIT); - for (int index = start; index < end; index++) { - assert(index < (int)Py_SIZE(co)); - if (size == 2) { - assert(((int16_t)bounds.ar_line) == bounds.ar_line); - ((int16_t *)co->_co_linearray)[index] = bounds.ar_line; - } - else { - assert(size == 4); - ((int32_t *)co->_co_linearray)[index] = bounds.ar_line; - } - } - } - return 0; -} - int PyCode_Addr2Line(PyCodeObject *co, int addrq) { @@ -871,9 +823,6 @@ PyCode_Addr2Line(PyCodeObject *co, int addrq) return co->co_firstlineno; } assert(addrq >= 0 && addrq < _PyCode_NBYTES(co)); - if (co->_co_linearray) { - return _PyCode_LineNumberFromArray(co, addrq / sizeof(_Py_CODEUNIT)); - } PyCodeAddressRange bounds; _PyCode_InitAddressRange(co, &bounds); return _PyCode_CheckLineNumber(addrq, &bounds); @@ -1531,17 +1480,17 @@ PyCode_GetFreevars(PyCodeObject *code) } static void -deopt_code(_Py_CODEUNIT *instructions, Py_ssize_t len) +deopt_code(PyCodeObject *code, _Py_CODEUNIT *instructions) { + Py_ssize_t len = Py_SIZE(code); for (int i = 0; i < len; i++) { - _Py_CODEUNIT instruction = instructions[i]; - int opcode = _PyOpcode_Deopt[instruction.op.code]; + int opcode = _Py_GetBaseOpcode(code, i); int caches = _PyOpcode_Caches[opcode]; instructions[i].op.code = opcode; - while (caches--) { - instructions[++i].op.code = CACHE; - instructions[i].op.arg = 0; + for (int j = 1; j <= caches; j++) { + instructions[i+j].cache = 0; } + i += caches; } } @@ -1559,7 +1508,7 @@ _PyCode_GetCode(PyCodeObject *co) if (code == NULL) { return NULL; } - deopt_code((_Py_CODEUNIT *)PyBytes_AS_STRING(code), Py_SIZE(co)); + deopt_code(co, (_Py_CODEUNIT *)PyBytes_AS_STRING(code)); assert(co->_co_cached->_co_code == NULL); co->_co_cached->_co_code = Py_NewRef(code); return code; @@ -1693,6 +1642,30 @@ code_new_impl(PyTypeObject *type, int argcount, int posonlyargcount, return co; } +static void +free_monitoring_data(_PyCoMonitoringData *data) +{ + if (data == NULL) { + return; + } + if (data->tools) { + PyMem_Free(data->tools); + } + if (data->lines) { + PyMem_Free(data->lines); + } + if (data->line_tools) { + PyMem_Free(data->line_tools); + } + if (data->per_instruction_opcodes) { + PyMem_Free(data->per_instruction_opcodes); + } + if (data->per_instruction_tools) { + PyMem_Free(data->per_instruction_tools); + } + PyMem_Free(data); +} + static void code_dealloc(PyCodeObject *co) { @@ -1739,9 +1712,7 @@ code_dealloc(PyCodeObject *co) if (co->co_weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject*)co); } - if (co->_co_linearray) { - PyMem_Free(co->_co_linearray); - } + free_monitoring_data(co->_co_monitoring); PyObject_Free(co); } @@ -1885,7 +1856,7 @@ code_hash(PyCodeObject *co) SCRAMBLE_IN(co->co_firstlineno); SCRAMBLE_IN(Py_SIZE(co)); for (int i = 0; i < Py_SIZE(co); i++) { - int deop = _PyOpcode_Deopt[_PyCode_CODE(co)[i].op.code]; + 
int deop = _Py_GetBaseOpcode(co, i); SCRAMBLE_IN(deop); SCRAMBLE_IN(_PyCode_CODE(co)[i].op.arg); i += _PyOpcode_Caches[deop]; @@ -1921,6 +1892,11 @@ static PyMemberDef code_memberlist[] = { static PyObject * code_getlnotab(PyCodeObject *code, void *closure) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "co_lnotab is deprecated, use co_lines instead.", + 1) < 0) { + return NULL; + } return decode_linetable(code); } @@ -2309,7 +2285,7 @@ _PyCode_ConstantKey(PyObject *op) void _PyStaticCode_Fini(PyCodeObject *co) { - deopt_code(_PyCode_CODE(co), Py_SIZE(co)); + deopt_code(co, _PyCode_CODE(co)); PyMem_Free(co->co_extra); if (co->_co_cached != NULL) { Py_CLEAR(co->_co_cached->_co_code); @@ -2324,10 +2300,8 @@ _PyStaticCode_Fini(PyCodeObject *co) PyObject_ClearWeakRefs((PyObject *)co); co->co_weakreflist = NULL; } - if (co->_co_linearray) { - PyMem_Free(co->_co_linearray); - co->_co_linearray = NULL; - } + free_monitoring_data(co->_co_monitoring); + co->_co_monitoring = NULL; } int diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 53f9a380346a0d..2ef520044340ee 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -304,7 +304,7 @@ static inline void dictkeys_incref(PyDictKeysObject *dk) { #ifdef Py_REF_DEBUG - _Py_IncRefTotal(); + _Py_IncRefTotal(_PyInterpreterState_GET()); #endif dk->dk_refcnt++; } @@ -314,7 +314,7 @@ dictkeys_decref(PyInterpreterState *interp, PyDictKeysObject *dk) { assert(dk->dk_refcnt > 0); #ifdef Py_REF_DEBUG - _Py_DecRefTotal(); + _Py_DecRefTotal(_PyInterpreterState_GET()); #endif if (--dk->dk_refcnt == 0) { free_keys_object(interp, dk); @@ -634,7 +634,7 @@ new_keys_object(PyInterpreterState *interp, uint8_t log2_size, bool unicode) } } #ifdef Py_REF_DEBUG - _Py_IncRefTotal(); + _Py_IncRefTotal(_PyInterpreterState_GET()); #endif dk->dk_refcnt = 1; dk->dk_log2_size = log2_size; @@ -824,7 +824,7 @@ clone_combined_dict_keys(PyDictObject *orig) we have it now; calling dictkeys_incref would be an error as keys->dk_refcnt is already set to 1 (after memcpy). */ #ifdef Py_REF_DEBUG - _Py_IncRefTotal(); + _Py_IncRefTotal(_PyInterpreterState_GET()); #endif return keys; } @@ -1530,7 +1530,7 @@ dictresize(PyInterpreterState *interp, PyDictObject *mp, // We can not use free_keys_object here because key's reference // are moved already. #ifdef Py_REF_DEBUG - _Py_DecRefTotal(); + _Py_DecRefTotal(_PyInterpreterState_GET()); #endif if (oldkeys == Py_EMPTY_KEYS) { oldkeys->dk_refcnt--; diff --git a/Objects/exceptions.c b/Objects/exceptions.c index d69f7400ca6042..6c9dfbd9b415cf 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -1421,7 +1421,12 @@ _PyExc_PrepReraiseStar(PyObject *orig, PyObject *excs) if (res < 0) { goto done; } - result = _PyExc_CreateExceptionGroup("", raised_list); + if (PyList_GET_SIZE(raised_list) > 1) { + result = _PyExc_CreateExceptionGroup("", raised_list); + } + else { + result = Py_NewRef(PyList_GetItem(raised_list, 0)); + } if (result == NULL) { goto done; } @@ -3591,10 +3596,6 @@ static struct static_exception static_exceptions[] = { int _PyExc_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return 0; - } - for (size_t i=0; i < Py_ARRAY_LENGTH(static_exceptions); i++) { PyTypeObject *exc = static_exceptions[i].exc; if (_PyStaticType_InitBuiltin(exc) < 0) { @@ -3764,113 +3765,3 @@ _PyException_AddNote(PyObject *exc, PyObject *note) return res; } -/* Helper to do the equivalent of "raise X from Y" in C, but always using - * the current exception rather than passing one in. 
- * - * We currently limit this to *only* exceptions that use the BaseException - * tp_init and tp_new methods, since we can be reasonably sure we can wrap - * those correctly without losing data and without losing backwards - * compatibility. - * - * We also aim to rule out *all* exceptions that might be storing additional - * state, whether by having a size difference relative to BaseException, - * additional arguments passed in during construction or by having a - * non-empty instance dict. - * - * We need to be very careful with what we wrap, since changing types to - * a broader exception type would be backwards incompatible for - * existing codecs, and with different init or new method implementations - * may either not support instantiation with PyErr_Format or lose - * information when instantiated that way. - * - * XXX (ncoghlan): This could be made more comprehensive by exploiting the - * fact that exceptions are expected to support pickling. If more builtin - * exceptions (e.g. AttributeError) start to be converted to rich - * exceptions with additional attributes, that's probably a better approach - * to pursue over adding special cases for particular stateful subclasses. - * - * Returns a borrowed reference to the new exception (if any), NULL if the - * existing exception was left in place. - */ -PyObject * -_PyErr_TrySetFromCause(const char *format, ...) -{ - PyObject* msg_prefix; - PyObject *instance_args; - Py_ssize_t num_args, caught_type_size, base_exc_size; - va_list vargs; - int same_basic_size; - - PyObject *exc = PyErr_GetRaisedException(); - PyTypeObject *caught_type = Py_TYPE(exc); - /* Ensure type info indicates no extra state is stored at the C level - * and that the type can be reinstantiated using PyErr_Format - */ - caught_type_size = caught_type->tp_basicsize; - base_exc_size = _PyExc_BaseException.tp_basicsize; - same_basic_size = ( - caught_type_size == base_exc_size || - (_PyType_SUPPORTS_WEAKREFS(caught_type) && - (caught_type_size == base_exc_size + (Py_ssize_t)sizeof(PyObject *)) - ) - ); - if (caught_type->tp_init != (initproc)BaseException_init || - caught_type->tp_new != BaseException_new || - !same_basic_size || - caught_type->tp_itemsize != _PyExc_BaseException.tp_itemsize) { - /* We can't be sure we can wrap this safely, since it may contain - * more state than just the exception type. Accordingly, we just - * leave it alone. - */ - PyErr_SetRaisedException(exc); - return NULL; - } - - /* Check the args are empty or contain a single string */ - instance_args = ((PyBaseExceptionObject *)exc)->args; - num_args = PyTuple_GET_SIZE(instance_args); - if (num_args > 1 || - (num_args == 1 && - !PyUnicode_CheckExact(PyTuple_GET_ITEM(instance_args, 0)))) { - /* More than 1 arg, or the one arg we do have isn't a string - */ - PyErr_SetRaisedException(exc); - return NULL; - } - - /* Ensure the instance dict is also empty */ - if (!_PyObject_IsInstanceDictEmpty(exc)) { - /* While we could potentially copy a non-empty instance dictionary - * to the replacement exception, for now we take the more - * conservative path of leaving exceptions with attributes set - * alone. - */ - PyErr_SetRaisedException(exc); - return NULL; - } - - /* For exceptions that we can wrap safely, we chain the original - * exception to a new one of the exact same type with an - * error message that mentions the additional details and the - * original exception. 
- * - * It would be nice to wrap OSError and various other exception - * types as well, but that's quite a bit trickier due to the extra - * state potentially stored on OSError instances. - */ - va_start(vargs, format); - msg_prefix = PyUnicode_FromFormatV(format, vargs); - va_end(vargs); - if (msg_prefix == NULL) { - Py_DECREF(exc); - return NULL; - } - - PyErr_Format((PyObject*)Py_TYPE(exc), "%U (%s: %S)", - msg_prefix, Py_TYPE(exc)->tp_name, exc); - Py_DECREF(msg_prefix); - PyObject *new_exc = PyErr_GetRaisedException(); - PyException_SetCause(new_exc, exc); - PyErr_SetRaisedException(new_exc); - return new_exc; -} diff --git a/Objects/floatobject.c b/Objects/floatobject.c index d641311f1126cd..a694ddcd019ee8 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -12,7 +12,7 @@ #include "pycore_object.h" // _PyObject_Init() #include "pycore_pymath.h" // _PY_SHORT_FLOAT_REPR #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() #include <ctype.h> #include <float.h> @@ -1990,20 +1990,10 @@ _PyFloat_InitState(PyInterpreterState *interp) PyStatus _PyFloat_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyFloat_Type) < 0) { - return _PyStatus_ERR("Can't initialize float type"); - } - /* Init float info */ - if (FloatInfoType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin(&FloatInfoType, - &floatinfo_desc) < 0) { - return _PyStatus_ERR("can't init float info type"); - } + if (_PyStructSequence_InitBuiltin(&FloatInfoType, + &floatinfo_desc) < 0) { + return _PyStatus_ERR("can't init float info type"); } return _PyStatus_OK(); @@ -2039,7 +2029,7 @@ void _PyFloat_FiniType(PyInterpreterState *interp) { if (_Py_IsMainInterpreter(interp)) { - _PyStructSequence_FiniType(&FloatInfoType); + _PyStructSequence_FiniBuiltin(&FloatInfoType); } } diff --git a/Objects/frameobject.c b/Objects/frameobject.c index b5051790bcdd11..c66d5f4d8e6689 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -17,7 +17,6 @@ static PyMemberDef frame_memberlist[] = { {"f_trace_lines", T_BOOL, OFF(f_trace_lines), 0}, - {"f_trace_opcodes", T_BOOL, OFF(f_trace_opcodes), 0}, {NULL} /* Sentinel */ }; @@ -104,24 +103,29 @@ frame_getback(PyFrameObject *f, void *closure) return res; } -// Given the index of the effective opcode, scan back to construct the oparg -// with EXTENDED_ARG. This only works correctly with *unquickened* code, -// obtained via a call to _PyCode_GetCode! -static unsigned int -get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i) +static PyObject * +frame_gettrace_opcodes(PyFrameObject *f, void *closure) { - _Py_CODEUNIT word; - unsigned int oparg = codestr[i].op.arg; - if (i >= 1 && (word = codestr[i-1]).op.code == EXTENDED_ARG) { - oparg |= word.op.arg << 8; - if (i >= 2 && (word = codestr[i-2]).op.code == EXTENDED_ARG) { - oparg |= word.op.arg << 16; - if (i >= 3 && (word = codestr[i-3]).op.code == EXTENDED_ARG) { - oparg |= word.op.arg << 24; - } - } + PyObject *result = f->f_trace_opcodes ? 
Py_True : Py_False; + return Py_NewRef(result); +} + +static int +frame_settrace_opcodes(PyFrameObject *f, PyObject* value, void *Py_UNUSED(ignored)) +{ + if (!PyBool_Check(value)) { + PyErr_SetString(PyExc_TypeError, + "attribute value type must be bool"); + return -1; + } + if (value == Py_True) { + f->f_trace_opcodes = 1; + _PyInterpreterState_GET()->f_opcode_trace_set = true; } - return oparg; + else { + f->f_trace_opcodes = 0; + } + return 0; } /* Model the evaluation stack, to determine which jumps @@ -299,56 +303,52 @@ mark_stacks(PyCodeObject *code_obj, int len) while (todo) { todo = 0; /* Scan instructions */ - for (i = 0; i < len; i++) { + for (i = 0; i < len;) { int64_t next_stack = stacks[i]; + opcode = _Py_GetBaseOpcode(code_obj, i); + int oparg = 0; + while (opcode == EXTENDED_ARG) { + oparg = (oparg << 8) | code[i].op.arg; + i++; + opcode = _Py_GetBaseOpcode(code_obj, i); + stacks[i] = next_stack; + } + int next_i = i + _PyOpcode_Caches[opcode] + 1; if (next_stack == UNINITIALIZED) { + i = next_i; continue; } - opcode = code[i].op.code; + oparg = (oparg << 8) | code[i].op.arg; switch (opcode) { - case JUMP_IF_FALSE_OR_POP: - case JUMP_IF_TRUE_OR_POP: case POP_JUMP_IF_FALSE: case POP_JUMP_IF_TRUE: { int64_t target_stack; - int j = get_arg(code, i); - j += i + 1; + int j = next_i + oparg; assert(j < len); - if (stacks[j] == UNINITIALIZED && j < i) { - todo = 1; - } - if (opcode == JUMP_IF_FALSE_OR_POP || - opcode == JUMP_IF_TRUE_OR_POP) - { - target_stack = next_stack; - next_stack = pop_value(next_stack); - } - else { - next_stack = pop_value(next_stack); - target_stack = next_stack; - } + next_stack = pop_value(next_stack); + target_stack = next_stack; assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack); stacks[j] = target_stack; - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; } case SEND: - j = get_arg(code, i) + i + INLINE_CACHE_ENTRIES_SEND + 1; + j = oparg + i + INLINE_CACHE_ENTRIES_SEND + 1; assert(j < len); assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack); stacks[j] = next_stack; - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; case JUMP_FORWARD: - j = get_arg(code, i) + i + 1; + j = oparg + i + 1; assert(j < len); assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack); stacks[j] = next_stack; break; case JUMP_BACKWARD: case JUMP_BACKWARD_NO_INTERRUPT: - j = i + 1 - get_arg(code, i); + j = i + 1 - oparg; assert(j >= 0); assert(j < len); if (stacks[j] == UNINITIALIZED && j < i) { @@ -357,24 +357,16 @@ mark_stacks(PyCodeObject *code_obj, int len) assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack); stacks[j] = next_stack; break; - case COMPARE_AND_BRANCH: - next_stack = pop_value(pop_value(next_stack)); - i++; - j = get_arg(code, i) + i + 1; - assert(j < len); - assert(stacks[j] == UNINITIALIZED || stacks[j] == next_stack); - stacks[j] = next_stack; - break; case GET_ITER: case GET_AITER: next_stack = push_value(pop_value(next_stack), Iterator); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; case FOR_ITER: { int64_t target_stack = push_value(next_stack, Object); - stacks[i+1] = target_stack; - j = get_arg(code, i) + 1 + INLINE_CACHE_ENTRIES_FOR_ITER + i; + stacks[next_i] = target_stack; + j = oparg + 1 + INLINE_CACHE_ENTRIES_FOR_ITER + i; assert(j < len); assert(stacks[j] == UNINITIALIZED || stacks[j] == target_stack); stacks[j] = target_stack; @@ -382,16 +374,16 @@ mark_stacks(PyCodeObject *code_obj, int len) } case END_ASYNC_FOR: next_stack = 
pop_value(pop_value(next_stack)); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; case PUSH_EXC_INFO: next_stack = push_value(next_stack, Except); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; case POP_EXCEPT: assert(top_of_stack(next_stack) == Except); next_stack = pop_value(next_stack); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; case RETURN_VALUE: assert(pop_value(next_stack) == EMPTY_STACK); @@ -407,57 +399,62 @@ mark_stacks(PyCodeObject *code_obj, int len) break; case PUSH_NULL: next_stack = push_value(next_stack, Null); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; case LOAD_GLOBAL: { - int j = get_arg(code, i); + int j = oparg; if (j & 1) { next_stack = push_value(next_stack, Null); } next_stack = push_value(next_stack, Object); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; } case LOAD_ATTR: { assert(top_of_stack(next_stack) == Object); - int j = get_arg(code, i); + int j = oparg; if (j & 1) { next_stack = pop_value(next_stack); next_stack = push_value(next_stack, Null); next_stack = push_value(next_stack, Object); } - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; } case CALL: { - int args = get_arg(code, i); + int args = oparg; for (int j = 0; j < args+2; j++) { next_stack = pop_value(next_stack); } next_stack = push_value(next_stack, Object); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; } case SWAP: { - int n = get_arg(code, i); + int n = oparg; next_stack = stack_swap(next_stack, n); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; } case COPY: { - int n = get_arg(code, i); + int n = oparg; next_stack = push_value(next_stack, peek(next_stack, n)); - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; break; } + case CACHE: + case RESERVED: + { + assert(0); + } default: { - int delta = PyCompile_OpcodeStackEffect(opcode, get_arg(code, i)); + int delta = PyCompile_OpcodeStackEffect(opcode, oparg); assert(delta != PY_INVALID_STACK_EFFECT); while (delta < 0) { next_stack = pop_value(next_stack); @@ -467,9 +464,10 @@ mark_stacks(PyCodeObject *code_obj, int len) next_stack = push_value(next_stack, Object); delta--; } - stacks[i+1] = next_stack; + stacks[next_i] = next_stack; } } + i = next_i; } /* Scan exception table */ unsigned char *start = (unsigned char *)PyBytes_AS_STRING(code_obj->co_exceptiontable); @@ -664,31 +662,43 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore * In addition, jumps are forbidden when not tracing, * as this is a debugging feature. 
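     * (In terms of sys.monitoring (PEP 669) events, the switch below allows
     * setting f_lineno from PY_RESUME, JUMP, BRANCH, LINE and PY_YIELD,
     * reports a specific error for PY_START ("new frame") and for
     * CALL/C_RETURN ("during a call"), and falls back to the generic
     * "can only jump from a 'line' trace event" message for everything else.)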
*/ - switch(PyThreadState_GET()->tracing_what) { - case PyTrace_EXCEPTION: - PyErr_SetString(PyExc_ValueError, - "can only jump from a 'line' trace event"); - return -1; - case PyTrace_CALL: + int what_event = PyThreadState_GET()->what_event; + if (what_event < 0) { + PyErr_Format(PyExc_ValueError, + "f_lineno can only be set in a trace function"); + return -1; + } + switch (what_event) { + case PY_MONITORING_EVENT_PY_RESUME: + case PY_MONITORING_EVENT_JUMP: + case PY_MONITORING_EVENT_BRANCH: + case PY_MONITORING_EVENT_LINE: + case PY_MONITORING_EVENT_PY_YIELD: + /* Setting f_lineno is allowed for the above events */ + break; + case PY_MONITORING_EVENT_PY_START: PyErr_Format(PyExc_ValueError, "can't jump from the 'call' trace event of a new frame"); return -1; - case PyTrace_LINE: - break; - case PyTrace_RETURN: - if (state == FRAME_SUSPENDED) { - break; - } - /* fall through */ - default: + case PY_MONITORING_EVENT_CALL: + case PY_MONITORING_EVENT_C_RETURN: PyErr_SetString(PyExc_ValueError, + "can't jump during a call"); + return -1; + case PY_MONITORING_EVENT_PY_RETURN: + case PY_MONITORING_EVENT_PY_UNWIND: + case PY_MONITORING_EVENT_PY_THROW: + case PY_MONITORING_EVENT_RAISE: + case PY_MONITORING_EVENT_C_RAISE: + case PY_MONITORING_EVENT_INSTRUCTION: + case PY_MONITORING_EVENT_EXCEPTION_HANDLED: + PyErr_Format(PyExc_ValueError, "can only jump from a 'line' trace event"); return -1; - } - if (!f->f_trace) { - PyErr_Format(PyExc_ValueError, - "f_lineno can only be set by a trace function"); - return -1; + default: + PyErr_SetString(PyExc_SystemError, + "unexpected event type"); + return -1; } int new_lineno; @@ -821,6 +831,7 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore start_stack = pop_value(start_stack); } /* Finally set the new lasti and return OK. */ + f->f_last_traced_line = new_lineno; f->f_lineno = 0; f->f_frame->prev_instr = _PyCode_CODE(f->f_frame->f_code) + best_addr; return 0; @@ -841,7 +852,10 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure) if (v == Py_None) { v = NULL; } - Py_XSETREF(f->f_trace, Py_XNewRef(v)); + if (v != f->f_trace) { + Py_XSETREF(f->f_trace, Py_XNewRef(v)); + f->f_last_traced_line = -1; + } return 0; } @@ -856,6 +870,7 @@ static PyGetSetDef frame_getsetlist[] = { {"f_globals", (getter)frame_getglobals, NULL, NULL}, {"f_builtins", (getter)frame_getbuiltins, NULL, NULL}, {"f_code", (getter)frame_getcode, NULL, NULL}, + {"f_trace_opcodes", (getter)frame_gettrace_opcodes, (setter)frame_settrace_opcodes, NULL}, {0} }; @@ -1041,6 +1056,7 @@ _PyFrame_New_NoTrack(PyCodeObject *code) f->f_trace_opcodes = 0; f->f_fast_as_locals = 0; f->f_lineno = 0; + f->f_last_traced_line = -1; return f; } diff --git a/Objects/funcobject.c b/Objects/funcobject.c index ce5d7bda32c032..78c1144afca2eb 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -942,7 +942,7 @@ functools_wraps(PyObject *wrapper, PyObject *wrapped) class C: @classmethod - def f(cls, arg1, arg2, ...): + def f(cls, arg1, arg2, argN): ... It can be called either on the class (e.g. C.f()) or on an instance @@ -1066,7 +1066,7 @@ To declare a class method, use this idiom:\n\ \n\ class C:\n\ @classmethod\n\ - def f(cls, arg1, arg2, ...):\n\ + def f(cls, arg1, arg2, argN):\n\ ...\n\ \n\ It can be called either on the class (e.g. C.f()) or on an instance\n\ @@ -1138,7 +1138,7 @@ PyClassMethod_New(PyObject *callable) class C: @staticmethod - def f(arg1, arg2, ...): + def f(arg1, arg2, argN): ... It can be called either on the class (e.g. 
C.f()) or on an instance @@ -1260,7 +1260,7 @@ To declare a static method, use this idiom:\n\ \n\ class C:\n\ @staticmethod\n\ - def f(arg1, arg2, ...):\n\ + def f(arg1, arg2, argN):\n\ ...\n\ \n\ It can be called either on the class (e.g. C.f()) or on an instance\n\ diff --git a/Objects/listobject.c b/Objects/listobject.c index 1a210e77d55c29..f1edfb3a9a039d 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -4,6 +4,7 @@ #include "pycore_abstract.h" // _PyIndex_Check() #include "pycore_interp.h" // PyInterpreterState.list #include "pycore_list.h" // struct _Py_list_state, _PyListIterObject +#include "pycore_long.h" // _PyLong_DigitCount #include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_tuple.h" // _PyTuple_FromArray() #include <stddef.h> @@ -2144,24 +2145,21 @@ unsafe_latin_compare(PyObject *v, PyObject *w, MergeState *ms) static int unsafe_long_compare(PyObject *v, PyObject *w, MergeState *ms) { - PyLongObject *vl, *wl; sdigit v0, w0; int res; + PyLongObject *vl, *wl; + intptr_t v0, w0; + int res; /* Modified from Objects/longobject.c:long_compare, assuming: */ assert(Py_IS_TYPE(v, &PyLong_Type)); assert(Py_IS_TYPE(w, &PyLong_Type)); - assert(Py_ABS(Py_SIZE(v)) <= 1); - assert(Py_ABS(Py_SIZE(w)) <= 1); + assert(_PyLong_IsCompact((PyLongObject *)v)); + assert(_PyLong_IsCompact((PyLongObject *)w)); vl = (PyLongObject*)v; wl = (PyLongObject*)w; - v0 = Py_SIZE(vl) == 0 ? 0 : (sdigit)vl->long_value.ob_digit[0]; - w0 = Py_SIZE(wl) == 0 ? 0 : (sdigit)wl->long_value.ob_digit[0]; - - if (Py_SIZE(vl) < 0) - v0 = -v0; - if (Py_SIZE(wl) < 0) - w0 = -w0; + v0 = _PyLong_CompactValue(vl); + w0 = _PyLong_CompactValue(wl); res = v0 < w0; assert(res == PyObject_RichCompareBool(v, w, Py_LT)); @@ -2359,7 +2357,7 @@ list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse) if (keys_are_all_same_type) { if (key_type == &PyLong_Type && ints_are_bounded && - Py_ABS(Py_SIZE(key)) > 1) { + !_PyLong_IsCompact((PyLongObject *)key)) { ints_are_bounded = 0; } diff --git a/Objects/longobject.c b/Objects/longobject.c index 51655cd0bad9ec..de043488d7a173 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -6,10 +6,10 @@ #include "pycore_bitutils.h" // _Py_popcount32() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_long.h" // _Py_SmallInts -#include "pycore_object.h" // _PyObject_InitVar() +#include "pycore_object.h" // _PyObject_Init() #include "pycore_pystate.h" // _Py_IsMainInterpreter() #include "pycore_runtime.h" // _PY_NSMALLPOSINTS -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() #include <ctype.h> #include <float.h> @@ -22,16 +22,7 @@ class int "PyObject *" "&PyLong_Type" [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=ec0275e3422a36e3]*/ -/* Is this PyLong of size 1, 0 or -1? 
*/ -#define IS_MEDIUM_VALUE(x) (((size_t)Py_SIZE(x)) + 1U < 3U) - -/* convert a PyLong of size 1, 0 or -1 to a C integer */ -static inline stwodigits -medium_value(PyLongObject *x) -{ - assert(IS_MEDIUM_VALUE(x)); - return ((stwodigits)Py_SIZE(x)) * x->long_value.ob_digit[0]; -} +#define medium_value(x) ((stwodigits)_PyLong_CompactValue(x)) #define IS_SMALL_INT(ival) (-_PY_NSMALLNEGINTS <= (ival) && (ival) < _PY_NSMALLPOSINTS) #define IS_SMALL_UINT(ival) ((ival) < _PY_NSMALLPOSINTS) @@ -61,14 +52,13 @@ static PyObject * get_small_int(sdigit ival) { assert(IS_SMALL_INT(ival)); - PyObject *v = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival]; - return Py_NewRef(v); + return (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival]; } static PyLongObject * maybe_small_long(PyLongObject *v) { - if (v && IS_MEDIUM_VALUE(v)) { + if (v && _PyLong_IsCompact(v)) { stwodigits ival = medium_value(v); if (IS_SMALL_INT(ival)) { _Py_DECREF_INT(v); @@ -126,13 +116,18 @@ maybe_small_long(PyLongObject *v) static PyLongObject * long_normalize(PyLongObject *v) { - Py_ssize_t j = Py_ABS(Py_SIZE(v)); + Py_ssize_t j = _PyLong_DigitCount(v); Py_ssize_t i = j; while (i > 0 && v->long_value.ob_digit[i-1] == 0) --i; if (i != j) { - Py_SET_SIZE(v, (Py_SIZE(v) < 0) ? -(i) : i); + if (i == 0) { + _PyLong_SetSignAndDigitCount(v, 0, 0); + } + else { + _PyLong_SetDigitCount(v, i); + } } return v; } @@ -146,6 +141,7 @@ long_normalize(PyLongObject *v) PyLongObject * _PyLong_New(Py_ssize_t size) { + assert(size >= 0); PyLongObject *result; if (size > (Py_ssize_t)MAX_LONG_DIGITS) { PyErr_SetString(PyExc_OverflowError, @@ -157,8 +153,8 @@ _PyLong_New(Py_ssize_t size) Py_ssize_t ndigits = size ? size : 1; /* Number of bytes needed is: offsetof(PyLongObject, ob_digit) + sizeof(digit)*size. Previous incarnations of this code used - sizeof(PyVarObject) instead of the offsetof, but this risks being - incorrect in the presence of padding between the PyVarObject header + sizeof() instead of the offsetof, but this risks being + incorrect in the presence of padding between the header and the digits. 
*/ result = PyObject_Malloc(offsetof(PyLongObject, long_value.ob_digit) + ndigits*sizeof(digit)); @@ -166,34 +162,41 @@ _PyLong_New(Py_ssize_t size) PyErr_NoMemory(); return NULL; } - _PyObject_InitVar((PyVarObject*)result, &PyLong_Type, size); + _PyLong_SetSignAndDigitCount(result, size != 0, size); + _PyObject_Init((PyObject*)result, &PyLong_Type); + return result; +} + +PyLongObject * +_PyLong_FromDigits(int negative, Py_ssize_t digit_count, digit *digits) +{ + assert(digit_count >= 0); + if (digit_count == 0) { + return (PyLongObject *)Py_NewRef(_PyLong_GetZero()); + } + PyLongObject *result = _PyLong_New(digit_count); + if (result == NULL) { + PyErr_NoMemory(); + return NULL; + } + _PyLong_SetSignAndDigitCount(result, negative?-1:1, digit_count); + memcpy(result->long_value.ob_digit, digits, digit_count * sizeof(digit)); return result; } PyObject * _PyLong_Copy(PyLongObject *src) { - PyLongObject *result; - Py_ssize_t i; - assert(src != NULL); - i = Py_SIZE(src); - if (i < 0) - i = -(i); - if (i < 2) { + + if (_PyLong_IsCompact(src)) { stwodigits ival = medium_value(src); if (IS_SMALL_INT(ival)) { return get_small_int((sdigit)ival); } } - result = _PyLong_New(i); - if (result != NULL) { - Py_SET_SIZE(result, Py_SIZE(src)); - while (--i >= 0) { - result->long_value.ob_digit[i] = src->long_value.ob_digit[i]; - } - } - return (PyObject *)result; + Py_ssize_t size = _PyLong_DigitCount(src); + return (PyObject *)_PyLong_FromDigits(_PyLong_IsNegative(src), size, src->long_value.ob_digit); } static PyObject * @@ -207,9 +210,9 @@ _PyLong_FromMedium(sdigit x) PyErr_NoMemory(); return NULL; } - Py_ssize_t sign = x < 0 ? -1: 1; digit abs_x = x < 0 ? -x : x; - _PyObject_InitVar((PyVarObject*)v, &PyLong_Type, sign); + _PyLong_SetSignAndDigitCount(v, x<0?-1:1, 1); + _PyObject_Init((PyObject*)v, &PyLong_Type); v->long_value.ob_digit[0] = abs_x; return (PyObject*)v; } @@ -242,7 +245,7 @@ _PyLong_FromLarge(stwodigits ival) PyLongObject *v = _PyLong_New(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; - Py_SET_SIZE(v, ndigits * sign); + _PyLong_SetSignAndDigitCount(v, sign, ndigits); t = abs_ival; while (t) { *p++ = Py_SAFE_DOWNCAST( @@ -267,38 +270,6 @@ _PyLong_FromSTwoDigits(stwodigits x) return _PyLong_FromLarge(x); } -int -_PyLong_AssignValue(PyObject **target, Py_ssize_t value) -{ - PyObject *old = *target; - if (IS_SMALL_INT(value)) { - *target = get_small_int(Py_SAFE_DOWNCAST(value, Py_ssize_t, sdigit)); - Py_XDECREF(old); - return 0; - } - else if (old != NULL && PyLong_CheckExact(old) && - Py_REFCNT(old) == 1 && Py_SIZE(old) == 1 && - (size_t)value <= PyLong_MASK) - { - // Mutate in place if there are no other references the old - // object. This avoids an allocation in a common case. - // Since the primary use-case is iterating over ranges, which - // are typically positive, only do this optimization - // for positive integers (for now). 
- ((PyLongObject *)old)->long_value.ob_digit[0] = - Py_SAFE_DOWNCAST(value, Py_ssize_t, digit); - return 0; - } - else { - *target = PyLong_FromSsize_t(value); - Py_XDECREF(old); - if (*target == NULL) { - return -1; - } - return 0; - } -} - /* If a freshly-allocated int is already shared, it must be a small integer, so negating it must go to PyLong_FromLong */ Py_LOCAL_INLINE(void) @@ -308,7 +279,7 @@ _PyLong_Negate(PyLongObject **x_p) x = (PyLongObject *)*x_p; if (Py_REFCNT(x) == 1) { - Py_SET_SIZE(x, -Py_SIZE(x)); + _PyLong_FlipSign(x); return; } @@ -347,7 +318,7 @@ PyLong_FromLong(long ival) v = _PyLong_New(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; - Py_SET_SIZE(v, ival < 0 ? -ndigits : ndigits); + _PyLong_SetSignAndDigitCount(v, ival < 0 ? -1 : 1, ndigits); t = abs_ival; while (t) { *p++ = (digit)(t & PyLong_MASK); @@ -457,7 +428,7 @@ PyLong_FromDouble(double dval) frac = ldexp(frac, PyLong_SHIFT); } if (neg) { - Py_SET_SIZE(v, -(Py_SIZE(v))); + _PyLong_FlipSign(v); } return (PyObject *)v; } @@ -510,27 +481,22 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) return -1; do_decref = 1; } - - res = -1; - i = Py_SIZE(v); - - switch (i) { - case -1: - res = -(sdigit)v->long_value.ob_digit[0]; - break; - case 0: - res = 0; - break; - case 1: - res = v->long_value.ob_digit[0]; - break; - default: - sign = 1; - x = 0; - if (i < 0) { - sign = -1; - i = -(i); + if (_PyLong_IsCompact(v)) { +#if SIZEOF_LONG < SIZEOF_VOID_P + intptr_t tmp = _PyLong_CompactValue(v); + res = (long)tmp; + if (res != tmp) { + *overflow = tmp < 0 ? -1 : 1; } +#else + res = _PyLong_CompactValue(v); +#endif + } + else { + res = -1; + i = _PyLong_DigitCount(v); + sign = _PyLong_NonCompactSign(v); + x = 0; while (--i >= 0) { prev = x; x = (x << PyLong_SHIFT) | v->long_value.ob_digit[i]; @@ -540,8 +506,8 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) } } /* Haven't lost any bits, but casting to long requires extra - * care (see comment above). - */ + * care (see comment above). 
+ */ if (x <= (unsigned long)LONG_MAX) { res = (long)x * sign; } @@ -615,18 +581,12 @@ PyLong_AsSsize_t(PyObject *vv) { } v = (PyLongObject *)vv; - i = Py_SIZE(v); - switch (i) { - case -1: return -(sdigit)v->long_value.ob_digit[0]; - case 0: return 0; - case 1: return v->long_value.ob_digit[0]; + if (_PyLong_IsCompact(v)) { + return _PyLong_CompactValue(v); } - sign = 1; + i = _PyLong_DigitCount(v); + sign = _PyLong_NonCompactSign(v); x = 0; - if (i < 0) { - sign = -1; - i = -(i); - } while (--i >= 0) { prev = x; x = (x << PyLong_SHIFT) | v->long_value.ob_digit[i]; @@ -670,28 +630,37 @@ PyLong_AsUnsignedLong(PyObject *vv) } v = (PyLongObject *)vv; - i = Py_SIZE(v); - x = 0; - if (i < 0) { + if (_PyLong_IsNonNegativeCompact(v)) { +#if SIZEOF_LONG < SIZEOF_VOID_P + intptr_t tmp = _PyLong_CompactValue(v); + unsigned long res = (unsigned long)tmp; + if (res != tmp) { + goto overflow; + } +#else + return _PyLong_CompactValue(v); +#endif + } + if (_PyLong_IsNegative(v)) { PyErr_SetString(PyExc_OverflowError, "can't convert negative value to unsigned int"); return (unsigned long) -1; } - switch (i) { - case 0: return 0; - case 1: return v->long_value.ob_digit[0]; - } + i = _PyLong_DigitCount(v); + x = 0; while (--i >= 0) { prev = x; x = (x << PyLong_SHIFT) | v->long_value.ob_digit[i]; if ((x >> PyLong_SHIFT) != prev) { - PyErr_SetString(PyExc_OverflowError, - "Python int too large to convert " - "to C unsigned long"); - return (unsigned long) -1; + goto overflow; } } return x; +overflow: + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert " + "to C unsigned long"); + return (unsigned long) -1; } /* Get a C size_t from an int object. Returns (size_t)-1 and sets @@ -714,17 +683,16 @@ PyLong_AsSize_t(PyObject *vv) } v = (PyLongObject *)vv; - i = Py_SIZE(v); - x = 0; - if (i < 0) { + if (_PyLong_IsNonNegativeCompact(v)) { + return _PyLong_CompactValue(v); + } + if (_PyLong_IsNegative(v)) { PyErr_SetString(PyExc_OverflowError, "can't convert negative value to size_t"); return (size_t) -1; } - switch (i) { - case 0: return 0; - case 1: return v->long_value.ob_digit[0]; - } + i = _PyLong_DigitCount(v); + x = 0; while (--i >= 0) { prev = x; x = (x << PyLong_SHIFT) | v->long_value.ob_digit[i]; @@ -746,24 +714,18 @@ _PyLong_AsUnsignedLongMask(PyObject *vv) PyLongObject *v; unsigned long x; Py_ssize_t i; - int sign; if (vv == NULL || !PyLong_Check(vv)) { PyErr_BadInternalCall(); return (unsigned long) -1; } v = (PyLongObject *)vv; - i = Py_SIZE(v); - switch (i) { - case 0: return 0; - case 1: return v->long_value.ob_digit[0]; + if (_PyLong_IsCompact(v)) { + return (unsigned long)_PyLong_CompactValue(v); } - sign = 1; + i = _PyLong_DigitCount(v); + int sign = _PyLong_NonCompactSign(v); x = 0; - if (i < 0) { - sign = -1; - i = -i; - } while (--i >= 0) { x = (x << PyLong_SHIFT) | v->long_value.ob_digit[i]; } @@ -801,8 +763,10 @@ _PyLong_Sign(PyObject *vv) assert(v != NULL); assert(PyLong_Check(v)); - - return Py_SIZE(v) == 0 ? 0 : (Py_SIZE(v) < 0 ? -1 : 1); + if (_PyLong_IsCompact(v)) { + return _PyLong_CompactSign(v); + } + return _PyLong_NonCompactSign(v); } static int @@ -825,7 +789,7 @@ _PyLong_NumBits(PyObject *vv) assert(v != NULL); assert(PyLong_Check(v)); - ndigits = Py_ABS(Py_SIZE(v)); + ndigits = _PyLong_DigitCount(v); assert(ndigits == 0 || v->long_value.ob_digit[ndigits - 1] != 0); if (ndigits > 0) { digit msd = v->long_value.ob_digit[ndigits - 1]; @@ -952,7 +916,11 @@ _PyLong_FromByteArray(const unsigned char* bytes, size_t n, } } - Py_SET_SIZE(v, is_signed ? 
-idigit : idigit); + int sign = is_signed ? -1: 1; + if (idigit == 0) { + sign = 0; + } + _PyLong_SetSignAndDigitCount(v, sign, idigit); return (PyObject *)maybe_small_long(long_normalize(v)); } @@ -962,7 +930,7 @@ _PyLong_AsByteArray(PyLongObject* v, int little_endian, int is_signed) { Py_ssize_t i; /* index into v->long_value.ob_digit */ - Py_ssize_t ndigits; /* |v->ob_size| */ + Py_ssize_t ndigits; /* number of digits */ twodigits accum; /* sliding register */ unsigned int accumbits; /* # bits in accum */ int do_twos_comp; /* store 2's-comp? is_signed and v < 0 */ @@ -973,8 +941,8 @@ _PyLong_AsByteArray(PyLongObject* v, assert(v != NULL && PyLong_Check(v)); - if (Py_SIZE(v) < 0) { - ndigits = -(Py_SIZE(v)); + ndigits = _PyLong_DigitCount(v); + if (_PyLong_IsNegative(v)) { if (!is_signed) { PyErr_SetString(PyExc_OverflowError, "can't convert negative int to unsigned"); @@ -983,7 +951,6 @@ _PyLong_AsByteArray(PyLongObject* v, do_twos_comp = 1; } else { - ndigits = Py_SIZE(v); do_twos_comp = 0; } @@ -1114,10 +1081,12 @@ PyLong_AsVoidPtr(PyObject *vv) #if SIZEOF_VOID_P <= SIZEOF_LONG long x; - if (PyLong_Check(vv) && _PyLong_Sign(vv) < 0) + if (PyLong_Check(vv) && _PyLong_IsNegative((PyLongObject *)vv)) { x = PyLong_AsLong(vv); - else + } + else { x = PyLong_AsUnsignedLong(vv); + } #else #if SIZEOF_LONG_LONG < SIZEOF_VOID_P @@ -1125,10 +1094,12 @@ PyLong_AsVoidPtr(PyObject *vv) #endif long long x; - if (PyLong_Check(vv) && _PyLong_Sign(vv) < 0) + if (PyLong_Check(vv) && _PyLong_IsNegative((PyLongObject *)vv)) { x = PyLong_AsLongLong(vv); - else + } + else { x = PyLong_AsUnsignedLongLong(vv); + } #endif /* SIZEOF_VOID_P <= SIZEOF_LONG */ @@ -1174,7 +1145,7 @@ PyLong_FromLongLong(long long ival) v = _PyLong_New(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; - Py_SET_SIZE(v, ival < 0 ? -ndigits : ndigits); + _PyLong_SetSignAndDigitCount(v, ival < 0 ? -1 : 1, ndigits); t = abs_ival; while (t) { *p++ = (digit)(t & PyLong_MASK); @@ -1217,7 +1188,7 @@ PyLong_FromSsize_t(Py_ssize_t ival) v = _PyLong_New(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; - Py_SET_SIZE(v, negative ? -ndigits : ndigits); + _PyLong_SetSignAndDigitCount(v, negative ? -1 : 1, ndigits); t = abs_ival; while (t) { *p++ = (digit)(t & PyLong_MASK); @@ -1253,18 +1224,11 @@ PyLong_AsLongLong(PyObject *vv) do_decref = 1; } - res = 0; - switch(Py_SIZE(v)) { - case -1: - bytes = -(sdigit)v->long_value.ob_digit[0]; - break; - case 0: - bytes = 0; - break; - case 1: - bytes = v->long_value.ob_digit[0]; - break; - default: + if (_PyLong_IsCompact(v)) { + res = 0; + bytes = _PyLong_CompactValue(v); + } + else { res = _PyLong_AsByteArray((PyLongObject *)v, (unsigned char *)&bytes, SIZEOF_LONG_LONG, PY_LITTLE_ENDIAN, 1); } @@ -1299,13 +1263,14 @@ PyLong_AsUnsignedLongLong(PyObject *vv) } v = (PyLongObject*)vv; - switch(Py_SIZE(v)) { - case 0: return 0; - case 1: return v->long_value.ob_digit[0]; + if (_PyLong_IsNonNegativeCompact(v)) { + res = 0; + bytes = _PyLong_CompactValue(v); } - - res = _PyLong_AsByteArray((PyLongObject *)vv, (unsigned char *)&bytes, + else { + res = _PyLong_AsByteArray((PyLongObject *)vv, (unsigned char *)&bytes, SIZEOF_LONG_LONG, PY_LITTLE_ENDIAN, 0); + } /* Plan 9 can't handle long long in ? 
: expressions */ if (res < 0) @@ -1330,17 +1295,12 @@ _PyLong_AsUnsignedLongLongMask(PyObject *vv) return (unsigned long long) -1; } v = (PyLongObject *)vv; - switch(Py_SIZE(v)) { - case 0: return 0; - case 1: return v->long_value.ob_digit[0]; + if (_PyLong_IsCompact(v)) { + return (unsigned long long)(signed long long)_PyLong_CompactValue(v); } - i = Py_SIZE(v); - sign = 1; + i = _PyLong_DigitCount(v); + sign = _PyLong_NonCompactSign(v); x = 0; - if (i < 0) { - sign = -1; - i = -i; - } while (--i >= 0) { x = (x << PyLong_SHIFT) | v->long_value.ob_digit[i]; } @@ -1407,32 +1367,19 @@ PyLong_AsLongLongAndOverflow(PyObject *vv, int *overflow) return -1; do_decref = 1; } - - res = -1; - i = Py_SIZE(v); - - switch (i) { - case -1: - res = -(sdigit)v->long_value.ob_digit[0]; - break; - case 0: - res = 0; - break; - case 1: - res = v->long_value.ob_digit[0]; - break; - default: - sign = 1; + if (_PyLong_IsCompact(v)) { + res = _PyLong_CompactValue(v); + } + else { + i = _PyLong_DigitCount(v); + sign = _PyLong_NonCompactSign(v); x = 0; - if (i < 0) { - sign = -1; - i = -(i); - } while (--i >= 0) { prev = x; x = (x << PyLong_SHIFT) + v->long_value.ob_digit[i]; if ((x >> PyLong_SHIFT) != prev) { *overflow = sign; + res = -1; goto exit; } } @@ -1447,7 +1394,7 @@ PyLong_AsLongLongAndOverflow(PyObject *vv, int *overflow) } else { *overflow = sign; - /* res is already set to -1 */ + res = -1; } } exit: @@ -1462,7 +1409,7 @@ _PyLong_UnsignedShort_Converter(PyObject *obj, void *ptr) { unsigned long uval; - if (PyLong_Check(obj) && _PyLong_Sign(obj) < 0) { + if (PyLong_Check(obj) && _PyLong_IsNegative((PyLongObject *)obj)) { PyErr_SetString(PyExc_ValueError, "value must be positive"); return 0; } @@ -1484,7 +1431,7 @@ _PyLong_UnsignedInt_Converter(PyObject *obj, void *ptr) { unsigned long uval; - if (PyLong_Check(obj) && _PyLong_Sign(obj) < 0) { + if (PyLong_Check(obj) && _PyLong_IsNegative((PyLongObject *)obj)) { PyErr_SetString(PyExc_ValueError, "value must be positive"); return 0; } @@ -1506,7 +1453,7 @@ _PyLong_UnsignedLong_Converter(PyObject *obj, void *ptr) { unsigned long uval; - if (PyLong_Check(obj) && _PyLong_Sign(obj) < 0) { + if (PyLong_Check(obj) && _PyLong_IsNegative((PyLongObject *)obj)) { PyErr_SetString(PyExc_ValueError, "value must be positive"); return 0; } @@ -1523,7 +1470,7 @@ _PyLong_UnsignedLongLong_Converter(PyObject *obj, void *ptr) { unsigned long long uval; - if (PyLong_Check(obj) && _PyLong_Sign(obj) < 0) { + if (PyLong_Check(obj) && _PyLong_IsNegative((PyLongObject *)obj)) { PyErr_SetString(PyExc_ValueError, "value must be positive"); return 0; } @@ -1540,7 +1487,7 @@ _PyLong_Size_t_Converter(PyObject *obj, void *ptr) { size_t uval; - if (PyLong_Check(obj) && _PyLong_Sign(obj) < 0) { + if (PyLong_Check(obj) && _PyLong_IsNegative((PyLongObject *)obj)) { PyErr_SetString(PyExc_ValueError, "value must be positive"); return 0; } @@ -1694,7 +1641,7 @@ inplace_divrem1(digit *pout, digit *pin, Py_ssize_t size, digit n) static PyLongObject * divrem1(PyLongObject *a, digit n, digit *prem) { - const Py_ssize_t size = Py_ABS(Py_SIZE(a)); + const Py_ssize_t size = _PyLong_DigitCount(a); PyLongObject *z; assert(n > 0 && n <= PyLong_MASK); @@ -1726,7 +1673,7 @@ inplace_rem1(digit *pin, Py_ssize_t size, digit n) static PyLongObject * rem1(PyLongObject *a, digit n) { - const Py_ssize_t size = Py_ABS(Py_SIZE(a)); + const Py_ssize_t size = _PyLong_DigitCount(a); assert(n > 0 && n <= PyLong_MASK); return (PyLongObject *)PyLong_FromLong( @@ -1824,8 +1771,8 @@ 
long_to_decimal_string_internal(PyObject *aa, PyErr_BadInternalCall(); return -1; } - size_a = Py_ABS(Py_SIZE(a)); - negative = Py_SIZE(a) < 0; + size_a = _PyLong_DigitCount(a); + negative = _PyLong_IsNegative(a); /* quick and dirty pre-check for overflowing the decimal digit limit, based on the inequality 10/3 >= log2(10) @@ -2055,8 +2002,8 @@ long_format_binary(PyObject *aa, int base, int alternate, PyErr_BadInternalCall(); return -1; } - size_a = Py_ABS(Py_SIZE(a)); - negative = Py_SIZE(a) < 0; + size_a = _PyLong_DigitCount(a); + negative = _PyLong_IsNegative(a); /* Compute a rough upper bound for the length of the string */ switch (base) { @@ -2532,7 +2479,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, *res = NULL; return 0; } - Py_SET_SIZE(z, 0); + _PyLong_SetSignAndDigitCount(z, 0, 0); /* `convwidth` consecutive input digits are treated as a single * digit in base `convmultmax`. @@ -2572,7 +2519,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, /* Multiply z by convmult, and add c. */ pz = z->long_value.ob_digit; - pzstop = pz + Py_SIZE(z); + pzstop = pz + _PyLong_DigitCount(z); for (; pz < pzstop; ++pz) { c += (twodigits)*pz * convmult; *pz = (digit)(c & PyLong_MASK); @@ -2581,14 +2528,15 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, /* carry off the current end? */ if (c) { assert(c < PyLong_BASE); - if (Py_SIZE(z) < size_z) { + if (_PyLong_DigitCount(z) < size_z) { *pz = (digit)c; - Py_SET_SIZE(z, Py_SIZE(z) + 1); + assert(!_PyLong_IsNegative(z)); + _PyLong_SetSignAndDigitCount(z, 1, _PyLong_DigitCount(z) + 1); } else { PyLongObject *tmp; /* Extremely rare. Get more space. */ - assert(Py_SIZE(z) == size_z); + assert(_PyLong_DigitCount(z) == size_z); tmp = _PyLong_New(size_z + 1); if (tmp == NULL) { Py_DECREF(z); @@ -2790,7 +2738,7 @@ PyLong_FromString(const char *str, char **pend, int base) /* reset the base to 0, else the exception message doesn't make too much sense */ base = 0; - if (Py_SIZE(z) != 0) { + if (!_PyLong_IsZero(z)) { goto onError; } /* there might still be other problems, therefore base @@ -2799,7 +2747,7 @@ PyLong_FromString(const char *str, char **pend, int base) /* Set sign and normalize */ if (sign < 0) { - Py_SET_SIZE(z, -(Py_SIZE(z))); + _PyLong_FlipSign(z); } long_normalize(z); z = maybe_small_long(z); @@ -2891,7 +2839,7 @@ static int long_divrem(PyLongObject *a, PyLongObject *b, PyLongObject **pdiv, PyLongObject **prem) { - Py_ssize_t size_a = Py_ABS(Py_SIZE(a)), size_b = Py_ABS(Py_SIZE(b)); + Py_ssize_t size_a = _PyLong_DigitCount(a), size_b = _PyLong_DigitCount(b); PyLongObject *z; if (size_b == 0) { @@ -2932,14 +2880,14 @@ long_divrem(PyLongObject *a, PyLongObject *b, The quotient z has the sign of a*b; the remainder r has the sign of a, so a = b*z + r. 
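           For example (illustrative): a = -7, b = 2 gives z = -3 and r = -1,
           and indeed 2*(-3) + (-1) == -7.  This is C-style truncating
           division; callers that need Python's floor-division semantics
           adjust the quotient and remainder afterwards where needed.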
*/ - if ((Py_SIZE(a) < 0) != (Py_SIZE(b) < 0)) { + if ((_PyLong_IsNegative(a)) != (_PyLong_IsNegative(b))) { _PyLong_Negate(&z); if (z == NULL) { Py_CLEAR(*prem); return -1; } } - if (Py_SIZE(a) < 0 && Py_SIZE(*prem) != 0) { + if (_PyLong_IsNegative(a) && !_PyLong_IsZero(*prem)) { _PyLong_Negate(prem); if (*prem == NULL) { Py_DECREF(z); @@ -2956,7 +2904,7 @@ long_divrem(PyLongObject *a, PyLongObject *b, static int long_rem(PyLongObject *a, PyLongObject *b, PyLongObject **prem) { - Py_ssize_t size_a = Py_ABS(Py_SIZE(a)), size_b = Py_ABS(Py_SIZE(b)); + Py_ssize_t size_a = _PyLong_DigitCount(a), size_b = _PyLong_DigitCount(b); if (size_b == 0) { PyErr_SetString(PyExc_ZeroDivisionError, @@ -2983,7 +2931,7 @@ long_rem(PyLongObject *a, PyLongObject *b, PyLongObject **prem) return -1; } /* Set the sign. */ - if (Py_SIZE(a) < 0 && Py_SIZE(*prem) != 0) { + if (_PyLong_IsNegative(a) && !_PyLong_IsZero(*prem)) { _PyLong_Negate(prem); if (*prem == NULL) { Py_CLEAR(*prem); @@ -2994,7 +2942,7 @@ long_rem(PyLongObject *a, PyLongObject *b, PyLongObject **prem) } /* Unsigned int division with remainder -- the algorithm. The arguments v1 - and w1 should satisfy 2 <= Py_ABS(Py_SIZE(w1)) <= Py_ABS(Py_SIZE(v1)). */ + and w1 should satisfy 2 <= _PyLong_DigitCount(w1) <= _PyLong_DigitCount(v1). */ static PyLongObject * x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) @@ -3014,8 +2962,8 @@ x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) that won't overflow a digit. */ /* allocate space; w will also be used to hold the final remainder */ - size_v = Py_ABS(Py_SIZE(v1)); - size_w = Py_ABS(Py_SIZE(w1)); + size_v = _PyLong_DigitCount(v1); + size_w = _PyLong_DigitCount(w1); assert(size_v >= size_w && size_w >= 2); /* Assert checks by div() */ v = _PyLong_New(size_v+1); if (v == NULL) { @@ -3154,7 +3102,7 @@ _PyLong_Frexp(PyLongObject *a, Py_ssize_t *e) multiple of 4, rounding ties to a multiple of 8. */ static const int half_even_correction[8] = {0, -1, -2, 1, 0, -1, 2, 1}; - a_size = Py_ABS(Py_SIZE(a)); + a_size = _PyLong_DigitCount(a); if (a_size == 0) { /* Special case for 0: significand 0.0, exponent 0. */ *e = 0; @@ -3240,7 +3188,7 @@ _PyLong_Frexp(PyLongObject *a, Py_ssize_t *e) } *e = a_bits; - return Py_SIZE(a) < 0 ? -dx : dx; + return _PyLong_IsNegative(a) ? -dx : dx; overflow: /* exponent > PY_SSIZE_T_MAX */ @@ -3267,7 +3215,7 @@ PyLong_AsDouble(PyObject *v) PyErr_SetString(PyExc_TypeError, "an integer is required"); return -1.0; } - if (IS_MEDIUM_VALUE(v)) { + if (_PyLong_IsCompact((PyLongObject *)v)) { /* Fast path; single digit long (31 bits) will cast safely to double. This improves performance of FP/long operations by 20%. @@ -3292,9 +3240,12 @@ PyLong_AsDouble(PyObject *v) static Py_ssize_t long_compare(PyLongObject *a, PyLongObject *b) { - Py_ssize_t sign = Py_SIZE(a) - Py_SIZE(b); + if (_PyLong_BothAreCompact(a, b)) { + return _PyLong_CompactValue(a) - _PyLong_CompactValue(b); + } + Py_ssize_t sign = _PyLong_SignedDigitCount(a) - _PyLong_SignedDigitCount(b); if (sign == 0) { - Py_ssize_t i = Py_ABS(Py_SIZE(a)); + Py_ssize_t i = _PyLong_DigitCount(a); sdigit diff = 0; while (--i >= 0) { diff = (sdigit) a->long_value.ob_digit[i] - (sdigit) b->long_value.ob_digit[i]; @@ -3302,7 +3253,7 @@ long_compare(PyLongObject *a, PyLongObject *b) break; } } - sign = Py_SIZE(a) < 0 ? -diff : diff; + sign = _PyLong_IsNegative(a) ? 
-diff : diff; } return sign; } @@ -3319,6 +3270,27 @@ long_richcompare(PyObject *self, PyObject *other, int op) Py_RETURN_RICHCOMPARE(result, 0, op); } +static void +long_dealloc(PyObject *self) +{ + /* This should never get called, but we also don't want to SEGV if + * we accidentally decref small Ints out of existence. Instead, + * since small Ints are immortal, re-set the reference count. + */ + PyLongObject *pylong = (PyLongObject*)self; + if (pylong && _PyLong_IsCompact(pylong)) { + stwodigits ival = medium_value(pylong); + if (IS_SMALL_INT(ival)) { + PyLongObject *small_pylong = (PyLongObject *)get_small_int((sdigit)ival); + if (pylong == small_pylong) { + _Py_SetImmortal(self); + return; + } + } + } + Py_TYPE(self)->tp_free(self); +} + static Py_hash_t long_hash(PyLongObject *v) { @@ -3326,18 +3298,16 @@ long_hash(PyLongObject *v) Py_ssize_t i; int sign; - i = Py_SIZE(v); - switch(i) { - case -1: return v->long_value.ob_digit[0]==1 ? -2 : -(sdigit)v->long_value.ob_digit[0]; - case 0: return 0; - case 1: return v->long_value.ob_digit[0]; + if (_PyLong_IsCompact(v)) { + x = _PyLong_CompactValue(v); + if (x == (Py_uhash_t)-1) { + x = (Py_uhash_t)-2; + } + return x; } - sign = 1; + i = _PyLong_DigitCount(v); + sign = _PyLong_NonCompactSign(v); x = 0; - if (i < 0) { - sign = -1; - i = -(i); - } while (--i >= 0) { /* Here x is a quantity in the range [0, _PyHASH_MODULUS); we want to compute x * 2**PyLong_SHIFT + v->long_value.ob_digit[i] modulo @@ -3382,7 +3352,7 @@ long_hash(PyLongObject *v) static PyLongObject * x_add(PyLongObject *a, PyLongObject *b) { - Py_ssize_t size_a = Py_ABS(Py_SIZE(a)), size_b = Py_ABS(Py_SIZE(b)); + Py_ssize_t size_a = _PyLong_DigitCount(a), size_b = _PyLong_DigitCount(b); PyLongObject *z; Py_ssize_t i; digit carry = 0; @@ -3416,7 +3386,7 @@ x_add(PyLongObject *a, PyLongObject *b) static PyLongObject * x_sub(PyLongObject *a, PyLongObject *b) { - Py_ssize_t size_a = Py_ABS(Py_SIZE(a)), size_b = Py_ABS(Py_SIZE(b)); + Py_ssize_t size_a = _PyLong_DigitCount(a), size_b = _PyLong_DigitCount(b); PyLongObject *z; Py_ssize_t i; int sign = 1; @@ -3462,7 +3432,7 @@ x_sub(PyLongObject *a, PyLongObject *b) } assert(borrow == 0); if (sign < 0) { - Py_SET_SIZE(z, -Py_SIZE(z)); + _PyLong_FlipSign(z); } return maybe_small_long(long_normalize(z)); } @@ -3470,13 +3440,13 @@ x_sub(PyLongObject *a, PyLongObject *b) PyObject * _PyLong_Add(PyLongObject *a, PyLongObject *b) { - if (IS_MEDIUM_VALUE(a) && IS_MEDIUM_VALUE(b)) { + if (_PyLong_BothAreCompact(a, b)) { return _PyLong_FromSTwoDigits(medium_value(a) + medium_value(b)); } PyLongObject *z; - if (Py_SIZE(a) < 0) { - if (Py_SIZE(b) < 0) { + if (_PyLong_IsNegative(a)) { + if (_PyLong_IsNegative(b)) { z = x_add(a, b); if (z != NULL) { /* x_add received at least one multiple-digit int, @@ -3484,14 +3454,14 @@ _PyLong_Add(PyLongObject *a, PyLongObject *b) That also means z is not an element of small_ints, so negating it in-place is safe. 
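                   (Small ints are preallocated, shared singletons -- see
                   get_small_int() above -- so flipping the sign of one in
                   place would silently change the value observed by every
                   other holder of that object.)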
*/ assert(Py_REFCNT(z) == 1); - Py_SET_SIZE(z, -(Py_SIZE(z))); + _PyLong_FlipSign(z); } } else z = x_sub(b, a); } else { - if (Py_SIZE(b) < 0) + if (_PyLong_IsNegative(b)) z = x_sub(a, b); else z = x_add(a, b); @@ -3511,23 +3481,23 @@ _PyLong_Subtract(PyLongObject *a, PyLongObject *b) { PyLongObject *z; - if (IS_MEDIUM_VALUE(a) && IS_MEDIUM_VALUE(b)) { + if (_PyLong_BothAreCompact(a, b)) { return _PyLong_FromSTwoDigits(medium_value(a) - medium_value(b)); } - if (Py_SIZE(a) < 0) { - if (Py_SIZE(b) < 0) { + if (_PyLong_IsNegative(a)) { + if (_PyLong_IsNegative(b)) { z = x_sub(b, a); } else { z = x_add(a, b); if (z != NULL) { - assert(Py_SIZE(z) == 0 || Py_REFCNT(z) == 1); - Py_SET_SIZE(z, -(Py_SIZE(z))); + assert(_PyLong_IsZero(z) || Py_REFCNT(z) == 1); + _PyLong_FlipSign(z); } } } else { - if (Py_SIZE(b) < 0) + if (_PyLong_IsNegative(b)) z = x_add(a, b); else z = x_sub(a, b); @@ -3549,15 +3519,15 @@ static PyLongObject * x_mul(PyLongObject *a, PyLongObject *b) { PyLongObject *z; - Py_ssize_t size_a = Py_ABS(Py_SIZE(a)); - Py_ssize_t size_b = Py_ABS(Py_SIZE(b)); + Py_ssize_t size_a = _PyLong_DigitCount(a); + Py_ssize_t size_b = _PyLong_DigitCount(b); Py_ssize_t i; z = _PyLong_New(size_a + size_b); if (z == NULL) return NULL; - memset(z->long_value.ob_digit, 0, Py_SIZE(z) * sizeof(digit)); + memset(z->long_value.ob_digit, 0, _PyLong_DigitCount(z) * sizeof(digit)); if (a == b) { /* Efficient squaring per HAC, Algorithm 14.16: * http://www.cacr.math.uwaterloo.ca/hac/about/chap14.pdf @@ -3658,7 +3628,7 @@ kmul_split(PyLongObject *n, { PyLongObject *hi, *lo; Py_ssize_t size_lo, size_hi; - const Py_ssize_t size_n = Py_ABS(Py_SIZE(n)); + const Py_ssize_t size_n = _PyLong_DigitCount(n); size_lo = Py_MIN(size_n, size); size_hi = size_n - size_lo; @@ -3687,8 +3657,8 @@ static PyLongObject *k_lopsided_mul(PyLongObject *a, PyLongObject *b); static PyLongObject * k_mul(PyLongObject *a, PyLongObject *b) { - Py_ssize_t asize = Py_ABS(Py_SIZE(a)); - Py_ssize_t bsize = Py_ABS(Py_SIZE(b)); + Py_ssize_t asize = _PyLong_DigitCount(a); + Py_ssize_t bsize = _PyLong_DigitCount(b); PyLongObject *ah = NULL; PyLongObject *al = NULL; PyLongObject *bh = NULL; @@ -3731,7 +3701,7 @@ k_mul(PyLongObject *a, PyLongObject *b) /* If a is small compared to b, splitting on b gives a degenerate * case with ah==0, and Karatsuba may be (even much) less efficient * than "grade school" then. However, we can still win, by viewing - * b as a string of "big digits", each of width a->ob_size. That + * b as a string of "big digits", each of the same width as a. That * leads to a sequence of balanced calls to k_mul. */ if (2 * asize <= bsize) @@ -3740,7 +3710,7 @@ k_mul(PyLongObject *a, PyLongObject *b) /* Split a & b into hi & lo pieces. */ shift = bsize >> 1; if (kmul_split(a, shift, &ah, &al) < 0) goto fail; - assert(Py_SIZE(ah) > 0); /* the split isn't degenerate */ + assert(_PyLong_IsPositive(ah)); /* the split isn't degenerate */ if (a == b) { bh = (PyLongObject*)Py_NewRef(ah); @@ -3769,20 +3739,20 @@ k_mul(PyLongObject *a, PyLongObject *b) if (ret == NULL) goto fail; #ifdef Py_DEBUG /* Fill with trash, to catch reference to uninitialized digits. */ - memset(ret->long_value.ob_digit, 0xDF, Py_SIZE(ret) * sizeof(digit)); + memset(ret->long_value.ob_digit, 0xDF, _PyLong_DigitCount(ret) * sizeof(digit)); #endif /* 2. t1 <- ah*bh, and copy into high digits of result. 
*/ if ((t1 = k_mul(ah, bh)) == NULL) goto fail; - assert(Py_SIZE(t1) >= 0); - assert(2*shift + Py_SIZE(t1) <= Py_SIZE(ret)); + assert(!_PyLong_IsNegative(t1)); + assert(2*shift + _PyLong_DigitCount(t1) <= _PyLong_DigitCount(ret)); memcpy(ret->long_value.ob_digit + 2*shift, t1->long_value.ob_digit, - Py_SIZE(t1) * sizeof(digit)); + _PyLong_DigitCount(t1) * sizeof(digit)); /* Zero-out the digits higher than the ah*bh copy. */ - i = Py_SIZE(ret) - 2*shift - Py_SIZE(t1); + i = _PyLong_DigitCount(ret) - 2*shift - _PyLong_DigitCount(t1); if (i) - memset(ret->long_value.ob_digit + 2*shift + Py_SIZE(t1), 0, + memset(ret->long_value.ob_digit + 2*shift + _PyLong_DigitCount(t1), 0, i * sizeof(digit)); /* 3. t2 <- al*bl, and copy into the low digits. */ @@ -3790,23 +3760,23 @@ k_mul(PyLongObject *a, PyLongObject *b) Py_DECREF(t1); goto fail; } - assert(Py_SIZE(t2) >= 0); - assert(Py_SIZE(t2) <= 2*shift); /* no overlap with high digits */ - memcpy(ret->long_value.ob_digit, t2->long_value.ob_digit, Py_SIZE(t2) * sizeof(digit)); + assert(!_PyLong_IsNegative(t2)); + assert(_PyLong_DigitCount(t2) <= 2*shift); /* no overlap with high digits */ + memcpy(ret->long_value.ob_digit, t2->long_value.ob_digit, _PyLong_DigitCount(t2) * sizeof(digit)); /* Zero out remaining digits. */ - i = 2*shift - Py_SIZE(t2); /* number of uninitialized digits */ + i = 2*shift - _PyLong_DigitCount(t2); /* number of uninitialized digits */ if (i) - memset(ret->long_value.ob_digit + Py_SIZE(t2), 0, i * sizeof(digit)); + memset(ret->long_value.ob_digit + _PyLong_DigitCount(t2), 0, i * sizeof(digit)); /* 4 & 5. Subtract ah*bh (t1) and al*bl (t2). We do al*bl first * because it's fresher in cache. */ - i = Py_SIZE(ret) - shift; /* # digits after shift */ - (void)v_isub(ret->long_value.ob_digit + shift, i, t2->long_value.ob_digit, Py_SIZE(t2)); + i = _PyLong_DigitCount(ret) - shift; /* # digits after shift */ + (void)v_isub(ret->long_value.ob_digit + shift, i, t2->long_value.ob_digit, _PyLong_DigitCount(t2)); _Py_DECREF_INT(t2); - (void)v_isub(ret->long_value.ob_digit + shift, i, t1->long_value.ob_digit, Py_SIZE(t1)); + (void)v_isub(ret->long_value.ob_digit + shift, i, t1->long_value.ob_digit, _PyLong_DigitCount(t1)); _Py_DECREF_INT(t1); /* 6. t3 <- (ah+al)(bh+bl), and add into result. */ @@ -3830,12 +3800,12 @@ k_mul(PyLongObject *a, PyLongObject *b) _Py_DECREF_INT(t1); _Py_DECREF_INT(t2); if (t3 == NULL) goto fail; - assert(Py_SIZE(t3) >= 0); + assert(!_PyLong_IsNegative(t3)); /* Add t3. It's not obvious why we can't run out of room here. * See the (*) comment after this function. */ - (void)v_iadd(ret->long_value.ob_digit + shift, i, t3->long_value.ob_digit, Py_SIZE(t3)); + (void)v_iadd(ret->long_value.ob_digit + shift, i, t3->long_value.ob_digit, _PyLong_DigitCount(t3)); _Py_DECREF_INT(t3); return long_normalize(ret); @@ -3896,17 +3866,17 @@ ah*bh and al*bl too. /* b has at least twice the digits of a, and a is big enough that Karatsuba * would pay off *if* the inputs had balanced sizes. View b as a sequence - * of slices, each with a->ob_size digits, and multiply the slices by a, - * one at a time. This gives k_mul balanced inputs to work with, and is - * also cache-friendly (we compute one double-width slice of the result + * of slices, each with the same number of digits as a, and multiply the + * slices by a, one at a time. 
This gives k_mul balanced inputs to work with, + * and is also cache-friendly (we compute one double-width slice of the result * at a time, then move on, never backtracking except for the helpful * single-width slice overlap between successive partial sums). */ static PyLongObject * k_lopsided_mul(PyLongObject *a, PyLongObject *b) { - const Py_ssize_t asize = Py_ABS(Py_SIZE(a)); - Py_ssize_t bsize = Py_ABS(Py_SIZE(b)); + const Py_ssize_t asize = _PyLong_DigitCount(a); + Py_ssize_t bsize = _PyLong_DigitCount(b); Py_ssize_t nbdone; /* # of b digits already multiplied */ PyLongObject *ret; PyLongObject *bslice = NULL; @@ -3918,7 +3888,7 @@ k_lopsided_mul(PyLongObject *a, PyLongObject *b) ret = _PyLong_New(asize + bsize); if (ret == NULL) return NULL; - memset(ret->long_value.ob_digit, 0, Py_SIZE(ret) * sizeof(digit)); + memset(ret->long_value.ob_digit, 0, _PyLong_DigitCount(ret) * sizeof(digit)); /* Successive slices of b are copied into bslice. */ bslice = _PyLong_New(asize); @@ -3933,14 +3903,15 @@ k_lopsided_mul(PyLongObject *a, PyLongObject *b) /* Multiply the next slice of b by a. */ memcpy(bslice->long_value.ob_digit, b->long_value.ob_digit + nbdone, nbtouse * sizeof(digit)); - Py_SET_SIZE(bslice, nbtouse); + assert(nbtouse >= 0); + _PyLong_SetSignAndDigitCount(bslice, 1, nbtouse); product = k_mul(a, bslice); if (product == NULL) goto fail; /* Add into result. */ - (void)v_iadd(ret->long_value.ob_digit + nbdone, Py_SIZE(ret) - nbdone, - product->long_value.ob_digit, Py_SIZE(product)); + (void)v_iadd(ret->long_value.ob_digit + nbdone, _PyLong_DigitCount(ret) - nbdone, + product->long_value.ob_digit, _PyLong_DigitCount(product)); _Py_DECREF_INT(product); bsize -= nbtouse; @@ -3962,14 +3933,14 @@ _PyLong_Multiply(PyLongObject *a, PyLongObject *b) PyLongObject *z; /* fast path for single-digit multiplication */ - if (IS_MEDIUM_VALUE(a) && IS_MEDIUM_VALUE(b)) { + if (_PyLong_BothAreCompact(a, b)) { stwodigits v = medium_value(a) * medium_value(b); return _PyLong_FromSTwoDigits(v); } z = k_mul(a, b); /* Negate if exactly one of the inputs is negative. */ - if (((Py_SIZE(a) ^ Py_SIZE(b)) < 0) && z) { + if (!_PyLong_SameSign(a, b) && z) { _PyLong_Negate(&z); if (z == NULL) return NULL; @@ -3992,11 +3963,10 @@ fast_mod(PyLongObject *a, PyLongObject *b) sdigit right = b->long_value.ob_digit[0]; sdigit mod; - assert(Py_ABS(Py_SIZE(a)) == 1); - assert(Py_ABS(Py_SIZE(b)) == 1); - - if (Py_SIZE(a) == Py_SIZE(b)) { - /* 'a' and 'b' have the same sign. */ + assert(_PyLong_DigitCount(a) == 1); + assert(_PyLong_DigitCount(b) == 1); + sdigit sign = _PyLong_CompactSign(b); + if (_PyLong_SameSign(a, b)) { mod = left % right; } else { @@ -4004,7 +3974,7 @@ fast_mod(PyLongObject *a, PyLongObject *b) mod = right - 1 - (left - 1) % right; } - return PyLong_FromLong(mod * (sdigit)Py_SIZE(b)); + return PyLong_FromLong(mod * sign); } /* Fast floor division for single-digit longs. */ @@ -4015,11 +3985,10 @@ fast_floor_div(PyLongObject *a, PyLongObject *b) sdigit right = b->long_value.ob_digit[0]; sdigit div; - assert(Py_ABS(Py_SIZE(a)) == 1); - assert(Py_ABS(Py_SIZE(b)) == 1); + assert(_PyLong_DigitCount(a) == 1); + assert(_PyLong_DigitCount(b) == 1); - if (Py_SIZE(a) == Py_SIZE(b)) { - /* 'a' and 'b' have the same sign. 
*/ + if (_PyLong_SameSign(a, b)) { div = left / right; } else { @@ -4097,7 +4066,7 @@ l_divmod(PyLongObject *v, PyLongObject *w, { PyLongObject *div, *mod; - if (Py_ABS(Py_SIZE(v)) == 1 && Py_ABS(Py_SIZE(w)) == 1) { + if (_PyLong_DigitCount(v) == 1 && _PyLong_DigitCount(w) == 1) { /* Fast path for single-digit longs */ div = NULL; if (pdiv != NULL) { @@ -4122,8 +4091,8 @@ l_divmod(PyLongObject *v, PyLongObject *w, return 0; } #if WITH_PYLONG_MODULE - Py_ssize_t size_v = Py_ABS(Py_SIZE(v)); /* digits in numerator */ - Py_ssize_t size_w = Py_ABS(Py_SIZE(w)); /* digits in denominator */ + Py_ssize_t size_v = _PyLong_DigitCount(v); /* digits in numerator */ + Py_ssize_t size_w = _PyLong_DigitCount(w); /* digits in denominator */ if (size_w > 300 && (size_v - size_w) > 150) { /* Switch to _pylong.int_divmod(). If the quotient is small then "schoolbook" division is linear-time so don't use in that case. @@ -4135,8 +4104,8 @@ l_divmod(PyLongObject *v, PyLongObject *w, #endif if (long_divrem(v, w, &div, &mod) < 0) return -1; - if ((Py_SIZE(mod) < 0 && Py_SIZE(w) > 0) || - (Py_SIZE(mod) > 0 && Py_SIZE(w) < 0)) { + if ((_PyLong_IsNegative(mod) && _PyLong_IsPositive(w)) || + (_PyLong_IsPositive(mod) && _PyLong_IsNegative(w))) { PyLongObject *temp; temp = (PyLongObject *) long_add(mod, w); Py_SETREF(mod, temp); @@ -4175,15 +4144,15 @@ l_mod(PyLongObject *v, PyLongObject *w, PyLongObject **pmod) PyLongObject *mod; assert(pmod); - if (Py_ABS(Py_SIZE(v)) == 1 && Py_ABS(Py_SIZE(w)) == 1) { + if (_PyLong_DigitCount(v) == 1 && _PyLong_DigitCount(w) == 1) { /* Fast path for single-digit longs */ *pmod = (PyLongObject *)fast_mod(v, w); return -(*pmod == NULL); } if (long_rem(v, w, &mod) < 0) return -1; - if ((Py_SIZE(mod) < 0 && Py_SIZE(w) > 0) || - (Py_SIZE(mod) > 0 && Py_SIZE(w) < 0)) { + if ((_PyLong_IsNegative(mod) && _PyLong_IsPositive(w)) || + (_PyLong_IsPositive(mod) && _PyLong_IsNegative(w))) { PyLongObject *temp; temp = (PyLongObject *) long_add(mod, w); Py_SETREF(mod, temp); @@ -4202,7 +4171,7 @@ long_div(PyObject *a, PyObject *b) CHECK_BINOP(a, b); - if (Py_ABS(Py_SIZE(a)) == 1 && Py_ABS(Py_SIZE(b)) == 1) { + if (_PyLong_DigitCount((PyLongObject*)a) == 1 && _PyLong_DigitCount((PyLongObject*)b) == 1) { return fast_floor_div((PyLongObject*)a, (PyLongObject*)b); } @@ -4317,9 +4286,9 @@ long_true_divide(PyObject *v, PyObject *w) */ /* Reduce to case where a and b are both positive. */ - a_size = Py_ABS(Py_SIZE(a)); - b_size = Py_ABS(Py_SIZE(b)); - negate = (Py_SIZE(a) < 0) ^ (Py_SIZE(b) < 0); + a_size = _PyLong_DigitCount(a); + b_size = _PyLong_DigitCount(b); + negate = (_PyLong_IsNegative(a)) != (_PyLong_IsNegative(b)); if (b_size == 0) { PyErr_SetString(PyExc_ZeroDivisionError, "division by zero"); @@ -4412,7 +4381,7 @@ long_true_divide(PyObject *v, PyObject *w) inexact = 1; } long_normalize(x); - x_size = Py_SIZE(x); + x_size = _PyLong_SignedDigitCount(x); /* x //= b. If the remainder is nonzero, set inexact. We own the only reference to x, so it's safe to modify it in-place. 
*/ @@ -4429,11 +4398,11 @@ long_true_divide(PyObject *v, PyObject *w) Py_SETREF(x, div); if (x == NULL) goto error; - if (Py_SIZE(rem)) + if (!_PyLong_IsZero(rem)) inexact = 1; Py_DECREF(rem); } - x_size = Py_ABS(Py_SIZE(x)); + x_size = _PyLong_DigitCount(x); assert(x_size > 0); /* result of division is never zero */ x_bits = (x_size-1)*PyLong_SHIFT+bit_length_digit(x->long_value.ob_digit[x_size-1]); @@ -4535,7 +4504,7 @@ long_invmod(PyLongObject *a, PyLongObject *n) PyLongObject *b, *c; /* Should only ever be called for positive n */ - assert(Py_SIZE(n) > 0); + assert(_PyLong_IsPositive(n)); b = (PyLongObject *)PyLong_FromLong(1L); if (b == NULL) { @@ -4550,7 +4519,7 @@ long_invmod(PyLongObject *a, PyLongObject *n) Py_INCREF(n); /* references now owned: a, b, c, n */ - while (Py_SIZE(n) != 0) { + while (!_PyLong_IsZero(n)) { PyLongObject *q, *r, *s, *t; if (l_divmod(a, n, &q, &r) == -1) { @@ -4636,7 +4605,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) Py_RETURN_NOTIMPLEMENTED; } - if (Py_SIZE(b) < 0 && c == NULL) { + if (_PyLong_IsNegative(b) && c == NULL) { /* if exponent is negative and there's no modulus: return a float. This works because we know that this calls float_pow() which converts its @@ -4649,7 +4618,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) if (c) { /* if modulus == 0: raise ValueError() */ - if (Py_SIZE(c) == 0) { + if (_PyLong_IsZero(c)) { PyErr_SetString(PyExc_ValueError, "pow() 3rd argument cannot be 0"); goto Error; @@ -4658,7 +4627,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* if modulus < 0: negativeOutput = True modulus = -modulus */ - if (Py_SIZE(c) < 0) { + if (_PyLong_IsNegative(c)) { negativeOutput = 1; temp = (PyLongObject *)_PyLong_Copy(c); if (temp == NULL) @@ -4672,14 +4641,14 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) /* if modulus == 1: return 0 */ - if ((Py_SIZE(c) == 1) && (c->long_value.ob_digit[0] == 1)) { + if (_PyLong_IsNonNegativeCompact(c) && (c->long_value.ob_digit[0] == 1)) { z = (PyLongObject *)PyLong_FromLong(0L); goto Done; } /* if exponent is negative, negate the exponent and replace the base with a modular inverse */ - if (Py_SIZE(b) < 0) { + if (_PyLong_IsNegative(b)) { temp = (PyLongObject *)_PyLong_Copy(b); if (temp == NULL) goto Error; @@ -4705,7 +4674,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) base % modulus instead. We could _always_ do this reduction, but l_mod() isn't cheap, so we only do it when it buys something. */ - if (Py_SIZE(a) < 0 || Py_SIZE(a) > Py_SIZE(c)) { + if (_PyLong_IsNegative(a) || _PyLong_DigitCount(a) > _PyLong_DigitCount(c)) { if (l_mod(a, c, &temp) < 0) goto Error; Py_SETREF(a, temp); @@ -4747,7 +4716,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) REDUCE(result); \ } while(0) - i = Py_SIZE(b); + i = _PyLong_SignedDigitCount(b); digit bi = i ? 
b->long_value.ob_digit[i-1] : 0; digit bit; if (i <= 1 && bi <= 3) { @@ -4839,7 +4808,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) pending = 0; \ } while(0) - for (i = Py_SIZE(b) - 1; i >= 0; --i) { + for (i = _PyLong_SignedDigitCount(b) - 1; i >= 0; --i) { const digit bi = b->long_value.ob_digit[i]; for (j = PyLong_SHIFT - 1; j >= 0; --j) { const int bit = (bi >> j) & 1; @@ -4857,7 +4826,7 @@ long_pow(PyObject *v, PyObject *w, PyObject *x) ABSORB_PENDING; } - if (negativeOutput && (Py_SIZE(z) != 0)) { + if (negativeOutput && !_PyLong_IsZero(z)) { temp = (PyLongObject *)long_sub(z, c); if (temp == NULL) goto Error; @@ -4885,14 +4854,14 @@ long_invert(PyLongObject *v) { /* Implement ~x as -(x+1) */ PyLongObject *x; - if (IS_MEDIUM_VALUE(v)) + if (_PyLong_IsCompact(v)) return _PyLong_FromSTwoDigits(~medium_value(v)); x = (PyLongObject *) long_add(v, (PyLongObject *)_PyLong_GetOne()); if (x == NULL) return NULL; _PyLong_Negate(&x); - /* No need for maybe_small_long here, since any small - longs will have been caught in the Py_SIZE <= 1 fast path. */ + /* No need for maybe_small_long here, since any small longs + will have been caught in the _PyLong_IsCompact() fast path. */ return (PyObject *)x; } @@ -4900,18 +4869,18 @@ static PyObject * long_neg(PyLongObject *v) { PyLongObject *z; - if (IS_MEDIUM_VALUE(v)) + if (_PyLong_IsCompact(v)) return _PyLong_FromSTwoDigits(-medium_value(v)); z = (PyLongObject *)_PyLong_Copy(v); if (z != NULL) - Py_SET_SIZE(z, -(Py_SIZE(v))); + _PyLong_FlipSign(z); return (PyObject *)z; } static PyObject * long_abs(PyLongObject *v) { - if (Py_SIZE(v) < 0) + if (_PyLong_IsNegative(v)) return long_neg(v); else return long_long((PyObject *)v); @@ -4920,7 +4889,7 @@ long_abs(PyLongObject *v) static int long_bool(PyLongObject *v) { - return Py_SIZE(v) != 0; + return !_PyLong_IsZero(v); } /* wordshift, remshift = divmod(shiftby, PyLong_SHIFT) */ @@ -4928,14 +4897,14 @@ static int divmod_shift(PyObject *shiftby, Py_ssize_t *wordshift, digit *remshift) { assert(PyLong_Check(shiftby)); - assert(Py_SIZE(shiftby) >= 0); + assert(!_PyLong_IsNegative((PyLongObject *)shiftby)); Py_ssize_t lshiftby = PyLong_AsSsize_t((PyObject *)shiftby); if (lshiftby >= 0) { *wordshift = lshiftby / PyLong_SHIFT; *remshift = lshiftby % PyLong_SHIFT; return 0; } - /* PyLong_Check(shiftby) is true and Py_SIZE(shiftby) >= 0, so it must + /* PyLong_Check(shiftby) is true and shiftby is not negative, so it must be that PyLong_AsSsize_t raised an OverflowError. */ assert(PyErr_ExceptionMatches(PyExc_OverflowError)); PyErr_Clear(); @@ -4973,7 +4942,7 @@ long_rshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) assert(remshift < PyLong_SHIFT); /* Fast path for small a. */ - if (IS_MEDIUM_VALUE(a)) { + if (_PyLong_IsCompact(a)) { stwodigits m, x; digit shift; m = medium_value(a); @@ -4982,8 +4951,8 @@ long_rshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) return _PyLong_FromSTwoDigits(x); } - a_negative = Py_SIZE(a) < 0; - size_a = Py_ABS(Py_SIZE(a)); + a_negative = _PyLong_IsNegative(a); + size_a = _PyLong_DigitCount(a); if (a_negative) { /* For negative 'a', adjust so that 0 < remshift <= PyLong_SHIFT, @@ -5024,7 +4993,7 @@ long_rshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) significant `wordshift` digits of `a` is nonzero. Digit `wordshift` of `2**shift - 1` has value `PyLong_MASK >> hishift`. 
*/ - Py_SET_SIZE(z, -newsize); + _PyLong_SetSignAndDigitCount(z, -1, newsize); digit sticky = 0; for (Py_ssize_t j = 0; j < wordshift; j++) { @@ -5054,11 +5023,11 @@ long_rshift(PyObject *a, PyObject *b) CHECK_BINOP(a, b); - if (Py_SIZE(b) < 0) { + if (_PyLong_IsNegative((PyLongObject *)b)) { PyErr_SetString(PyExc_ValueError, "negative shift count"); return NULL; } - if (Py_SIZE(a) == 0) { + if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } if (divmod_shift(b, &wordshift, &remshift) < 0) @@ -5074,7 +5043,7 @@ _PyLong_Rshift(PyObject *a, size_t shiftby) digit remshift; assert(PyLong_Check(a)); - if (Py_SIZE(a) == 0) { + if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } wordshift = shiftby / PyLong_SHIFT; @@ -5089,23 +5058,23 @@ long_lshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) Py_ssize_t oldsize, newsize, i, j; twodigits accum; - if (wordshift == 0 && IS_MEDIUM_VALUE(a)) { + if (wordshift == 0 && _PyLong_IsCompact(a)) { stwodigits m = medium_value(a); // bypass undefined shift operator behavior stwodigits x = m < 0 ? -(-m << remshift) : m << remshift; return _PyLong_FromSTwoDigits(x); } - oldsize = Py_ABS(Py_SIZE(a)); + oldsize = _PyLong_DigitCount(a); newsize = oldsize + wordshift; if (remshift) ++newsize; z = _PyLong_New(newsize); if (z == NULL) return NULL; - if (Py_SIZE(a) < 0) { + if (_PyLong_IsNegative(a)) { assert(Py_REFCNT(z) == 1); - Py_SET_SIZE(z, -Py_SIZE(z)); + _PyLong_FlipSign(z); } for (i = 0; i < wordshift; i++) z->long_value.ob_digit[i] = 0; @@ -5131,11 +5100,11 @@ long_lshift(PyObject *a, PyObject *b) CHECK_BINOP(a, b); - if (Py_SIZE(b) < 0) { + if (_PyLong_IsNegative((PyLongObject *)b)) { PyErr_SetString(PyExc_ValueError, "negative shift count"); return NULL; } - if (Py_SIZE(a) == 0) { + if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } if (divmod_shift(b, &wordshift, &remshift) < 0) @@ -5151,7 +5120,7 @@ _PyLong_Lshift(PyObject *a, size_t shiftby) digit remshift; assert(PyLong_Check(a)); - if (Py_SIZE(a) == 0) { + if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } wordshift = shiftby / PyLong_SHIFT; @@ -5193,8 +5162,8 @@ long_bitwise(PyLongObject *a, result back to sign-magnitude at the end. */ /* If a is negative, replace it by its two's complement. */ - size_a = Py_ABS(Py_SIZE(a)); - nega = Py_SIZE(a) < 0; + size_a = _PyLong_DigitCount(a); + nega = _PyLong_IsNegative(a); if (nega) { z = _PyLong_New(size_a); if (z == NULL) @@ -5207,8 +5176,8 @@ long_bitwise(PyLongObject *a, Py_INCREF(a); /* Same for b. */ - size_b = Py_ABS(Py_SIZE(b)); - negb = Py_SIZE(b) < 0; + size_b = _PyLong_DigitCount(b); + negb = _PyLong_IsNegative(b); if (negb) { z = _PyLong_New(size_b); if (z == NULL) { @@ -5289,7 +5258,7 @@ long_bitwise(PyLongObject *a, /* Complement result if negative. 
*/ if (negz) { - Py_SET_SIZE(z, -(Py_SIZE(z))); + _PyLong_FlipSign(z); z->long_value.ob_digit[size_z] = PyLong_MASK; v_complement(z->long_value.ob_digit, z->long_value.ob_digit, size_z+1); } @@ -5305,7 +5274,7 @@ long_and(PyObject *a, PyObject *b) CHECK_BINOP(a, b); PyLongObject *x = (PyLongObject*)a; PyLongObject *y = (PyLongObject*)b; - if (IS_MEDIUM_VALUE(x) && IS_MEDIUM_VALUE(y)) { + if (_PyLong_IsCompact(x) && _PyLong_IsCompact(y)) { return _PyLong_FromSTwoDigits(medium_value(x) & medium_value(y)); } return long_bitwise(x, '&', y); @@ -5317,7 +5286,7 @@ long_xor(PyObject *a, PyObject *b) CHECK_BINOP(a, b); PyLongObject *x = (PyLongObject*)a; PyLongObject *y = (PyLongObject*)b; - if (IS_MEDIUM_VALUE(x) && IS_MEDIUM_VALUE(y)) { + if (_PyLong_IsCompact(x) && _PyLong_IsCompact(y)) { return _PyLong_FromSTwoDigits(medium_value(x) ^ medium_value(y)); } return long_bitwise(x, '^', y); @@ -5329,7 +5298,7 @@ long_or(PyObject *a, PyObject *b) CHECK_BINOP(a, b); PyLongObject *x = (PyLongObject*)a; PyLongObject *y = (PyLongObject*)b; - if (IS_MEDIUM_VALUE(x) && IS_MEDIUM_VALUE(y)) { + if (_PyLong_IsCompact(x) && _PyLong_IsCompact(y)) { return _PyLong_FromSTwoDigits(medium_value(x) | medium_value(y)); } return long_bitwise(x, '|', y); @@ -5353,14 +5322,11 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) stwodigits x, y, q, s, t, c_carry, d_carry; stwodigits A, B, C, D, T; int nbits, k; - Py_ssize_t size_a, size_b, alloc_a, alloc_b; digit *a_digit, *b_digit, *c_digit, *d_digit, *a_end, *b_end; a = (PyLongObject *)aarg; b = (PyLongObject *)barg; - size_a = Py_SIZE(a); - size_b = Py_SIZE(b); - if (-2 <= size_a && size_a <= 2 && -2 <= size_b && size_b <= 2) { + if (_PyLong_DigitCount(a) <= 2 && _PyLong_DigitCount(b) <= 2) { Py_INCREF(a); Py_INCREF(b); goto simple; @@ -5382,14 +5348,15 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) } /* We now own references to a and b */ - alloc_a = Py_SIZE(a); - alloc_b = Py_SIZE(b); + Py_ssize_t size_a, size_b, alloc_a, alloc_b; + alloc_a = _PyLong_DigitCount(a); + alloc_b = _PyLong_DigitCount(b); /* reduce until a fits into 2 digits */ - while ((size_a = Py_SIZE(a)) > 2) { + while ((size_a = _PyLong_DigitCount(a)) > 2) { nbits = bit_length_digit(a->long_value.ob_digit[size_a-1]); /* extract top 2*PyLong_SHIFT bits of a into x, along with corresponding bits of b into y */ - size_b = Py_SIZE(b); + size_b = _PyLong_DigitCount(b); assert(size_b <= size_a); if (size_b == 0) { if (size_a < alloc_a) { @@ -5433,7 +5400,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) Py_SETREF(a, b); b = r; alloc_a = alloc_b; - alloc_b = Py_SIZE(b); + alloc_b = _PyLong_DigitCount(b); continue; } @@ -5446,7 +5413,8 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) T = -C; C = -D; D = T; } if (c != NULL) { - Py_SET_SIZE(c, size_a); + assert(size_a >= 0); + _PyLong_SetSignAndDigitCount(c, 1, size_a); } else if (Py_REFCNT(a) == 1) { c = (PyLongObject*)Py_NewRef(a); @@ -5459,11 +5427,13 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) } if (d != NULL) { - Py_SET_SIZE(d, size_a); + assert(size_a >= 0); + _PyLong_SetSignAndDigitCount(d, 1, size_a); } else if (Py_REFCNT(b) == 1 && size_a <= alloc_b) { d = (PyLongObject*)Py_NewRef(b); - Py_SET_SIZE(d, size_a); + assert(size_a >= 0); + _PyLong_SetSignAndDigitCount(d, 1, size_a); } else { alloc_b = size_a; @@ -5635,9 +5605,7 @@ long_subtype_new(PyTypeObject *type, PyObject *x, PyObject *obase) if (tmp == NULL) return NULL; assert(PyLong_Check(tmp)); - n = Py_SIZE(tmp); - if (n < 0) - n = -n; + n = _PyLong_DigitCount(tmp); /* Fast operations for single digit 
integers (including zero) * assume that there is always at least one digit present. */ if (n == 0) { @@ -5649,7 +5617,7 @@ long_subtype_new(PyTypeObject *type, PyObject *x, PyObject *obase) return NULL; } assert(PyLong_Check(newobj)); - Py_SET_SIZE(newobj, Py_SIZE(tmp)); + newobj->long_value.lv_tag = tmp->long_value.lv_tag; for (i = 0; i < n; i++) { newobj->long_value.ob_digit[i] = tmp->long_value.ob_digit[i]; } @@ -5743,7 +5711,7 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) } /* Do a and b have different signs? If so, quotient is negative. */ - quo_is_neg = (Py_SIZE(a) < 0) != (Py_SIZE(b) < 0); + quo_is_neg = (_PyLong_IsNegative((PyLongObject *)a)) != (_PyLong_IsNegative((PyLongObject *)b)); if (long_divrem((PyLongObject*)a, (PyLongObject*)b, &quo, &rem) < 0) goto error; @@ -5763,8 +5731,8 @@ _PyLong_DivmodNear(PyObject *a, PyObject *b) cmp = long_compare((PyLongObject *)twice_rem, (PyLongObject *)b); Py_DECREF(twice_rem); - quo_is_odd = Py_SIZE(quo) != 0 && ((quo->long_value.ob_digit[0] & 1) != 0); - if ((Py_SIZE(b) < 0 ? cmp < 0 : cmp > 0) || (cmp == 0 && quo_is_odd)) { + quo_is_odd = (quo->long_value.ob_digit[0] & 1) != 0; + if ((_PyLong_IsNegative((PyLongObject *)b) ? cmp < 0 : cmp > 0) || (cmp == 0 && quo_is_odd)) { /* fix up quotient */ if (quo_is_neg) temp = long_sub(quo, (PyLongObject *)one); @@ -5837,7 +5805,7 @@ int___round___impl(PyObject *self, PyObject *o_ndigits) return NULL; /* if ndigits >= 0 then no rounding is necessary; return self unchanged */ - if (Py_SIZE(ndigits) >= 0) { + if (!_PyLong_IsNegative((PyLongObject *)ndigits)) { Py_DECREF(ndigits); return long_long(self); } @@ -5883,8 +5851,8 @@ int___sizeof___impl(PyObject *self) /*[clinic end generated code: output=3303f008eaa6a0a5 input=9b51620c76fc4507]*/ { /* using Py_MAX(..., 1) because we always allocate space for at least - one digit, even though the integer zero has a Py_SIZE of 0 */ - Py_ssize_t ndigits = Py_MAX(Py_ABS(Py_SIZE(self)), 1); + one digit, even though the integer zero has a digit count of 0 */ + Py_ssize_t ndigits = Py_MAX(_PyLong_DigitCount((PyLongObject *)self), 1); return Py_TYPE(self)->tp_basicsize + Py_TYPE(self)->tp_itemsize * ndigits; } @@ -5911,7 +5879,7 @@ int_bit_length_impl(PyObject *self) assert(self != NULL); assert(PyLong_Check(self)); - ndigits = Py_ABS(Py_SIZE(self)); + ndigits = _PyLong_DigitCount((PyLongObject *)self); if (ndigits == 0) return PyLong_FromLong(0); @@ -5980,7 +5948,7 @@ int_bit_count_impl(PyObject *self) assert(PyLong_Check(self)); PyLongObject *z = (PyLongObject *)self; - Py_ssize_t ndigits = Py_ABS(Py_SIZE(z)); + Py_ssize_t ndigits = _PyLong_DigitCount(z); Py_ssize_t bit_count = 0; /* Each digit has up to PyLong_SHIFT ones, so the accumulated bit count @@ -6285,7 +6253,7 @@ PyTypeObject PyLong_Type = { "int", /* tp_name */ offsetof(PyLongObject, long_value.ob_digit), /* tp_basicsize */ sizeof(digit), /* tp_itemsize */ - 0, /* tp_dealloc */ + long_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -6383,19 +6351,9 @@ PyLong_GetInfo(void) PyStatus _PyLong_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyLong_Type) < 0) { - return _PyStatus_ERR("Can't initialize int type"); - } - /* initialize int_info */ - if (Int_InfoType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin(&Int_InfoType, &int_info_desc) < 0) { - return _PyStatus_ERR("can't init int info type"); - } + if (_PyStructSequence_InitBuiltin(&Int_InfoType, &int_info_desc) 
< 0) { + return _PyStatus_ERR("can't init int info type"); } return _PyStatus_OK(); @@ -6409,5 +6367,5 @@ _PyLong_FiniTypes(PyInterpreterState *interp) return; } - _PyStructSequence_FiniType(&Int_InfoType); + _PyStructSequence_FiniBuiltin(&Int_InfoType); } diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index 1d6cc3b508448d..34cc797b404cda 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -2642,7 +2642,11 @@ static Py_ssize_t memory_length(PyMemoryViewObject *self) { CHECK_RELEASED_INT(self); - return self->view.ndim == 0 ? 1 : self->view.shape[0]; + if (self->view.ndim == 0) { + PyErr_SetString(PyExc_TypeError, "0-dim memory has no length"); + return -1; + } + return self->view.shape[0]; } /* As mapping */ diff --git a/Objects/object.c b/Objects/object.c index dff5e2afa16ab8..4ce10cf1192d3f 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -54,37 +54,100 @@ _PyObject_CheckConsistency(PyObject *op, int check_content) #ifdef Py_REF_DEBUG +/* We keep the legacy symbol around for backward compatibility. */ Py_ssize_t _Py_RefTotal; +static inline Py_ssize_t +get_legacy_reftotal(void) +{ + return _Py_RefTotal; +} +#endif + +#ifdef Py_REF_DEBUG + +# define REFTOTAL(interp) \ + interp->object_state.reftotal + +static inline void +reftotal_increment(PyInterpreterState *interp) +{ + REFTOTAL(interp)++; +} + static inline void -reftotal_increment(void) +reftotal_decrement(PyInterpreterState *interp) { - _Py_RefTotal++; + REFTOTAL(interp)--; } static inline void -reftotal_decrement(void) +reftotal_add(PyInterpreterState *interp, Py_ssize_t n) { - _Py_RefTotal--; + REFTOTAL(interp) += n; } +static inline Py_ssize_t get_global_reftotal(_PyRuntimeState *); + +/* We preserve the number of refs leaked during runtime finalization, + so they can be reported if the runtime is initialized again. */ +// XXX We don't lose any information by dropping this, +// so we should consider doing so. +static Py_ssize_t last_final_reftotal = 0; + void -_Py_AddRefTotal(Py_ssize_t n) +_Py_FinalizeRefTotal(_PyRuntimeState *runtime) { - _Py_RefTotal += n; + last_final_reftotal = get_global_reftotal(runtime); + runtime->object_state.interpreter_leaks = 0; } -Py_ssize_t -_Py_GetRefTotal(void) +void +_PyInterpreterState_FinalizeRefTotal(PyInterpreterState *interp) { - return _Py_RefTotal; + interp->runtime->object_state.interpreter_leaks += REFTOTAL(interp); + REFTOTAL(interp) = 0; } +static inline Py_ssize_t +get_reftotal(PyInterpreterState *interp) +{ + /* For a single interpreter, we ignore the legacy _Py_RefTotal, + since we can't determine which interpreter updated it. */ + return REFTOTAL(interp); +} + +static inline Py_ssize_t +get_global_reftotal(_PyRuntimeState *runtime) +{ + Py_ssize_t total = 0; + + /* Add up the total from each interpreter. */ + HEAD_LOCK(&_PyRuntime); + PyInterpreterState *interp = PyInterpreterState_Head(); + for (; interp != NULL; interp = PyInterpreterState_Next(interp)) { + total += REFTOTAL(interp); + } + HEAD_UNLOCK(&_PyRuntime); + + /* Add in the updated value from the legacy _Py_RefTotal. */ + total += get_legacy_reftotal(); + total += last_final_reftotal; + total += runtime->object_state.interpreter_leaks; + + return total; +} + +#undef REFTOTAL + void _PyDebug_PrintTotalRefs(void) { + _PyRuntimeState *runtime = &_PyRuntime; fprintf(stderr, "[%zd refs, %zd blocks]\n", - _Py_GetRefTotal(), _Py_GetAllocatedBlocks()); + get_global_reftotal(runtime), _Py_GetGlobalAllocatedBlocks()); + /* It may be helpful to also print the "legacy" reftotal separately. 
+ Likewise for the total for each interpreter. */ } #endif /* Py_REF_DEBUG */ @@ -139,30 +202,56 @@ _Py_NegativeRefcount(const char *filename, int lineno, PyObject *op) filename, lineno, __func__); } -/* This is exposed strictly for use in Py_INCREF(). */ -PyAPI_FUNC(void) +/* This is used strictly by Py_INCREF(). */ +void _Py_IncRefTotal_DO_NOT_USE_THIS(void) { - reftotal_increment(); + reftotal_increment(_PyInterpreterState_GET()); } -/* This is exposed strictly for use in Py_DECREF(). */ -PyAPI_FUNC(void) +/* This is used strictly by Py_DECREF(). */ +void _Py_DecRefTotal_DO_NOT_USE_THIS(void) { - reftotal_decrement(); + reftotal_decrement(_PyInterpreterState_GET()); +} + +void +_Py_IncRefTotal(PyInterpreterState *interp) +{ + reftotal_increment(interp); } void -_Py_IncRefTotal(void) +_Py_DecRefTotal(PyInterpreterState *interp) { - reftotal_increment(); + reftotal_decrement(interp); } void -_Py_DecRefTotal(void) +_Py_AddRefTotal(PyInterpreterState *interp, Py_ssize_t n) { - reftotal_decrement(); + reftotal_add(interp, n); +} + +/* This includes the legacy total + and any carried over from the last runtime init/fini cycle. */ +Py_ssize_t +_Py_GetGlobalRefTotal(void) +{ + return get_global_reftotal(&_PyRuntime); +} + +Py_ssize_t +_Py_GetLegacyRefTotal(void) +{ + return get_legacy_reftotal(); +} + +Py_ssize_t +_PyInterpreterState_GetRefTotal(PyInterpreterState *interp) +{ + return get_reftotal(interp); } #endif /* Py_REF_DEBUG */ @@ -182,21 +271,18 @@ Py_DecRef(PyObject *o) void _Py_IncRef(PyObject *o) { -#ifdef Py_REF_DEBUG - reftotal_increment(); -#endif Py_INCREF(o); } void _Py_DecRef(PyObject *o) { -#ifdef Py_REF_DEBUG - reftotal_decrement(); -#endif Py_DECREF(o); } + +/**************************************/ + PyObject * PyObject_Init(PyObject *op, PyTypeObject *tp) { @@ -804,7 +890,7 @@ PyObject_Hash(PyObject *v) * an explicit call to PyType_Ready, we implicitly call * PyType_Ready here and then check the tp_hash slot again */ - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) return -1; if (tp->tp_hash != NULL) @@ -947,7 +1033,7 @@ PyObject_GetAttr(PyObject *v, PyObject *name) } else { PyErr_Format(PyExc_AttributeError, - "'%.50s' object has no attribute '%U'", + "'%.100s' object has no attribute '%U'", tp->tp_name, name); } @@ -1267,7 +1353,7 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method) } PyErr_Format(PyExc_AttributeError, - "'%.50s' object has no attribute '%U'", + "'%.100s' object has no attribute '%U'", tp->tp_name, name); set_attribute_error_context(obj, name); @@ -1299,7 +1385,7 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name, } Py_INCREF(name); - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) goto done; } @@ -1388,7 +1474,7 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name, if (!suppress) { PyErr_Format(PyExc_AttributeError, - "'%.50s' object has no attribute '%U'", + "'%.100s' object has no attribute '%U'", tp->tp_name, name); set_attribute_error_context(obj, name); @@ -1405,12 +1491,6 @@ PyObject_GenericGetAttr(PyObject *obj, PyObject *name) return _PyObject_GenericGetAttrWithDict(obj, name, NULL, 0); } -PyObject * -_PyObject_GenericTryGetAttr(PyObject *obj, PyObject *name) -{ - return _PyObject_GenericGetAttrWithDict(obj, name, NULL, 1); -} - int _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name, PyObject *value, PyObject *dict) @@ -1427,8 +1507,9 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name, return -1; } - if 
(tp->tp_dict == NULL && PyType_Ready(tp) < 0) + if (!_PyType_IsReady(tp) && PyType_Ready(tp) < 0) { return -1; + } Py_INCREF(name); Py_INCREF(tp); @@ -1465,7 +1546,7 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name, } else { PyErr_Format(PyExc_AttributeError, - "'%.50s' object attribute '%U' is read-only", + "'%.100s' object attribute '%U' is read-only", tp->tp_name, name); } goto done; @@ -1674,10 +1755,14 @@ none_repr(PyObject *op) return PyUnicode_FromString("None"); } -static void _Py_NO_RETURN -none_dealloc(PyObject* Py_UNUSED(ignore)) +static void +none_dealloc(PyObject* none) { - _Py_FatalRefcountError("deallocating None"); + /* This should never get called, but we also don't want to SEGV if + * we accidentally decref None out of existence. Instead, + * since None is an immortal object, re-set the reference count. + */ + _Py_SetImmortal(none); } static PyObject * @@ -1743,7 +1828,7 @@ PyTypeObject _PyNone_Type = { "NoneType", 0, 0, - none_dealloc, /*tp_dealloc*/ /*never called*/ + none_dealloc, /*tp_dealloc*/ 0, /*tp_vectorcall_offset*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ @@ -1780,8 +1865,9 @@ PyTypeObject _PyNone_Type = { }; PyObject _Py_NoneStruct = { - _PyObject_EXTRA_INIT - 1, &_PyNone_Type + _PyObject_EXTRA_INIT + { _Py_IMMORTAL_REFCNT }, + &_PyNone_Type }; /* NotImplemented is an object that can be used to signal that an @@ -1814,13 +1900,14 @@ notimplemented_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) Py_RETURN_NOTIMPLEMENTED; } -static void _Py_NO_RETURN -notimplemented_dealloc(PyObject* ignore) +static void +notimplemented_dealloc(PyObject *notimplemented) { /* This should never get called, but we also don't want to SEGV if - * we accidentally decref NotImplemented out of existence. + * we accidentally decref NotImplemented out of existence. Instead, + * since NotImplemented is an immortal object, re-set the reference count. 
*/ - Py_FatalError("deallocating NotImplemented"); + _Py_SetImmortal(notimplemented); } static int @@ -1882,16 +1969,15 @@ PyTypeObject _PyNotImplemented_Type = { PyObject _Py_NotImplementedStruct = { _PyObject_EXTRA_INIT - 1, &_PyNotImplemented_Type + { _Py_IMMORTAL_REFCNT }, + &_PyNotImplemented_Type }; -#ifdef MS_WINDOWS -extern PyTypeObject PyHKEY_Type; -#endif extern PyTypeObject _Py_GenericAliasIterType; extern PyTypeObject _PyMemoryIter_Type; extern PyTypeObject _PyLineIterator; extern PyTypeObject _PyPositionsIterator; +extern PyTypeObject _PyLegacyEventHandler_Type; static PyTypeObject* static_types[] = { // The two most important base types: must be initialized first and @@ -1937,9 +2023,6 @@ static PyTypeObject* static_types[] = { &PyFunction_Type, &PyGen_Type, &PyGetSetDescr_Type, -#ifdef MS_WINDOWS - &PyHKEY_Type, -#endif &PyInstanceMethod_Type, &PyListIter_Type, &PyListRevIter_Type, @@ -1989,6 +2072,7 @@ static PyTypeObject* static_types[] = { &_PyHamt_BitmapNode_Type, &_PyHamt_CollisionNode_Type, &_PyHamt_Type, + &_PyLegacyEventHandler_Type, &_PyInterpreterID_Type, &_PyLineIterator, &_PyManagedBuffer_Type, @@ -2018,10 +2102,6 @@ static PyTypeObject* static_types[] = { PyStatus _PyTypes_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - // All other static types (unless initialized elsewhere) for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) { PyTypeObject *type = static_types[i]; @@ -2067,7 +2147,8 @@ new_reference(PyObject *op) if (_PyRuntime.tracemalloc.config.tracing) { _PyTraceMalloc_NewReference(op); } - Py_SET_REFCNT(op, 1); + // Skip the immortal object check in Py_SET_REFCNT; always set refcnt to 1 + op->ob_refcnt = 1; #ifdef Py_TRACE_REFS _Py_AddToAllObjects(op, 1); #endif @@ -2077,7 +2158,7 @@ void _Py_NewReference(PyObject *op) { #ifdef Py_REF_DEBUG - reftotal_increment(); + reftotal_increment(_PyInterpreterState_GET()); #endif new_reference(op); } diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index 5e1bcda1d976bb..de62aeb04461fa 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -725,20 +725,51 @@ PyObject_Free(void *ptr) static int running_on_valgrind = -1; #endif +typedef struct _obmalloc_state OMState; -#define allarenas (_PyRuntime.obmalloc.mgmt.arenas) -#define maxarenas (_PyRuntime.obmalloc.mgmt.maxarenas) -#define unused_arena_objects (_PyRuntime.obmalloc.mgmt.unused_arena_objects) -#define usable_arenas (_PyRuntime.obmalloc.mgmt.usable_arenas) -#define nfp2lasta (_PyRuntime.obmalloc.mgmt.nfp2lasta) -#define narenas_currently_allocated (_PyRuntime.obmalloc.mgmt.narenas_currently_allocated) -#define ntimes_arena_allocated (_PyRuntime.obmalloc.mgmt.ntimes_arena_allocated) -#define narenas_highwater (_PyRuntime.obmalloc.mgmt.narenas_highwater) -#define raw_allocated_blocks (_PyRuntime.obmalloc.mgmt.raw_allocated_blocks) +static inline int +has_own_state(PyInterpreterState *interp) +{ + return (_Py_IsMainInterpreter(interp) || + !(interp->feature_flags & Py_RTFLAGS_USE_MAIN_OBMALLOC) || + _Py_IsMainInterpreterFinalizing(interp)); +} + +static inline OMState * +get_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (!has_own_state(interp)) { + interp = _PyInterpreterState_Main(); + } + return &interp->obmalloc; +} + +// These macros all rely on a local "state" variable. 
+#define usedpools (state->pools.used) +#define allarenas (state->mgmt.arenas) +#define maxarenas (state->mgmt.maxarenas) +#define unused_arena_objects (state->mgmt.unused_arena_objects) +#define usable_arenas (state->mgmt.usable_arenas) +#define nfp2lasta (state->mgmt.nfp2lasta) +#define narenas_currently_allocated (state->mgmt.narenas_currently_allocated) +#define ntimes_arena_allocated (state->mgmt.ntimes_arena_allocated) +#define narenas_highwater (state->mgmt.narenas_highwater) +#define raw_allocated_blocks (state->mgmt.raw_allocated_blocks) Py_ssize_t -_Py_GetAllocatedBlocks(void) +_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *interp) { +#ifdef Py_DEBUG + assert(has_own_state(interp)); +#else + if (!has_own_state(interp)) { + _Py_FatalErrorFunc(__func__, + "the interpreter doesn't have its own allocator"); + } +#endif + OMState *state = &interp->obmalloc; + Py_ssize_t n = raw_allocated_blocks; /* add up allocated blocks for used pools */ for (uint i = 0; i < maxarenas; ++i) { @@ -759,20 +790,100 @@ _Py_GetAllocatedBlocks(void) return n; } +void +_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *interp) +{ + if (has_own_state(interp)) { + Py_ssize_t leaked = _PyInterpreterState_GetAllocatedBlocks(interp); + assert(has_own_state(interp) || leaked == 0); + interp->runtime->obmalloc.interpreter_leaks += leaked; + } +} + +static Py_ssize_t get_num_global_allocated_blocks(_PyRuntimeState *); + +/* We preserve the number of blocks leaked during runtime finalization, + so they can be reported if the runtime is initialized again. */ +// XXX We don't lose any information by dropping this, +// so we should consider doing so. +static Py_ssize_t last_final_leaks = 0; + +void +_Py_FinalizeAllocatedBlocks(_PyRuntimeState *runtime) +{ + last_final_leaks = get_num_global_allocated_blocks(runtime); + runtime->obmalloc.interpreter_leaks = 0; +} + +static Py_ssize_t +get_num_global_allocated_blocks(_PyRuntimeState *runtime) +{ + Py_ssize_t total = 0; + if (_PyRuntimeState_GetFinalizing(runtime) != NULL) { + PyInterpreterState *interp = _PyInterpreterState_Main(); + if (interp == NULL) { + /* We are at the very end of runtime finalization. + We can't rely on finalizing->interp since that thread + state is probably already freed, so we don't worry + about it. */ + assert(PyInterpreterState_Head() == NULL); + } + else { + assert(interp != NULL); + /* It is probably the last interpreter but not necessarily. */ + assert(PyInterpreterState_Next(interp) == NULL); + total += _PyInterpreterState_GetAllocatedBlocks(interp); + } + } + else { + HEAD_LOCK(runtime); + PyInterpreterState *interp = PyInterpreterState_Head(); + assert(interp != NULL); +#ifdef Py_DEBUG + int got_main = 0; +#endif + for (; interp != NULL; interp = PyInterpreterState_Next(interp)) { +#ifdef Py_DEBUG + if (_Py_IsMainInterpreter(interp)) { + assert(!got_main); + got_main = 1; + assert(has_own_state(interp)); + } +#endif + if (has_own_state(interp)) { + total += _PyInterpreterState_GetAllocatedBlocks(interp); + } + } + HEAD_UNLOCK(runtime); +#ifdef Py_DEBUG + assert(got_main); +#endif + } + total += runtime->obmalloc.interpreter_leaks; + total += last_final_leaks; + return total; +} + +Py_ssize_t +_Py_GetGlobalAllocatedBlocks(void) +{ + return get_num_global_allocated_blocks(&_PyRuntime); +} + #if WITH_PYMALLOC_RADIX_TREE /*==========================================================================*/ /* radix tree for tracking arena usage. 
*/ -#define arena_map_root (_PyRuntime.obmalloc.usage.arena_map_root) +#define arena_map_root (state->usage.arena_map_root) #ifdef USE_INTERIOR_NODES -#define arena_map_mid_count (_PyRuntime.obmalloc.usage.arena_map_mid_count) -#define arena_map_bot_count (_PyRuntime.obmalloc.usage.arena_map_bot_count) +#define arena_map_mid_count (state->usage.arena_map_mid_count) +#define arena_map_bot_count (state->usage.arena_map_bot_count) #endif /* Return a pointer to a bottom tree node, return NULL if it doesn't exist or * it cannot be created */ static Py_ALWAYS_INLINE arena_map_bot_t * -arena_map_get(pymem_block *p, int create) +arena_map_get(OMState *state, pymem_block *p, int create) { #ifdef USE_INTERIOR_NODES /* sanity check that IGNORE_BITS is correct */ @@ -833,11 +944,12 @@ arena_map_get(pymem_block *p, int create) /* mark or unmark addresses covered by arena */ static int -arena_map_mark_used(uintptr_t arena_base, int is_used) +arena_map_mark_used(OMState *state, uintptr_t arena_base, int is_used) { /* sanity check that IGNORE_BITS is correct */ assert(HIGH_BITS(arena_base) == HIGH_BITS(&arena_map_root)); - arena_map_bot_t *n_hi = arena_map_get((pymem_block *)arena_base, is_used); + arena_map_bot_t *n_hi = arena_map_get( + state, (pymem_block *)arena_base, is_used); if (n_hi == NULL) { assert(is_used); /* otherwise node should already exist */ return 0; /* failed to allocate space for node */ @@ -862,7 +974,8 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) * must overflow to 0. However, that would mean arena_base was * "ideal" and we should not be in this case. */ assert(arena_base < arena_base_next); - arena_map_bot_t *n_lo = arena_map_get((pymem_block *)arena_base_next, is_used); + arena_map_bot_t *n_lo = arena_map_get( + state, (pymem_block *)arena_base_next, is_used); if (n_lo == NULL) { assert(is_used); /* otherwise should already exist */ n_hi->arenas[i3].tail_hi = 0; @@ -877,9 +990,9 @@ arena_map_mark_used(uintptr_t arena_base, int is_used) /* Return true if 'p' is a pointer inside an obmalloc arena. * _PyObject_Free() calls this so it needs to be very fast. */ static int -arena_map_is_used(pymem_block *p) +arena_map_is_used(OMState *state, pymem_block *p) { - arena_map_bot_t *n = arena_map_get(p, 0); + arena_map_bot_t *n = arena_map_get(state, p, 0); if (n == NULL) { return 0; } @@ -902,7 +1015,7 @@ arena_map_is_used(pymem_block *p) * `usable_arenas` to the return value. */ static struct arena_object* -new_arena(void) +new_arena(OMState *state) { struct arena_object* arenaobj; uint excess; /* number of bytes above pool alignment */ @@ -968,7 +1081,7 @@ new_arena(void) address = _PyObject_Arena.alloc(_PyObject_Arena.ctx, ARENA_SIZE); #if WITH_PYMALLOC_RADIX_TREE if (address != NULL) { - if (!arena_map_mark_used((uintptr_t)address, 1)) { + if (!arena_map_mark_used(state, (uintptr_t)address, 1)) { /* marking arena in radix tree failed, abort */ _PyObject_Arena.free(_PyObject_Arena.ctx, address, ARENA_SIZE); address = NULL; @@ -1011,9 +1124,9 @@ new_arena(void) pymalloc. When the radix tree is used, 'poolp' is unused. */ static bool -address_in_range(void *p, poolp Py_UNUSED(pool)) +address_in_range(OMState *state, void *p, poolp Py_UNUSED(pool)) { - return arena_map_is_used(p); + return arena_map_is_used(state, p); } #else /* @@ -1094,7 +1207,7 @@ extremely desirable that it be this fast. 
static bool _Py_NO_SANITIZE_ADDRESS _Py_NO_SANITIZE_THREAD _Py_NO_SANITIZE_MEMORY -address_in_range(void *p, poolp pool) +address_in_range(OMState *state, void *p, poolp pool) { // Since address_in_range may be reading from memory which was not allocated // by Python, it is important that pool->arenaindex is read only once, as @@ -1111,8 +1224,6 @@ address_in_range(void *p, poolp pool) /*==========================================================================*/ -#define usedpools (_PyRuntime.obmalloc.pools.used) - // Called when freelist is exhausted. Extend the freelist if there is // space for a block. Otherwise, remove this pool from usedpools. static void @@ -1138,7 +1249,7 @@ pymalloc_pool_extend(poolp pool, uint size) * This function takes new pool and allocate a block from it. */ static void* -allocate_from_new_pool(uint size) +allocate_from_new_pool(OMState *state, uint size) { /* There isn't a pool of the right size class immediately * available: use a free pool. @@ -1150,7 +1261,7 @@ allocate_from_new_pool(uint size) return NULL; } #endif - usable_arenas = new_arena(); + usable_arenas = new_arena(state); if (usable_arenas == NULL) { return NULL; } @@ -1274,7 +1385,7 @@ allocate_from_new_pool(uint size) or when the max memory limit has been reached. */ static inline void* -pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) +pymalloc_alloc(OMState *state, void *Py_UNUSED(ctx), size_t nbytes) { #ifdef WITH_VALGRIND if (UNLIKELY(running_on_valgrind == -1)) { @@ -1314,7 +1425,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) /* There isn't a pool of the right size class immediately * available: use a free pool. */ - bp = allocate_from_new_pool(size); + bp = allocate_from_new_pool(state, size); } return (void *)bp; @@ -1324,7 +1435,8 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes) void * _PyObject_Malloc(void *ctx, size_t nbytes) { - void* ptr = pymalloc_alloc(ctx, nbytes); + OMState *state = get_state(); + void* ptr = pymalloc_alloc(state, ctx, nbytes); if (LIKELY(ptr != NULL)) { return ptr; } @@ -1343,7 +1455,8 @@ _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize) assert(elsize == 0 || nelem <= (size_t)PY_SSIZE_T_MAX / elsize); size_t nbytes = nelem * elsize; - void* ptr = pymalloc_alloc(ctx, nbytes); + OMState *state = get_state(); + void* ptr = pymalloc_alloc(state, ctx, nbytes); if (LIKELY(ptr != NULL)) { memset(ptr, 0, nbytes); return ptr; @@ -1358,7 +1471,7 @@ _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize) static void -insert_to_usedpool(poolp pool) +insert_to_usedpool(OMState *state, poolp pool) { assert(pool->ref.count > 0); /* else the pool is empty */ @@ -1374,7 +1487,7 @@ insert_to_usedpool(poolp pool) } static void -insert_to_freepool(poolp pool) +insert_to_freepool(OMState *state, poolp pool) { poolp next = pool->nextpool; poolp prev = pool->prevpool; @@ -1457,7 +1570,7 @@ insert_to_freepool(poolp pool) #if WITH_PYMALLOC_RADIX_TREE /* mark arena region as not under control of obmalloc */ - arena_map_mark_used(ao->address, 0); + arena_map_mark_used(state, ao->address, 0); #endif /* Free the entire arena. */ @@ -1544,7 +1657,7 @@ insert_to_freepool(poolp pool) Return 1 if it was freed. Return 0 if the block was not allocated by pymalloc_alloc(). 
*/ static inline int -pymalloc_free(void *Py_UNUSED(ctx), void *p) +pymalloc_free(OMState *state, void *Py_UNUSED(ctx), void *p) { assert(p != NULL); @@ -1555,7 +1668,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p) #endif poolp pool = POOL_ADDR(p); - if (UNLIKELY(!address_in_range(p, pool))) { + if (UNLIKELY(!address_in_range(state, p, pool))) { return 0; } /* We allocated this address. */ @@ -1579,7 +1692,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p) * targets optimal filling when several pools contain * blocks of the same size class. */ - insert_to_usedpool(pool); + insert_to_usedpool(state, pool); return 1; } @@ -1596,7 +1709,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p) * previously freed pools will be allocated later * (being not referenced, they are perhaps paged out). */ - insert_to_freepool(pool); + insert_to_freepool(state, pool); return 1; } @@ -1609,7 +1722,8 @@ _PyObject_Free(void *ctx, void *p) return; } - if (UNLIKELY(!pymalloc_free(ctx, p))) { + OMState *state = get_state(); + if (UNLIKELY(!pymalloc_free(state, ctx, p))) { /* pymalloc didn't allocate this address */ PyMem_RawFree(p); raw_allocated_blocks--; @@ -1627,7 +1741,8 @@ _PyObject_Free(void *ctx, void *p) Return 0 if pymalloc didn't allocated p. */ static int -pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes) +pymalloc_realloc(OMState *state, void *ctx, + void **newptr_p, void *p, size_t nbytes) { void *bp; poolp pool; @@ -1643,7 +1758,7 @@ pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes) #endif pool = POOL_ADDR(p); - if (!address_in_range(p, pool)) { + if (!address_in_range(state, p, pool)) { /* pymalloc is not managing this block. If nbytes <= SMALL_REQUEST_THRESHOLD, it's tempting to try to take @@ -1696,7 +1811,8 @@ _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes) return _PyObject_Malloc(ctx, nbytes); } - if (pymalloc_realloc(ctx, &ptr2, ptr, nbytes)) { + OMState *state = get_state(); + if (pymalloc_realloc(state, ctx, &ptr2, ptr, nbytes)) { return ptr2; } @@ -1710,11 +1826,29 @@ _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes) * only be used by extensions that are compiled with pymalloc enabled. 
*/ Py_ssize_t -_Py_GetAllocatedBlocks(void) +_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp)) +{ + return 0; +} + +Py_ssize_t +_Py_GetGlobalAllocatedBlocks(void) { return 0; } +void +_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp)) +{ + return; +} + +void +_Py_FinalizeAllocatedBlocks(_PyRuntimeState *Py_UNUSED(runtime)) +{ + return; +} + #endif /* WITH_PYMALLOC */ @@ -2289,6 +2423,7 @@ _PyObject_DebugMallocStats(FILE *out) if (!_PyMem_PymallocEnabled()) { return 0; } + OMState *state = get_state(); uint i; const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT; diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index b4d0bbf32c84c8..beb86b9623bdbc 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -33,7 +33,7 @@ validate_step(PyObject *step) return PyLong_FromLong(1); step = PyNumber_Index(step); - if (step && _PyLong_Sign(step) == 0) { + if (step && _PyLong_IsZero((PyLongObject *)step)) { PyErr_SetString(PyExc_ValueError, "range() arg 3 must not be zero"); Py_CLEAR(step); diff --git a/Objects/setobject.c b/Objects/setobject.c index fcdda2a0bca2b6..58f0ae73c0c403 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -2543,6 +2543,7 @@ static PyTypeObject _PySetDummy_Type = { }; static PyObject _dummy_struct = { - _PyObject_EXTRA_INIT - 2, &_PySetDummy_Type + _PyObject_EXTRA_INIT + { _Py_IMMORTAL_REFCNT }, + &_PySetDummy_Type }; diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index 5d2e6ad522bcf2..e6776ac92b669c 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -29,6 +29,16 @@ ellipsis_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) return Py_NewRef(Py_Ellipsis); } +static void +ellipsis_dealloc(PyObject *ellipsis) +{ + /* This should never get called, but we also don't want to SEGV if + * we accidentally decref Ellipsis out of existence. Instead, + * since Ellipsis is an immortal object, re-set the reference count. 
+ */ + _Py_SetImmortal(ellipsis); +} + static PyObject * ellipsis_repr(PyObject *op) { @@ -51,7 +61,7 @@ PyTypeObject PyEllipsis_Type = { "ellipsis", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ - 0, /*never called*/ /* tp_dealloc */ + ellipsis_dealloc, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -89,7 +99,8 @@ PyTypeObject PyEllipsis_Type = { PyObject _Py_EllipsisObject = { _PyObject_EXTRA_INIT - 1, &PyEllipsis_Type + { _Py_IMMORTAL_REFCNT }, + &PyEllipsis_Type }; @@ -445,7 +456,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (start == NULL) goto error; - if (_PyLong_Sign(start) < 0) { + if (_PyLong_IsNegative((PyLongObject *)start)) { /* start += length */ PyObject *tmp = PyNumber_Add(start, length); Py_SETREF(start, tmp); @@ -478,7 +489,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, if (stop == NULL) goto error; - if (_PyLong_Sign(stop) < 0) { + if (_PyLong_IsNegative((PyLongObject *)stop)) { /* stop += length */ PyObject *tmp = PyNumber_Add(stop, length); Py_SETREF(stop, tmp); @@ -533,7 +544,7 @@ slice_indices(PySliceObject* self, PyObject* len) if (length == NULL) return NULL; - if (_PyLong_Sign(length) < 0) { + if (_PyLong_IsNegative((PyLongObject *)length)) { PyErr_SetString(PyExc_ValueError, "length should not be negative"); Py_DECREF(length); diff --git a/Objects/structseq.c b/Objects/structseq.c index c20962ecd82563..d8f55dc1eae5ed 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -31,6 +31,7 @@ get_type_attr_as_size(PyTypeObject *tp, PyObject *name) PyErr_Format(PyExc_TypeError, "Missed attribute '%U' of type %s", name, tp->tp_name); + return -1; } return PyLong_AsSsize_t(v); } @@ -432,12 +433,10 @@ initialize_structseq_dict(PyStructSequence_Desc *desc, PyObject* dict, static PyMemberDef * initialize_members(PyStructSequence_Desc *desc, - Py_ssize_t *pn_members, Py_ssize_t *pn_unnamed_members) + Py_ssize_t n_members, Py_ssize_t n_unnamed_members) { PyMemberDef *members; - Py_ssize_t n_members, n_unnamed_members; - n_members = count_members(desc, &n_unnamed_members); members = PyMem_NEW(PyMemberDef, n_members - n_unnamed_members + 1); if (members == NULL) { PyErr_NoMemory(); @@ -462,8 +461,6 @@ initialize_members(PyStructSequence_Desc *desc, } members[k].name = NULL; - *pn_members = n_members; - *pn_unnamed_members = n_unnamed_members; return members; } @@ -509,26 +506,59 @@ _PyStructSequence_InitBuiltinWithFlags(PyTypeObject *type, PyStructSequence_Desc *desc, unsigned long tp_flags) { - PyMemberDef *members; - Py_ssize_t n_members, n_unnamed_members; + Py_ssize_t n_unnamed_members; + Py_ssize_t n_members = count_members(desc, &n_unnamed_members); + PyMemberDef *members = NULL; + + int initialized = 1; + if ((type->tp_flags & Py_TPFLAGS_READY) == 0) { + assert(type->tp_name == NULL); + assert(type->tp_members == NULL); + assert(type->tp_base == NULL); + + members = initialize_members(desc, n_members, n_unnamed_members); + if (members == NULL) { + goto error; + } + initialize_static_fields(type, desc, members, tp_flags); - members = initialize_members(desc, &n_members, &n_unnamed_members); - if (members == NULL) { - return -1; + _Py_SetImmortal(type); + initialized = 0; + } +#ifndef NDEBUG + else { + // Ensure that the type was initialized. 
+ assert(type->tp_name != NULL); + assert(type->tp_members != NULL); + assert(type->tp_base == &PyTuple_Type); + assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); + assert(_Py_IsImmortal(type)); } - initialize_static_fields(type, desc, members, tp_flags); +#endif + if (_PyStaticType_InitBuiltin(type) < 0) { - PyMem_Free(members); PyErr_Format(PyExc_RuntimeError, "Can't initialize builtin type %s", desc->name); - return -1; + goto error; } - if (initialize_static_type(type, desc, n_members, n_unnamed_members) < 0) { - PyMem_Free(members); - return -1; + // This should be dropped if tp_dict is made per-interpreter. + if (initialized) { + return 0; } + + if (initialize_structseq_dict( + desc, type->tp_dict, n_members, n_unnamed_members) < 0) { + goto error; + } + return 0; + +error: + if (members != NULL) { + PyMem_Free(members); + } + return -1; } int @@ -551,7 +581,8 @@ PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc) return -1; } - members = initialize_members(desc, &n_members, &n_unnamed_members); + n_members = count_members(desc, &n_unnamed_members); + members = initialize_members(desc, n_members, n_unnamed_members); if (members == NULL) { return -1; } @@ -570,35 +601,32 @@ PyStructSequence_InitType(PyTypeObject *type, PyStructSequence_Desc *desc) } +/* This is exposed in the internal API, not the public API. + It is only called on builtin static types, which are all + initialized via _PyStructSequence_InitBuiltinWithFlags(). */ + void -_PyStructSequence_FiniType(PyTypeObject *type) +_PyStructSequence_FiniBuiltin(PyTypeObject *type) { // Ensure that the type is initialized assert(type->tp_name != NULL); assert(type->tp_base == &PyTuple_Type); + assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); + assert(_Py_IsImmortal(type)); // Cannot delete a type if it still has subclasses if (_PyType_HasSubclasses(type)) { + // XXX Shouldn't this be an error? return; } - // Undo PyStructSequence_NewType() - type->tp_name = NULL; - PyMem_Free(type->tp_members); - _PyStaticType_Dealloc(type); - assert(Py_REFCNT(type) == 1); - // Undo Py_INCREF(type) of _PyStructSequence_InitType(). - // Don't use Py_DECREF(): static type must not be deallocated - Py_SET_REFCNT(type, 0); -#ifdef Py_REF_DEBUG - _Py_DecRefTotal(); -#endif - // Make sure that _PyStructSequence_InitType() will initialize - // the type again - assert(Py_REFCNT(type) == 0); - assert(type->tp_name == NULL); + // Undo _PyStructSequence_InitBuiltinWithFlags(). 
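/* --- Illustrative sketch, not part of the patch ---------------------------
 * The struct-sequence hunks above make _PyStructSequence_InitBuiltinWithFlags()
 * idempotent (it detects Py_TPFLAGS_READY and returns early) and pair it with
 * the new _PyStructSequence_FiniBuiltin().  A core caller might use the pair
 * roughly as below; `MyResultType`, `my_result_fields`, `my_result_desc`,
 * `my_init_types` and `my_fini_types` are hypothetical names, everything else
 * is taken from the surrounding diff.
 */
static PyStructSequence_Field my_result_fields[] = {
    {"value", "the computed value"},
    {NULL, NULL},
};

static PyStructSequence_Desc my_result_desc = {
    "mymod.my_result",            /* name */
    "hypothetical result type",   /* doc */
    my_result_fields,             /* fields */
    1,                            /* n_in_sequence */
};

static PyTypeObject MyResultType;   /* static builtin type, zero-initialized */

static int
my_init_types(void)
{
    /* Calling this again (e.g. for a subinterpreter) is fine: the patched
     * implementation sees Py_TPFLAGS_READY and returns early.  Extra tp_flags
     * such as Py_TPFLAGS_DISALLOW_INSTANTIATION could be passed instead of 0. */
    return _PyStructSequence_InitBuiltinWithFlags(&MyResultType,
                                                  &my_result_desc, 0);
}

static void
my_fini_types(void)
{
    /* Undoes tp_name/tp_members/tp_base, mirroring the cleanup just below. */
    _PyStructSequence_FiniBuiltin(&MyResultType);
}
/* ------------------------------------------------------------------------ */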
+ type->tp_name = NULL; + PyMem_Free(type->tp_members); + type->tp_members = NULL; + type->tp_base = NULL; } @@ -612,7 +640,8 @@ _PyStructSequence_NewType(PyStructSequence_Desc *desc, unsigned long tp_flags) Py_ssize_t n_members, n_unnamed_members; /* Initialize MemberDefs */ - members = initialize_members(desc, &n_members, &n_unnamed_members); + n_members = count_members(desc, &n_unnamed_members); + members = initialize_members(desc, n_members, n_unnamed_members); if (members == NULL) { return NULL; } diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 59c0251639d3dd..991edcc86677de 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -944,7 +944,7 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) if (sv == NULL) { *pv = NULL; #ifdef Py_REF_DEBUG - _Py_DecRefTotal(); + _Py_DecRefTotal(_PyInterpreterState_GET()); #endif PyObject_GC_Del(v); return -1; @@ -960,24 +960,6 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) } -PyStatus -_PyTuple_InitTypes(PyInterpreterState *interp) -{ - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyTuple_Type) < 0) { - return _PyStatus_ERR("Can't initialize tuple type"); - } - - if (PyType_Ready(&PyTupleIter_Type) < 0) { - return _PyStatus_ERR("Can't initialize tuple iterator type"); - } - - return _PyStatus_OK(); -} - static void maybe_freelist_clear(PyInterpreterState *, int); void diff --git a/Objects/typeobject.c b/Objects/typeobject.c index f0654c239f6635..060d14e254ab2d 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -8,6 +8,7 @@ #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_moduleobject.h" // _PyModule_GetDef() #include "pycore_object.h" // _PyType_HasFeature() +#include "pycore_long.h" // _PyLong_IsNegative() #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_typeobject.h" // struct type_cache @@ -44,7 +45,9 @@ class object "PyObject *" "&PyBaseObject_Type" PyUnicode_IS_READY(name) && \ (PyUnicode_GET_LENGTH(name) <= MCACHE_MAX_ATTR_SIZE) -#define next_version_tag (_PyRuntime.types.next_version_tag) +#define NEXT_GLOBAL_VERSION_TAG _PyRuntime.types.next_version_tag +#define NEXT_VERSION_TAG(interp) \ + (interp)->types.next_version_tag typedef struct PySlot_Offset { short subslot_offset; @@ -317,7 +320,7 @@ _PyType_InitCache(PyInterpreterState *interp) entry->version = 0; // Set to None so _PyType_Lookup() can use Py_SETREF(), // rather than using slower Py_XSETREF(). - entry->name = Py_NewRef(Py_None); + entry->name = Py_None; entry->value = NULL; } } @@ -331,7 +334,7 @@ _PyType_ClearCache(PyInterpreterState *interp) // use Py_SETREF() rather than using slower Py_XSETREF(). 
type_cache_clear(cache, Py_None); - return next_version_tag - 1; + return NEXT_VERSION_TAG(interp) - 1; } @@ -400,7 +403,7 @@ PyType_ClearWatcher(int watcher_id) return 0; } -static int assign_version_tag(PyTypeObject *type); +static int assign_version_tag(PyInterpreterState *interp, PyTypeObject *type); int PyType_Watch(int watcher_id, PyObject* obj) @@ -415,7 +418,7 @@ PyType_Watch(int watcher_id, PyObject* obj) return -1; } // ensure we will get a callback on the next modification - assign_version_tag(type); + assign_version_tag(interp, type); type->tp_watched |= (1 << watcher_id); return 0; } @@ -493,6 +496,11 @@ PyType_Modified(PyTypeObject *type) type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG; type->tp_version_tag = 0; /* 0 is not a valid version tag */ + if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) { + // This field *must* be invalidated if the type is modified (see the + // comment on struct _specialization_cache): + ((PyHeapTypeObject *)type)->_spec_cache.getitem = NULL; + } } static void @@ -543,13 +551,20 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) { } } return; + clear: + assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG; type->tp_version_tag = 0; /* 0 is not a valid version tag */ + if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) { + // This field *must* be invalidated if the type is modified (see the + // comment on struct _specialization_cache): + ((PyHeapTypeObject *)type)->_spec_cache.getitem = NULL; + } } static int -assign_version_tag(PyTypeObject *type) +assign_version_tag(PyInterpreterState *interp, PyTypeObject *type) { /* Ensure that the tp_version_tag is valid and set Py_TPFLAGS_VALID_VERSION_TAG. To respect the invariant, this @@ -563,24 +578,42 @@ assign_version_tag(PyTypeObject *type) return 0; } - if (next_version_tag == 0) { - /* We have run out of version numbers */ - return 0; + if (type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) { + /* static types */ + if (NEXT_GLOBAL_VERSION_TAG > _Py_MAX_GLOBAL_TYPE_VERSION_TAG) { + /* We have run out of version numbers */ + return 0; + } + type->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++; + assert (type->tp_version_tag <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG); + } + else { + /* heap types */ + if (NEXT_VERSION_TAG(interp) == 0) { + /* We have run out of version numbers */ + return 0; + } + type->tp_version_tag = NEXT_VERSION_TAG(interp)++; + assert (type->tp_version_tag != 0); } - type->tp_version_tag = next_version_tag++; - assert (type->tp_version_tag != 0); PyObject *bases = type->tp_bases; Py_ssize_t n = PyTuple_GET_SIZE(bases); for (Py_ssize_t i = 0; i < n; i++) { PyObject *b = PyTuple_GET_ITEM(bases, i); - if (!assign_version_tag(_PyType_CAST(b))) + if (!assign_version_tag(interp, _PyType_CAST(b))) return 0; } type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; return 1; } +int PyUnstable_Type_AssignVersionTag(PyTypeObject *type) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return assign_version_tag(interp, type); +} + static PyMemberDef type_members[] = { {"__basicsize__", T_PYSSIZET, offsetof(PyTypeObject,tp_basicsize),READONLY}, @@ -2330,7 +2363,15 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro) from the custom MRO */ type_mro_modified(type, type->tp_bases); - PyType_Modified(type); + // XXX Expand this to Py_TPFLAGS_IMMUTABLETYPE? 
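/* --- Illustrative sketch, not part of the patch ---------------------------
 * assign_version_tag() now draws from two pools: NEXT_GLOBAL_VERSION_TAG for
 * immutable/static types and a per-interpreter counter for heap types, and
 * the new PyUnstable_Type_AssignVersionTag() exposes that machinery.  A
 * hypothetical caller (`warm_up_type` is not in the patch) that wants a
 * usable tp_version_tag up front might look like this.
 */
static int
warm_up_type(PyTypeObject *tp)
{
    /* PyUnstable_Type_AssignVersionTag() returns 1 on success and 0 once the
     * relevant tag space is exhausted, mirroring assign_version_tag() above. */
    if (!PyUnstable_Type_AssignVersionTag(tp)) {
        return -1;   /* no tag available; caches keyed on the tag stay cold */
    }
    assert(tp->tp_version_tag != 0);
    assert(tp->tp_flags & Py_TPFLAGS_VALID_VERSION_TAG);
    return 0;
}
/* ------------------------------------------------------------------------ */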
+ if (!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)) { + PyType_Modified(type); + } + else { + /* For static builtin types, this is only called during init + before the method cache has been populated. */ + assert(_PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG)); + } if (p_old_mro != NULL) *p_old_mro = old_mro; /* transfer the ownership */ @@ -4165,6 +4206,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) { PyObject *res; int error; + PyInterpreterState *interp = _PyInterpreterState_GET(); unsigned int h = MCACHE_HASH_METHOD(type, name); struct type_cache *cache = get_type_cache(); @@ -4199,7 +4241,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return NULL; } - if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(type)) { + if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(interp, type)) { h = MCACHE_HASH_METHOD(type, name); struct type_cache_entry *entry = &cache->hashtable[h]; entry->version = type->tp_version_tag; @@ -4317,7 +4359,7 @@ _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int * suppress_missin /* Give up */ if (suppress_missing_attribute == NULL) { PyErr_Format(PyExc_AttributeError, - "type object '%.50s' has no attribute '%U'", + "type object '%.100s' has no attribute '%U'", type->tp_name, name); } else { // signal the caller we have not set an PyExc_AttributeError and gave up @@ -6660,8 +6702,10 @@ type_ready_mro(PyTypeObject *type) assert(type->tp_mro != NULL); assert(PyTuple_Check(type->tp_mro)); - /* All bases of statically allocated type should be statically allocated */ + /* All bases of statically allocated type should be statically allocated, + and static builtin types must have static builtin bases. */ if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { + assert(type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE); PyObject *mro = type->tp_mro; Py_ssize_t n = PyTuple_GET_SIZE(mro); for (Py_ssize_t i = 0; i < n; i++) { @@ -6673,6 +6717,8 @@ type_ready_mro(PyTypeObject *type) type->tp_name, base->tp_name); return -1; } + assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) || + (base->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); } } return 0; @@ -6902,8 +6948,12 @@ type_ready_post_checks(PyTypeObject *type) static int type_ready(PyTypeObject *type) { + _PyObject_ASSERT((PyObject *)type, + (type->tp_flags & Py_TPFLAGS_READYING) == 0); + type->tp_flags |= Py_TPFLAGS_READYING; + if (type_ready_pre_checks(type) < 0) { - return -1; + goto error; } #ifdef Py_TRACE_REFS @@ -6917,41 +6967,49 @@ type_ready(PyTypeObject *type) /* Initialize tp_dict: _PyType_IsReady() tests if tp_dict != NULL */ if (type_ready_set_dict(type) < 0) { - return -1; + goto error; } if (type_ready_set_bases(type) < 0) { - return -1; + goto error; } if (type_ready_mro(type) < 0) { - return -1; + goto error; } if (type_ready_set_new(type) < 0) { - return -1; + goto error; } if (type_ready_fill_dict(type) < 0) { - return -1; + goto error; } if (type_ready_inherit(type) < 0) { - return -1; + goto error; } if (type_ready_preheader(type) < 0) { - return -1; + goto error; } if (type_ready_set_hash(type) < 0) { - return -1; + goto error; } if (type_ready_add_subclasses(type) < 0) { - return -1; + goto error; } if (type_ready_managed_dict(type) < 0) { - return -1; + goto error; } if (type_ready_post_checks(type) < 0) { - return -1; + goto error; } + + /* All done -- set the ready flag */ + type->tp_flags = (type->tp_flags & ~Py_TPFLAGS_READYING) | Py_TPFLAGS_READY; + + assert(_PyType_CheckConsistency(type)); return 0; -} +error: + type->tp_flags &= ~Py_TPFLAGS_READYING; + return -1; +} int 
PyType_Ready(PyTypeObject *type) @@ -6960,35 +7018,38 @@ PyType_Ready(PyTypeObject *type) assert(_PyType_CheckConsistency(type)); return 0; } - _PyObject_ASSERT((PyObject *)type, - (type->tp_flags & Py_TPFLAGS_READYING) == 0); - - type->tp_flags |= Py_TPFLAGS_READYING; + assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); /* Historically, all static types were immutable. See bpo-43908 */ if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { type->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; } - if (type_ready(type) < 0) { - type->tp_flags &= ~Py_TPFLAGS_READYING; - return -1; - } - - /* All done -- set the ready flag */ - type->tp_flags = (type->tp_flags & ~Py_TPFLAGS_READYING) | Py_TPFLAGS_READY; - assert(_PyType_CheckConsistency(type)); - return 0; + return type_ready(type); } int _PyStaticType_InitBuiltin(PyTypeObject *self) { - self->tp_flags = self->tp_flags | _Py_TPFLAGS_STATIC_BUILTIN; + assert(_Py_IsImmortal((PyObject *)self)); + assert(!(self->tp_flags & Py_TPFLAGS_HEAPTYPE)); + + if (self->tp_flags & Py_TPFLAGS_READY) { + assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); + assert(_PyType_CheckConsistency(self)); + return 0; + } + + self->tp_flags |= _Py_TPFLAGS_STATIC_BUILTIN; + self->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; + + assert(NEXT_GLOBAL_VERSION_TAG <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG); + self->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++; + self->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; static_builtin_state_init(self); - int res = PyType_Ready(self); + int res = type_ready(self); if (res < 0) { static_builtin_state_clear(self); } @@ -7849,7 +7910,7 @@ slot_sq_length(PyObject *self) return -1; assert(PyLong_Check(res)); - if (Py_SIZE(res) < 0) { + if (_PyLong_IsNegative((PyLongObject *)res)) { Py_DECREF(res); PyErr_SetString(PyExc_ValueError, "__len__() should return >= 0"); @@ -8246,20 +8307,23 @@ _Py_slot_tp_getattr_hook(PyObject *self, PyObject *name) if (getattribute == NULL || (Py_IS_TYPE(getattribute, &PyWrapperDescr_Type) && ((PyWrapperDescrObject *)getattribute)->d_wrapped == - (void *)PyObject_GenericGetAttr)) - /* finding nothing is reasonable when __getattr__ is defined */ - res = _PyObject_GenericTryGetAttr(self, name); - else { + (void *)PyObject_GenericGetAttr)) { + res = _PyObject_GenericGetAttrWithDict(self, name, NULL, 1); + /* if res == NULL with no exception set, then it must be an + AttributeError suppressed by us. */ + if (res == NULL && !PyErr_Occurred()) { + res = call_attribute(self, getattr, name); + } + } else { Py_INCREF(getattribute); res = call_attribute(self, getattribute, name); Py_DECREF(getattribute); - } - if (res == NULL) { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + if (res == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Clear(); + res = call_attribute(self, getattr, name); } - res = call_attribute(self, getattr, name); } + Py_DECREF(getattr); return res; } @@ -9113,13 +9177,15 @@ type_new_set_names(PyTypeObject *type) Py_DECREF(set_name); if (res == NULL) { - _PyErr_FormatFromCause(PyExc_RuntimeError, + _PyErr_FormatNote( "Error calling __set_name__ on '%.100s' instance %R " "in '%.100s'", Py_TYPE(value)->tp_name, key, type->tp_name); goto error; } - Py_DECREF(res); + else { + Py_DECREF(res); + } } Py_DECREF(names_to_set); @@ -9331,42 +9397,33 @@ super_repr(PyObject *self) su->type ? su->type->tp_name : "NULL"); } +/* Do a super lookup without executing descriptors or falling back to getattr +on the super object itself. + +May return NULL with or without an exception set, like PyDict_GetItemWithError. 
*/ static PyObject * -super_getattro(PyObject *self, PyObject *name) +_super_lookup_descr(PyTypeObject *su_type, PyTypeObject *su_obj_type, PyObject *name) { - superobject *su = (superobject *)self; - PyTypeObject *starttype; - PyObject *mro; + PyObject *mro, *res; Py_ssize_t i, n; - starttype = su->obj_type; - if (starttype == NULL) - goto skip; - - /* We want __class__ to return the class of the super object - (i.e. super, or a subclass), not the class of su->obj. */ - if (PyUnicode_Check(name) && - PyUnicode_GET_LENGTH(name) == 9 && - _PyUnicode_Equal(name, &_Py_ID(__class__))) - goto skip; - - mro = starttype->tp_mro; + mro = su_obj_type->tp_mro; if (mro == NULL) - goto skip; + return NULL; assert(PyTuple_Check(mro)); n = PyTuple_GET_SIZE(mro); /* No need to check the last one: it's gonna be skipped anyway. */ for (i = 0; i+1 < n; i++) { - if ((PyObject *)(su->type) == PyTuple_GET_ITEM(mro, i)) + if ((PyObject *)(su_type) == PyTuple_GET_ITEM(mro, i)) break; } i++; /* skip su->type (if any) */ if (i >= n) - goto skip; + return NULL; - /* keep a strong reference to mro because starttype->tp_mro can be + /* keep a strong reference to mro because su_obj_type->tp_mro can be replaced during PyDict_GetItemWithError(dict, name) */ Py_INCREF(mro); do { @@ -9374,21 +9431,9 @@ super_getattro(PyObject *self, PyObject *name) PyObject *dict = _PyType_CAST(obj)->tp_dict; assert(dict != NULL && PyDict_Check(dict)); - PyObject *res = PyDict_GetItemWithError(dict, name); + res = PyDict_GetItemWithError(dict, name); if (res != NULL) { Py_INCREF(res); - - descrgetfunc f = Py_TYPE(res)->tp_descr_get; - if (f != NULL) { - PyObject *res2; - res2 = f(res, - /* Only pass 'obj' param if this is instance-mode super - (See SF ID #743627) */ - (su->obj == (PyObject *)starttype) ? NULL : su->obj, - (PyObject *)starttype); - Py_SETREF(res, res2); - } - Py_DECREF(mro); return res; } @@ -9400,9 +9445,75 @@ super_getattro(PyObject *self, PyObject *name) i++; } while (i < n); Py_DECREF(mro); + return NULL; +} + +// if `method` is non-NULL, we are looking for a method descriptor, +// and setting `*method = 1` means we found one. +static PyObject * +do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj, + PyTypeObject *su_obj_type, PyObject *name, int *method) +{ + PyObject *res; + int temp_su = 0; + + if (su_obj_type == NULL) { + goto skip; + } + + res = _super_lookup_descr(su_type, su_obj_type, name); + if (res != NULL) { + if (method && _PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) { + *method = 1; + } + else { + descrgetfunc f = Py_TYPE(res)->tp_descr_get; + if (f != NULL) { + PyObject *res2; + res2 = f(res, + /* Only pass 'obj' param if this is instance-mode super + (See SF ID #743627) */ + (su_obj == (PyObject *)su_obj_type) ? NULL : su_obj, + (PyObject *)su_obj_type); + Py_SETREF(res, res2); + } + } + + return res; + } + else if (PyErr_Occurred()) { + return NULL; + } skip: - return PyObject_GenericGetAttr(self, name); + if (su == NULL) { + PyObject *args[] = {(PyObject *)su_type, su_obj}; + su = (superobject *)PyObject_Vectorcall((PyObject *)&PySuper_Type, args, 2, NULL); + if (su == NULL) { + return NULL; + } + temp_su = 1; + } + res = PyObject_GenericGetAttr((PyObject *)su, name); + if (temp_su) { + Py_DECREF(su); + } + return res; +} + +static PyObject * +super_getattro(PyObject *self, PyObject *name) +{ + superobject *su = (superobject *)self; + + /* We want __class__ to return the class of the super object + (i.e. super, or a subclass), not the class of su->obj. 
*/ + if (PyUnicode_Check(name) && + PyUnicode_GET_LENGTH(name) == 9 && + _PyUnicode_Equal(name, &_Py_ID(__class__))) + return PyObject_GenericGetAttr(self, name); + + return do_super_lookup(su, su->type, su->obj, su->obj_type, name, NULL); } static PyTypeObject * @@ -9458,6 +9569,30 @@ supercheck(PyTypeObject *type, PyObject *obj) return NULL; } +PyObject * +_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *method) +{ + PyTypeObject *su_obj_type = supercheck(su_type, su_obj); + if (su_obj_type == NULL) { + return NULL; + } + PyObject *res = do_super_lookup(NULL, su_type, su_obj, su_obj_type, name, method); + Py_DECREF(su_obj_type); + return res; +} + +PyObject * +_PySuper_LookupDescr(PyTypeObject *su_type, PyObject *su_obj, PyObject *name) +{ + PyTypeObject *su_obj_type = supercheck(su_type, su_obj); + if (su_obj_type == NULL) { + return NULL; + } + PyObject *res = _super_lookup_descr(su_type, su_obj_type, name); + Py_DECREF(su_obj_type); + return res; +} + static PyObject * super_descr_get(PyObject *self, PyObject *obj, PyObject *type) { diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index b9fb53147b9b51..7537c12e92680c 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -228,17 +228,39 @@ static inline PyObject* unicode_new_empty(void) to strings in this dictionary are *not* counted in the string's ob_refcnt. When the interned string reaches a refcnt of 0 the string deallocation function will delete the reference from this dictionary. - Another way to look at this is that to say that the actual reference - count of a string is: s->ob_refcnt + (s->state ? 2 : 0) */ -static inline PyObject *get_interned_dict(void) +static inline PyObject *get_interned_dict(PyInterpreterState *interp) { - return _Py_CACHED_OBJECT(interned_strings); + return _Py_INTERP_CACHED_OBJECT(interp, interned_strings); } -static inline void set_interned_dict(PyObject *dict) +Py_ssize_t +_PyUnicode_InternedSize() +{ + return PyObject_Length(get_interned_dict(_PyInterpreterState_GET())); +} + +static int +init_interned_dict(PyInterpreterState *interp) +{ + assert(get_interned_dict(interp) == NULL); + PyObject *interned = interned = PyDict_New(); + if (interned == NULL) { + return -1; + } + _Py_INTERP_CACHED_OBJECT(interp, interned_strings) = interned; + return 0; +} + +static void +clear_interned_dict(PyInterpreterState *interp) { - _Py_CACHED_OBJECT(interned_strings) = dict; + PyObject *interned = get_interned_dict(interp); + if (interned != NULL) { + PyDict_Clear(interned); + Py_DECREF(interned); + _Py_INTERP_CACHED_OBJECT(interp, interned_strings) = NULL; + } } #define _Py_RETURN_UNICODE_EMPTY() \ @@ -1525,23 +1547,14 @@ unicode_dealloc(PyObject *unicode) _Py_FatalRefcountError("deallocating an Unicode singleton"); } #endif - PyObject *interned = get_interned_dict(); + /* This should never get called, but we also don't want to SEGV if + * we accidentally decref an immortal string out of existence. Since + * the string is an immortal object, just re-set the reference count. + */ if (PyUnicode_CHECK_INTERNED(unicode)) { - /* Revive the dead object temporarily. PyDict_DelItem() removes two - references (key and value) which were ignored by - PyUnicode_InternInPlace(). Use refcnt=3 rather than refcnt=2 - to prevent calling unicode_dealloc() again. Adjust refcnt after - PyDict_DelItem(). 
*/ - assert(Py_REFCNT(unicode) == 0); - Py_SET_REFCNT(unicode, 3); - if (PyDict_DelItem(interned, unicode) != 0) { - _PyErr_WriteUnraisableMsg("deletion of interned string failed", - NULL); - } - assert(Py_REFCNT(unicode) == 1); - Py_SET_REFCNT(unicode, 0); + _Py_SetImmortal(unicode); + return; } - if (_PyUnicode_HAS_UTF8_MEMORY(unicode)) { PyObject_Free(_PyUnicode_UTF8(unicode)); } @@ -14529,34 +14542,29 @@ _PyUnicode_InitState(PyInterpreterState *interp) PyStatus _PyUnicode_InitGlobalObjects(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - // Initialize the global interned dict - PyObject *interned = PyDict_New(); - if (interned == NULL) { + if (init_interned_dict(interp)) { PyErr_Clear(); return _PyStatus_ERR("failed to create interned dict"); } - set_interned_dict(interned); - - /* Intern statically allocated string identifiers and deepfreeze strings. - * This must be done before any module initialization so that statically - * allocated string identifiers are used instead of heap allocated strings. - * Deepfreeze uses the interned identifiers if present to save space - * else generates them and they are interned to speed up dict lookups. - */ - _PyUnicode_InitStaticStrings(); + if (_Py_IsMainInterpreter(interp)) { + /* Intern statically allocated string identifiers and deepfreeze strings. + * This must be done before any module initialization so that statically + * allocated string identifiers are used instead of heap allocated strings. + * Deepfreeze uses the interned identifiers if present to save space + * else generates them and they are interned to speed up dict lookups. + */ + _PyUnicode_InitStaticStrings(interp); #ifdef Py_DEBUG - assert(_PyUnicode_CheckConsistency(&_Py_STR(empty), 1)); + assert(_PyUnicode_CheckConsistency(&_Py_STR(empty), 1)); - for (int i = 0; i < 256; i++) { - assert(_PyUnicode_CheckConsistency(LATIN1(i), 1)); - } + for (int i = 0; i < 256; i++) { + assert(_PyUnicode_CheckConsistency(LATIN1(i), 1)); + } #endif + } return _PyStatus_OK(); } @@ -14565,10 +14573,6 @@ _PyUnicode_InitGlobalObjects(PyInterpreterState *interp) PyStatus _PyUnicode_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - if (_PyStaticType_InitBuiltin(&EncodingMapType) < 0) { goto error; } @@ -14586,7 +14590,7 @@ _PyUnicode_InitTypes(PyInterpreterState *interp) void -PyUnicode_InternInPlace(PyObject **p) +_PyUnicode_InternInPlace(PyInterpreterState *interp, PyObject **p) { PyObject *s = *p; #ifdef Py_DEBUG @@ -14608,7 +14612,7 @@ PyUnicode_InternInPlace(PyObject **p) return; } - PyObject *interned = get_interned_dict(); + PyObject *interned = get_interned_dict(interp); assert(interned != NULL); PyObject *t = PyDict_SetDefault(interned, s, s); @@ -14622,11 +14626,28 @@ PyUnicode_InternInPlace(PyObject **p) return; } - /* The two references in interned dict (key and value) are not counted by - refcnt. unicode_dealloc() and _PyUnicode_ClearInterned() take care of - this. */ - Py_SET_REFCNT(s, Py_REFCNT(s) - 2); - _PyUnicode_STATE(s).interned = 1; + if (_Py_IsImmortal(s)) { + _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL_STATIC; + return; + } +#ifdef Py_REF_DEBUG + /* The reference count value excluding the 2 references from the + interned dictionary should be excluded from the RefTotal. The + decrements to these objects will not be registered so they + need to be accounted for in here. 
*/ + for (Py_ssize_t i = 0; i < Py_REFCNT(s) - 2; i++) { + _Py_DecRefTotal(_PyInterpreterState_GET()); + } +#endif + _Py_SetImmortal(s); + _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL; +} + +void +PyUnicode_InternInPlace(PyObject **p) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + _PyUnicode_InternInPlace(interp, p); } // Function kept for the stable ABI. @@ -14653,21 +14674,26 @@ PyUnicode_InternFromString(const char *cp) void _PyUnicode_ClearInterned(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - // interned dict is shared by all interpreters - return; - } - - PyObject *interned = get_interned_dict(); + PyObject *interned = get_interned_dict(interp); if (interned == NULL) { return; } assert(PyDict_CheckExact(interned)); - /* Interned unicode strings are not forcibly deallocated; rather, we give - them their stolen references back, and then clear and DECREF the - interned dict. */ - + /* TODO: + * Currently, the runtime is not able to guarantee that it can exit without + * allocations that carry over to a future initialization of Python within + * the same process. i.e: + * ./python -X showrefcount -c 'import itertools' + * [237 refs, 237 blocks] + * + * Therefore, this should remain disabled for until there is a strict guarantee + * that no memory will be left after `Py_Finalize`. + */ +#ifdef Py_DEBUG + /* For all non-singleton interned strings, restore the two valid references + to that instance from within the intern string dictionary and let the + normal reference counting process clean up these instances. */ #ifdef INTERNED_STATS fprintf(stderr, "releasing %zd interned strings\n", PyDict_GET_SIZE(interned)); @@ -14677,15 +14703,27 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp) Py_ssize_t pos = 0; PyObject *s, *ignored_value; while (PyDict_Next(interned, &pos, &s, &ignored_value)) { - assert(PyUnicode_CHECK_INTERNED(s)); - // Restore the two references (key and value) ignored - // by PyUnicode_InternInPlace(). - Py_SET_REFCNT(s, Py_REFCNT(s) + 2); + assert(PyUnicode_IS_READY(s)); + switch (PyUnicode_CHECK_INTERNED(s)) { + case SSTATE_INTERNED_IMMORTAL: + // Skip the Immortal Instance check and restore + // the two references (key and value) ignored + // by PyUnicode_InternInPlace(). 
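/* --- Illustrative sketch, not part of the patch ---------------------------
 * With the interning changes above, PyUnicode_InternInPlace() immortalizes a
 * mortal heap string via _Py_SetImmortal() and marks it
 * SSTATE_INTERNED_IMMORTAL instead of stealing two references; the
 * per-string bookkeeping is unwound only under Py_DEBUG in
 * _PyUnicode_ClearInterned().  `intern_example` is a hypothetical caller.
 */
static void
intern_example(void)
{
    PyObject *s = PyUnicode_FromString("example-key");  /* ordinary heap string */
    if (s == NULL) {
        return;
    }
    PyUnicode_InternInPlace(&s);
    /* `s` is now immortal: later Py_INCREF/Py_DECREF pairs no longer control
     * its lifetime, and the interned-dict entry is cleared at finalization. */
    Py_DECREF(s);
}
/* ------------------------------------------------------------------------ */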
+ s->ob_refcnt = 2; #ifdef INTERNED_STATS - total_length += PyUnicode_GET_LENGTH(s); + total_length += PyUnicode_GET_LENGTH(s); #endif - - _PyUnicode_STATE(s).interned = 0; + break; + case SSTATE_INTERNED_IMMORTAL_STATIC: + break; + case SSTATE_INTERNED_MORTAL: + /* fall through */ + case SSTATE_NOT_INTERNED: + /* fall through */ + default: + Py_UNREACHABLE(); + } + _PyUnicode_STATE(s).interned = SSTATE_NOT_INTERNED; } #ifdef INTERNED_STATS fprintf(stderr, @@ -14693,9 +14731,13 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp) total_length); #endif - PyDict_Clear(interned); - Py_DECREF(interned); - set_interned_dict(NULL); + struct _Py_unicode_state *state = &interp->unicode; + struct _Py_unicode_ids *ids = &state->ids; + for (Py_ssize_t i=0; i < ids->size; i++) { + Py_XINCREF(ids->array[i]); + } +#endif /* Py_DEBUG */ + clear_interned_dict(interp); } @@ -15108,7 +15150,7 @@ _PyUnicode_EnableLegacyWindowsFSEncoding(void) static inline int unicode_is_finalizing(void) { - return (get_interned_dict() == NULL); + return (get_interned_dict(_PyInterpreterState_Main()) == NULL); } #endif @@ -15131,14 +15173,13 @@ _PyUnicode_Fini(PyInterpreterState *interp) { struct _Py_unicode_state *state = &interp->unicode; - if (_Py_IsMainInterpreter(interp)) { - // _PyUnicode_ClearInterned() must be called before _PyUnicode_Fini() - assert(get_interned_dict() == NULL); - // bpo-47182: force a unicodedata CAPI capsule re-import on - // subsequent initialization of main interpreter. - } + // _PyUnicode_ClearInterned() must be called before _PyUnicode_Fini() + assert(get_interned_dict(interp) == NULL); _PyUnicode_FiniEncodings(&state->fs_codec); + + // bpo-47182: force a unicodedata CAPI capsule re-import on + // subsequent initialization of interpreter. interp->unicode.ucnhash_capi = NULL; unicode_clear_identifiers(state); diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 5a3e49a6fe45e3..c1afe63ecf66f6 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -170,10 +170,7 @@ weakref_repr(PyWeakReference *self) } Py_INCREF(obj); - if (_PyObject_LookupAttr(obj, &_Py_ID(__name__), &name) < 0) { - Py_DECREF(obj); - return NULL; - } + name = _PyObject_LookupSpecial(obj, &_Py_ID(__name__)); if (name == NULL || !PyUnicode_Check(name)) { repr = PyUnicode_FromFormat( "<weakref at %p; to '%s' at %p>", diff --git a/PC/clinic/winreg.c.h b/PC/clinic/winreg.c.h index 7a9474301da8a1..4109c85276f0a4 100644 --- a/PC/clinic/winreg.c.h +++ b/PC/clinic/winreg.c.h @@ -219,14 +219,14 @@ winreg_ConnectRegistry(PyObject *module, PyObject *const *args, Py_ssize_t nargs _PyArg_BadArgument("ConnectRegistry", "argument 1", "str or None", args[0]); goto exit; } - if (!clinic_HKEY_converter(args[1], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[1], &key)) { goto exit; } _return_value = winreg_ConnectRegistry_impl(module, computer_name, key); if (_return_value == NULL) { goto exit; } - return_value = PyHKEY_FromHKEY(_return_value); + return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value); exit: /* Cleanup for computer_name */ @@ -275,7 +275,7 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("CreateKey", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -295,7 +295,7 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (_return_value 
== NULL) { goto exit; } - return_value = PyHKEY_FromHKEY(_return_value); + return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value); exit: /* Cleanup for sub_key */ @@ -382,7 +382,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py if (!args) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -419,7 +419,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py if (_return_value == NULL) { goto exit; } - return_value = PyHKEY_FromHKEY(_return_value); + return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value); exit: /* Cleanup for sub_key */ @@ -466,7 +466,7 @@ winreg_DeleteKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("DeleteKey", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (!PyUnicode_Check(args[1])) { @@ -566,7 +566,7 @@ winreg_DeleteKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py if (!args) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (!PyUnicode_Check(args[1])) { @@ -634,7 +634,7 @@ winreg_DeleteValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("DeleteValue", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -694,7 +694,7 @@ winreg_EnumKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("EnumKey", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } index = _PyLong_AsInt(args[1]); @@ -751,7 +751,7 @@ winreg_EnumValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("EnumValue", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } index = _PyLong_AsInt(args[1]); @@ -839,7 +839,7 @@ winreg_FlushKey(PyObject *module, PyObject *arg) PyObject *return_value = NULL; HKEY key; - if (!clinic_HKEY_converter(arg, &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) { goto exit; } return_value = winreg_FlushKey_impl(module, key); @@ -898,7 +898,7 @@ winreg_LoadKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("LoadKey", nargs, 3, 3)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (!PyUnicode_Check(args[1])) { @@ -999,7 +999,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje if (!args) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -1036,7 +1036,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje if (_return_value == NULL) { goto exit; } - return_value = PyHKEY_FromHKEY(_return_value); + return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), 
_return_value); exit: /* Cleanup for sub_key */ @@ -1116,7 +1116,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb if (!args) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -1153,7 +1153,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb if (_return_value == NULL) { goto exit; } - return_value = PyHKEY_FromHKEY(_return_value); + return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value); exit: /* Cleanup for sub_key */ @@ -1193,7 +1193,7 @@ winreg_QueryInfoKey(PyObject *module, PyObject *arg) PyObject *return_value = NULL; HKEY key; - if (!clinic_HKEY_converter(arg, &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) { goto exit; } return_value = winreg_QueryInfoKey_impl(module, key); @@ -1242,7 +1242,7 @@ winreg_QueryValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("QueryValue", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -1303,7 +1303,7 @@ winreg_QueryValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("QueryValueEx", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -1369,7 +1369,7 @@ winreg_SaveKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("SaveKey", nargs, 2, 2)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (!PyUnicode_Check(args[1])) { @@ -1438,7 +1438,7 @@ winreg_SetValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("SetValue", nargs, 4, 4)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -1542,7 +1542,7 @@ winreg_SetValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("SetValueEx", nargs, 5, 5)) { goto exit; } - if (!clinic_HKEY_converter(args[0], &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) { goto exit; } if (args[1] == Py_None) { @@ -1603,7 +1603,7 @@ winreg_DisableReflectionKey(PyObject *module, PyObject *arg) PyObject *return_value = NULL; HKEY key; - if (!clinic_HKEY_converter(arg, &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) { goto exit; } return_value = winreg_DisableReflectionKey_impl(module, key); @@ -1641,7 +1641,7 @@ winreg_EnableReflectionKey(PyObject *module, PyObject *arg) PyObject *return_value = NULL; HKEY key; - if (!clinic_HKEY_converter(arg, &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) { goto exit; } return_value = winreg_EnableReflectionKey_impl(module, key); @@ -1677,7 +1677,7 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg) PyObject *return_value = NULL; HKEY key; - if (!clinic_HKEY_converter(arg, &key)) { + if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) { goto exit; } return_value = winreg_QueryReflectionKey_impl(module, key); @@ -1795,4 +1795,4 @@ 
winreg_QueryReflectionKey(PyObject *module, PyObject *arg) #ifndef WINREG_QUERYREFLECTIONKEY_METHODDEF #define WINREG_QUERYREFLECTIONKEY_METHODDEF #endif /* !defined(WINREG_QUERYREFLECTIONKEY_METHODDEF) */ -/*[clinic end generated code: output=715db416dc1321ee input=a9049054013a1b77]*/ +/*[clinic end generated code: output=15dc2e6c4d4e2ad5 input=a9049054013a1b77]*/ diff --git a/PC/launcher.c b/PC/launcher.c index 0776e57249c427..dc265533740b67 100644 --- a/PC/launcher.c +++ b/PC/launcher.c @@ -449,7 +449,7 @@ locate_pythons_for_key(HKEY root, REGSAM flags) } static void -locate_store_pythons() +locate_store_pythons(void) { #if defined(_M_X64) /* 64bit process, so look in native registry */ @@ -466,7 +466,7 @@ locate_store_pythons() } static void -locate_venv_python() +locate_venv_python(void) { static wchar_t venv_python[MAX_PATH]; INSTALLED_PYTHON * ip; @@ -495,7 +495,7 @@ locate_venv_python() } static void -locate_all_pythons() +locate_all_pythons(void) { /* venv Python is highest priority */ locate_venv_python(); @@ -694,7 +694,7 @@ static wchar_t wrapped_script_path[MAX_PATH]; * valid wrapped script file. */ static void -locate_wrapped_script() +locate_wrapped_script(void) { wchar_t * p; size_t plen; @@ -1034,7 +1034,7 @@ read_config_file(wchar_t * config_path) } } -static void read_commands() +static void read_commands(void) { if (launcher_ini_path[0]) read_config_file(launcher_ini_path); @@ -1684,7 +1684,7 @@ wcsdup_pad(const wchar_t *s, int padding, int *newlen) } static wchar_t * -get_process_name() +get_process_name(void) { DWORD bufferLen = MAX_PATH; DWORD len = bufferLen; diff --git a/PC/launcher2.c b/PC/launcher2.c index 932665387f1966..bb500d4b6bfb07 100644 --- a/PC/launcher2.c +++ b/PC/launcher2.c @@ -132,7 +132,7 @@ typedef BOOL (*PIsWow64Process2)(HANDLE, USHORT*, USHORT*); USHORT -_getNativeMachine() +_getNativeMachine(void) { static USHORT _nativeMachine = IMAGE_FILE_MACHINE_UNKNOWN; if (_nativeMachine == IMAGE_FILE_MACHINE_UNKNOWN) { @@ -163,14 +163,14 @@ _getNativeMachine() bool -isAMD64Host() +isAMD64Host(void) { return _getNativeMachine() == IMAGE_FILE_MACHINE_AMD64; } bool -isARM64Host() +isARM64Host(void) { return _getNativeMachine() == IMAGE_FILE_MACHINE_ARM64; } diff --git a/PC/layout/support/pip.py b/PC/layout/support/pip.py index c54acb250a252e..0a6582acf348a3 100644 --- a/PC/layout/support/pip.py +++ b/PC/layout/support/pip.py @@ -67,7 +67,6 @@ def extract_pip_files(ns): "--no-color", "install", "pip", - "setuptools", "--upgrade", "--target", str(dest), diff --git a/PC/msvcrtmodule.c b/PC/msvcrtmodule.c index face4d03af9d4f..090254befc934d 100644 --- a/PC/msvcrtmodule.c +++ b/PC/msvcrtmodule.c @@ -564,110 +564,115 @@ static struct PyMethodDef msvcrt_functions[] = { {NULL, NULL} }; - -static struct PyModuleDef msvcrtmodule = { - PyModuleDef_HEAD_INIT, - "msvcrt", - NULL, - -1, - msvcrt_functions, - NULL, - NULL, - NULL, - NULL -}; - -static void -insertint(PyObject *d, char *name, int value) -{ - PyObject *v = PyLong_FromLong((long) value); - if (v == NULL) { - /* Don't bother reporting this error */ - PyErr_Clear(); - } - else { - PyDict_SetItemString(d, name, v); - Py_DECREF(v); - } -} - -static void -insertptr(PyObject *d, char *name, void *value) +static int +insertptr(PyObject *mod, char *name, void *value) { PyObject *v = PyLong_FromVoidPtr(value); if (v == NULL) { - /* Don't bother reporting this error */ - PyErr_Clear(); - } - else { - PyDict_SetItemString(d, name, v); - Py_DECREF(v); + return -1; } + int rc = PyModule_AddObjectRef(mod, name, v); + 
Py_DECREF(v); + return rc; } -PyMODINIT_FUNC -PyInit_msvcrt(void) -{ - int st; - PyObject *d, *version; - PyObject *m = PyModule_Create(&msvcrtmodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); +#define INSERTINT(MOD, NAME, VAL) do { \ + if (PyModule_AddIntConstant(MOD, NAME, VAL) < 0) { \ + return -1; \ + } \ +} while (0) + +#define INSERTPTR(MOD, NAME, PTR) do { \ + if (insertptr(MOD, NAME, PTR) < 0) { \ + return -1; \ + } \ +} while (0) + +#define INSERTSTR(MOD, NAME, CONST) do { \ + if (PyModule_AddStringConstant(MOD, NAME, CONST) < 0) { \ + return -1; \ + } \ +} while (0) +static int +exec_module(PyObject* m) +{ /* constants for the locking() function's mode argument */ - insertint(d, "LK_LOCK", _LK_LOCK); - insertint(d, "LK_NBLCK", _LK_NBLCK); - insertint(d, "LK_NBRLCK", _LK_NBRLCK); - insertint(d, "LK_RLCK", _LK_RLCK); - insertint(d, "LK_UNLCK", _LK_UNLCK); + INSERTINT(m, "LK_LOCK", _LK_LOCK); + INSERTINT(m, "LK_NBLCK", _LK_NBLCK); + INSERTINT(m, "LK_NBRLCK", _LK_NBRLCK); + INSERTINT(m, "LK_RLCK", _LK_RLCK); + INSERTINT(m, "LK_UNLCK", _LK_UNLCK); #ifdef MS_WINDOWS_DESKTOP - insertint(d, "SEM_FAILCRITICALERRORS", SEM_FAILCRITICALERRORS); - insertint(d, "SEM_NOALIGNMENTFAULTEXCEPT", SEM_NOALIGNMENTFAULTEXCEPT); - insertint(d, "SEM_NOGPFAULTERRORBOX", SEM_NOGPFAULTERRORBOX); - insertint(d, "SEM_NOOPENFILEERRORBOX", SEM_NOOPENFILEERRORBOX); + INSERTINT(m, "SEM_FAILCRITICALERRORS", SEM_FAILCRITICALERRORS); + INSERTINT(m, "SEM_NOALIGNMENTFAULTEXCEPT", SEM_NOALIGNMENTFAULTEXCEPT); + INSERTINT(m, "SEM_NOGPFAULTERRORBOX", SEM_NOGPFAULTERRORBOX); + INSERTINT(m, "SEM_NOOPENFILEERRORBOX", SEM_NOOPENFILEERRORBOX); #endif #ifdef _DEBUG - insertint(d, "CRT_WARN", _CRT_WARN); - insertint(d, "CRT_ERROR", _CRT_ERROR); - insertint(d, "CRT_ASSERT", _CRT_ASSERT); - insertint(d, "CRTDBG_MODE_DEBUG", _CRTDBG_MODE_DEBUG); - insertint(d, "CRTDBG_MODE_FILE", _CRTDBG_MODE_FILE); - insertint(d, "CRTDBG_MODE_WNDW", _CRTDBG_MODE_WNDW); - insertint(d, "CRTDBG_REPORT_MODE", _CRTDBG_REPORT_MODE); - insertptr(d, "CRTDBG_FILE_STDERR", _CRTDBG_FILE_STDERR); - insertptr(d, "CRTDBG_FILE_STDOUT", _CRTDBG_FILE_STDOUT); - insertptr(d, "CRTDBG_REPORT_FILE", _CRTDBG_REPORT_FILE); + INSERTINT(m, "CRT_WARN", _CRT_WARN); + INSERTINT(m, "CRT_ERROR", _CRT_ERROR); + INSERTINT(m, "CRT_ASSERT", _CRT_ASSERT); + INSERTINT(m, "CRTDBG_MODE_DEBUG", _CRTDBG_MODE_DEBUG); + INSERTINT(m, "CRTDBG_MODE_FILE", _CRTDBG_MODE_FILE); + INSERTINT(m, "CRTDBG_MODE_WNDW", _CRTDBG_MODE_WNDW); + INSERTINT(m, "CRTDBG_REPORT_MODE", _CRTDBG_REPORT_MODE); + INSERTPTR(m, "CRTDBG_FILE_STDERR", _CRTDBG_FILE_STDERR); + INSERTPTR(m, "CRTDBG_FILE_STDOUT", _CRTDBG_FILE_STDOUT); + INSERTPTR(m, "CRTDBG_REPORT_FILE", _CRTDBG_REPORT_FILE); #endif +#undef INSERTINT +#undef INSERTPTR + /* constants for the crt versions */ #ifdef _VC_ASSEMBLY_PUBLICKEYTOKEN - st = PyModule_AddStringConstant(m, "VC_ASSEMBLY_PUBLICKEYTOKEN", - _VC_ASSEMBLY_PUBLICKEYTOKEN); - if (st < 0) return NULL; + INSERTSTR(m, "VC_ASSEMBLY_PUBLICKEYTOKEN", _VC_ASSEMBLY_PUBLICKEYTOKEN); #endif #ifdef _CRT_ASSEMBLY_VERSION - st = PyModule_AddStringConstant(m, "CRT_ASSEMBLY_VERSION", - _CRT_ASSEMBLY_VERSION); - if (st < 0) return NULL; + INSERTSTR(m, "CRT_ASSEMBLY_VERSION", _CRT_ASSEMBLY_VERSION); #endif #ifdef __LIBRARIES_ASSEMBLY_NAME_PREFIX - st = PyModule_AddStringConstant(m, "LIBRARIES_ASSEMBLY_NAME_PREFIX", - __LIBRARIES_ASSEMBLY_NAME_PREFIX); - if (st < 0) return NULL; + INSERTSTR(m, "LIBRARIES_ASSEMBLY_NAME_PREFIX", + __LIBRARIES_ASSEMBLY_NAME_PREFIX); #endif +#undef INSERTSTR + /* 
constants for the 2010 crt versions */ #if defined(_VC_CRT_MAJOR_VERSION) && defined (_VC_CRT_MINOR_VERSION) && defined(_VC_CRT_BUILD_VERSION) && defined(_VC_CRT_RBUILD_VERSION) - version = PyUnicode_FromFormat("%d.%d.%d.%d", _VC_CRT_MAJOR_VERSION, - _VC_CRT_MINOR_VERSION, - _VC_CRT_BUILD_VERSION, - _VC_CRT_RBUILD_VERSION); - st = PyModule_AddObject(m, "CRT_ASSEMBLY_VERSION", version); - if (st < 0) return NULL; + PyObject *version = PyUnicode_FromFormat("%d.%d.%d.%d", + _VC_CRT_MAJOR_VERSION, + _VC_CRT_MINOR_VERSION, + _VC_CRT_BUILD_VERSION, + _VC_CRT_RBUILD_VERSION); + if (version == NULL) { + return -1; + } + int st = PyModule_AddObjectRef(m, "CRT_ASSEMBLY_VERSION", version); + Py_DECREF(version); + if (st < 0) { + return -1; + } #endif - /* make compiler warning quiet if st is unused */ - (void)st; - return m; + return 0; +} + +static PyModuleDef_Slot msvcrt_slots[] = { + {Py_mod_exec, exec_module}, + {0, NULL} +}; + +static struct PyModuleDef msvcrtmodule = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "msvcrt", + .m_methods = msvcrt_functions, + .m_slots = msvcrt_slots, +}; + +PyMODINIT_FUNC +PyInit_msvcrt(void) +{ + return PyModuleDef_Init(&msvcrtmodule); } diff --git a/PC/winreg.c b/PC/winreg.c index 073598a12a68aa..4884125c3609ad 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -15,15 +15,22 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" #include "pycore_object.h" // _PyObject_Init() +#include "pycore_moduleobject.h" #include "structmember.h" // PyMemberDef #include <windows.h> #if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES) -static BOOL PyHKEY_AsHKEY(PyObject *ob, HKEY *pRes, BOOL bNoneOK); -static BOOL clinic_HKEY_converter(PyObject *ob, void *p); -static PyObject *PyHKEY_FromHKEY(HKEY h); -static BOOL PyHKEY_Close(PyObject *obHandle); +typedef struct { + PyTypeObject *PyHKEY_Type; +} winreg_state; + +/* Forward declares */ + +static BOOL PyHKEY_AsHKEY(winreg_state *st, PyObject *ob, HKEY *pRes, BOOL bNoneOK); +static BOOL clinic_HKEY_converter(winreg_state *st, PyObject *ob, void *p); +static PyObject *PyHKEY_FromHKEY(winreg_state *st, HKEY h); +static BOOL PyHKEY_Close(winreg_state *st, PyObject *obHandle); static char errNotAHandle[] = "Object is not a handle"; @@ -35,8 +42,6 @@ static char errNotAHandle[] = "Object is not a handle"; #define PyErr_SetFromWindowsErrWithFunction(rc, fnname) \ PyErr_SetFromWindowsErr(rc) -/* Forward declares */ - /* Doc strings */ PyDoc_STRVAR(module_doc, "This module provides access to the Windows registry API.\n" @@ -114,7 +119,7 @@ typedef struct { HKEY hkey; } PyHKEYObject; -#define PyHKEY_Check(op) Py_IS_TYPE(op, &PyHKEY_Type) +#define PyHKEY_Check(st, op) Py_IS_TYPE(op, st->PyHKEY_Type) static char *failMsg = "bad operand type"; @@ -147,7 +152,18 @@ PyHKEY_deallocFunc(PyObject *ob) PyHKEYObject *obkey = (PyHKEYObject *)ob; if (obkey->hkey) RegCloseKey((HKEY)obkey->hkey); - PyObject_Free(ob); + + PyTypeObject *tp = Py_TYPE(ob); + PyObject_GC_UnTrack(ob); + PyObject_GC_Del(ob); + Py_DECREF(tp); +} + +static int +PyHKEY_traverseFunc(PyHKEYObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; } static int @@ -189,29 +205,6 @@ PyHKEY_hashFunc(PyObject *ob) } -static PyNumberMethods PyHKEY_NumberMethods = -{ - PyHKEY_binaryFailureFunc, /* nb_add */ - PyHKEY_binaryFailureFunc, /* nb_subtract */ - PyHKEY_binaryFailureFunc, /* nb_multiply */ - PyHKEY_binaryFailureFunc, /* nb_remainder */ - PyHKEY_binaryFailureFunc, /* nb_divmod */ - PyHKEY_ternaryFailureFunc, /* nb_power */ - 
PyHKEY_unaryFailureFunc, /* nb_negative */ - PyHKEY_unaryFailureFunc, /* nb_positive */ - PyHKEY_unaryFailureFunc, /* nb_absolute */ - PyHKEY_boolFunc, /* nb_bool */ - PyHKEY_unaryFailureFunc, /* nb_invert */ - PyHKEY_binaryFailureFunc, /* nb_lshift */ - PyHKEY_binaryFailureFunc, /* nb_rshift */ - PyHKEY_binaryFailureFunc, /* nb_and */ - PyHKEY_binaryFailureFunc, /* nb_xor */ - PyHKEY_binaryFailureFunc, /* nb_or */ - PyHKEY_intFunc, /* nb_int */ - 0, /* nb_reserved */ - PyHKEY_unaryFailureFunc, /* nb_float */ -}; - /*[clinic input] module winreg class winreg.HKEYType "PyHKEYObject *" "&PyHKEY_Type" @@ -229,6 +222,14 @@ class HKEY_converter(CConverter): type = 'HKEY' converter = 'clinic_HKEY_converter' + def parse_arg(self, argname, displayname): + return """ + if (!{converter}(_PyModule_GetState(module), {argname}, &{paramname})) {{{{ + goto exit; + }}}} + """.format(argname=argname, paramname=self.parser_name, + converter=self.converter) + class HKEY_return_converter(CReturnConverter): type = 'HKEY' @@ -236,7 +237,7 @@ class HKEY_return_converter(CReturnConverter): self.declare(data) self.err_occurred_if_null_pointer("_return_value", data) data.return_conversion.append( - 'return_value = PyHKEY_FromHKEY(_return_value);\n') + 'return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);\n') # HACK: this only works for PyHKEYObjects, nothing else. # Should this be generalized and enshrined in clinic.py, @@ -249,7 +250,7 @@ class self_return_converter(CReturnConverter): data.return_conversion.append( 'return_value = (PyObject *)_return_value;\n') [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=2ebb7a4922d408d6]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=17e645060c7b8ae1]*/ #include "clinic/winreg.c.h" @@ -270,8 +271,11 @@ static PyObject * winreg_HKEYType_Close_impl(PyHKEYObject *self) /*[clinic end generated code: output=fced3a624fb0c344 input=6786ac75f6b89de6]*/ { - if (!PyHKEY_Close((PyObject *)self)) + winreg_state *st = _PyType_GetModuleState(Py_TYPE(self)); + assert(st != NULL); + if (!PyHKEY_Close(st, (PyObject *)self)) { return NULL; + } Py_RETURN_NONE; } @@ -327,8 +331,11 @@ winreg_HKEYType___exit___impl(PyHKEYObject *self, PyObject *exc_type, PyObject *exc_value, PyObject *traceback) /*[clinic end generated code: output=923ebe7389e6a263 input=fb32489ee92403c7]*/ { - if (!PyHKEY_Close((PyObject *)self)) + winreg_state *st = _PyType_GetModuleState(Py_TYPE(self)); + assert(st != NULL); + if (!PyHKEY_Close(st, (PyObject *)self)) { return NULL; + } Py_RETURN_NONE; } @@ -350,62 +357,71 @@ static PyMemberDef PyHKEY_memberlist[] = { {NULL} /* Sentinel */ }; -/* The type itself */ -PyTypeObject PyHKEY_Type = -{ - PyVarObject_HEAD_INIT(0, 0) /* fill in type at module init */ - "PyHKEY", - sizeof(PyHKEYObject), - 0, - PyHKEY_deallocFunc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - &PyHKEY_NumberMethods, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - PyHKEY_hashFunc, /* tp_hash */ - 0, /* tp_call */ - PyHKEY_strFunc, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - 0, /* tp_flags */ - PyHKEY_doc, /* tp_doc */ - 0, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - PyHKEY_methods, /*tp_methods*/ - PyHKEY_memberlist, /*tp_members*/ +static PyType_Slot pyhkey_type_slots[] = { + {Py_tp_dealloc, 
PyHKEY_deallocFunc}, + {Py_tp_members, PyHKEY_memberlist}, + {Py_tp_methods, PyHKEY_methods}, + {Py_tp_doc, (char *)PyHKEY_doc}, + {Py_tp_traverse, PyHKEY_traverseFunc}, + {Py_tp_hash, PyHKEY_hashFunc}, + {Py_tp_str, PyHKEY_strFunc}, + + // Number protocol + {Py_nb_add, PyHKEY_binaryFailureFunc}, + {Py_nb_subtract, PyHKEY_binaryFailureFunc}, + {Py_nb_multiply, PyHKEY_binaryFailureFunc}, + {Py_nb_remainder, PyHKEY_binaryFailureFunc}, + {Py_nb_divmod, PyHKEY_binaryFailureFunc}, + {Py_nb_power, PyHKEY_ternaryFailureFunc}, + {Py_nb_negative, PyHKEY_unaryFailureFunc}, + {Py_nb_positive, PyHKEY_unaryFailureFunc}, + {Py_nb_absolute, PyHKEY_unaryFailureFunc}, + {Py_nb_bool, PyHKEY_boolFunc}, + {Py_nb_invert, PyHKEY_unaryFailureFunc}, + {Py_nb_lshift, PyHKEY_binaryFailureFunc}, + {Py_nb_rshift, PyHKEY_binaryFailureFunc}, + {Py_nb_and, PyHKEY_binaryFailureFunc}, + {Py_nb_xor, PyHKEY_binaryFailureFunc}, + {Py_nb_or, PyHKEY_binaryFailureFunc}, + {Py_nb_int, PyHKEY_intFunc}, + {Py_nb_float, PyHKEY_unaryFailureFunc}, + {0, NULL}, +}; + +static PyType_Spec pyhkey_type_spec = { + .name = "winreg.PyHKEY", + .basicsize = sizeof(PyHKEYObject), + .flags = (Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE | + Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = pyhkey_type_slots, }; /************************************************************************ The public PyHKEY API (well, not public yet :-) ************************************************************************/ PyObject * -PyHKEY_New(HKEY hInit) +PyHKEY_New(PyObject *m, HKEY hInit) { - PyHKEYObject *key = PyObject_New(PyHKEYObject, &PyHKEY_Type); - if (key) - key->hkey = hInit; + winreg_state *st = _PyModule_GetState(m); + PyHKEYObject *key = PyObject_GC_New(PyHKEYObject, st->PyHKEY_Type); + if (key == NULL) { + return NULL; + } + key->hkey = hInit; + PyObject_GC_Track(key); return (PyObject *)key; } BOOL -PyHKEY_Close(PyObject *ob_handle) +PyHKEY_Close(winreg_state *st, PyObject *ob_handle) { LONG rc; HKEY key; - if (!PyHKEY_AsHKEY(ob_handle, &key, TRUE)) { + if (!PyHKEY_AsHKEY(st, ob_handle, &key, TRUE)) { return FALSE; } - if (PyHKEY_Check(ob_handle)) { + if (PyHKEY_Check(st, ob_handle)) { ((PyHKEYObject*)ob_handle)->hkey = 0; } rc = key ? 
RegCloseKey(key) : ERROR_SUCCESS; @@ -415,7 +431,7 @@ PyHKEY_Close(PyObject *ob_handle) } BOOL -PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK) +PyHKEY_AsHKEY(winreg_state *st, PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK) { if (ob == Py_None) { if (!bNoneOK) { @@ -426,7 +442,7 @@ PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK) } *pHANDLE = (HKEY)0; } - else if (PyHKEY_Check(ob)) { + else if (PyHKEY_Check(st ,ob)) { PyHKEYObject *pH = (PyHKEYObject *)ob; *pHANDLE = pH->hkey; } @@ -447,23 +463,24 @@ PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK) } BOOL -clinic_HKEY_converter(PyObject *ob, void *p) +clinic_HKEY_converter(winreg_state *st, PyObject *ob, void *p) { - if (!PyHKEY_AsHKEY(ob, (HKEY *)p, FALSE)) + if (!PyHKEY_AsHKEY(st, ob, (HKEY *)p, FALSE)) { return FALSE; + } return TRUE; } PyObject * -PyHKEY_FromHKEY(HKEY h) +PyHKEY_FromHKEY(winreg_state *st, HKEY h) { - /* Inline PyObject_New */ - PyHKEYObject *op = (PyHKEYObject *) PyObject_Malloc(sizeof(PyHKEYObject)); + PyHKEYObject *op = (PyHKEYObject *)PyObject_GC_New(PyHKEYObject, + st->PyHKEY_Type); if (op == NULL) { - return PyErr_NoMemory(); + return NULL; } - _PyObject_Init((PyObject*)op, &PyHKEY_Type); op->hkey = h; + PyObject_GC_Track(op); return (PyObject *)op; } @@ -472,11 +489,11 @@ PyHKEY_FromHKEY(HKEY h) The module methods ************************************************************************/ BOOL -PyWinObject_CloseHKEY(PyObject *obHandle) +PyWinObject_CloseHKEY(winreg_state *st, PyObject *obHandle) { BOOL ok; - if (PyHKEY_Check(obHandle)) { - ok = PyHKEY_Close(obHandle); + if (PyHKEY_Check(st, obHandle)) { + ok = PyHKEY_Close(st, obHandle); } #if SIZEOF_LONG >= SIZEOF_HKEY else if (PyLong_Check(obHandle)) { @@ -826,8 +843,9 @@ static PyObject * winreg_CloseKey(PyObject *module, PyObject *hkey) /*[clinic end generated code: output=a4fa537019a80d15 input=5b1aac65ba5127ad]*/ { - if (!PyHKEY_Close(hkey)) + if (!PyHKEY_Close(_PyModule_GetState(module), hkey)) { return NULL; + } Py_RETURN_NONE; } @@ -2059,57 +2077,46 @@ static struct PyMethodDef winreg_methods[] = { NULL, }; -static void -insint(PyObject * d, char * name, long value) -{ - PyObject *v = PyLong_FromLong(value); - if (!v || PyDict_SetItemString(d, name, v)) - PyErr_Clear(); - Py_XDECREF(v); -} - -#define ADD_INT(val) insint(d, #val, val) +#define ADD_INT(VAL) do { \ + if (PyModule_AddIntConstant(m, #VAL, VAL) < 0) { \ + return -1; \ + } \ +} while (0) -static void -inskey(PyObject * d, char * name, HKEY key) +static int +inskey(PyObject *mod, char *name, HKEY key) { PyObject *v = PyLong_FromVoidPtr(key); - if (!v || PyDict_SetItemString(d, name, v)) - PyErr_Clear(); - Py_XDECREF(v); + if (v == NULL) { + return -1; + } + int rc = PyModule_AddObjectRef(mod, name, v); + Py_DECREF(v); + return rc; } -#define ADD_KEY(val) inskey(d, #val, val) +#define ADD_KEY(VAL) do { \ + if (inskey(m, #VAL, VAL) < 0) { \ + return -1; \ + } \ +} while (0) - -static struct PyModuleDef winregmodule = { - PyModuleDef_HEAD_INIT, - "winreg", - module_doc, - -1, - winreg_methods, - NULL, - NULL, - NULL, - NULL -}; - -PyMODINIT_FUNC PyInit_winreg(void) +static int +exec_module(PyObject *m) { - PyObject *m, *d; - m = PyModule_Create(&winregmodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); - PyHKEY_Type.tp_doc = PyHKEY_doc; - if (PyType_Ready(&PyHKEY_Type) < 0) - return NULL; - if (PyDict_SetItemString(d, "HKEYType", - (PyObject *)&PyHKEY_Type) != 0) - return NULL; - if (PyDict_SetItemString(d, "error", - PyExc_OSError) != 0) - return NULL; + 
winreg_state *st = (winreg_state *)_PyModule_GetState(m); + + st->PyHKEY_Type = (PyTypeObject *) + PyType_FromModuleAndSpec(m, &pyhkey_type_spec, NULL); + if (st->PyHKEY_Type == NULL) { + return -1; + } + if (PyModule_AddObjectRef(m, "HKEYType", (PyObject *)st->PyHKEY_Type) < 0) { + return -1; + } + if (PyModule_AddObjectRef(m, "error", PyExc_OSError) < 0) { + return -1; + } /* Add the relevant constants */ ADD_KEY(HKEY_CLASSES_ROOT); @@ -2170,7 +2177,46 @@ PyMODINIT_FUNC PyInit_winreg(void) ADD_INT(REG_RESOURCE_LIST); ADD_INT(REG_FULL_RESOURCE_DESCRIPTOR); ADD_INT(REG_RESOURCE_REQUIREMENTS_LIST); - return m; + +#undef ADD_INT + return 0; +} + +static PyModuleDef_Slot winreg_slots[] = { + {Py_mod_exec, exec_module}, + {0, NULL} +}; + +static int +winreg_traverse(PyObject *module, visitproc visit, void *arg) +{ + winreg_state *state = _PyModule_GetState(module); + Py_VISIT(state->PyHKEY_Type); + return 0; +} + +static int +winreg_clear(PyObject *module) +{ + winreg_state *state = _PyModule_GetState(module); + Py_CLEAR(state->PyHKEY_Type); + return 0; +} + +static struct PyModuleDef winregmodule = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "winreg", + .m_doc = module_doc, + .m_size = sizeof(winreg_state), + .m_methods = winreg_methods, + .m_slots = winreg_slots, + .m_traverse = winreg_traverse, + .m_clear = winreg_clear, +}; + +PyMODINIT_FUNC PyInit_winreg(void) +{ + return PyModuleDef_Init(&winregmodule); } #endif /* MS_WINDOWS_DESKTOP || MS_WINDOWS_SYSTEM || MS_WINDOWS_GAMES */ diff --git a/PC/winsound.c b/PC/winsound.c index 65025ddc5e1f51..17ce2ef423b1f9 100644 --- a/PC/winsound.c +++ b/PC/winsound.c @@ -202,42 +202,15 @@ static struct PyMethodDef sound_methods[] = {NULL, NULL} }; -static void -add_define(PyObject *dict, const char *key, long value) +#define ADD_DEFINE(CONST) do { \ + if (PyModule_AddIntConstant(module, #CONST, CONST) < 0) { \ + return -1; \ + } \ +} while (0) + +static int +exec_module(PyObject *module) { - PyObject *k = PyUnicode_FromString(key); - PyObject *v = PyLong_FromLong(value); - if (v && k) { - PyDict_SetItem(dict, k, v); - } - Py_XDECREF(k); - Py_XDECREF(v); -} - -#define ADD_DEFINE(tok) add_define(dict,#tok,tok) - - -static struct PyModuleDef winsoundmodule = { - PyModuleDef_HEAD_INIT, - "winsound", - sound_module_doc, - -1, - sound_methods, - NULL, - NULL, - NULL, - NULL -}; - -PyMODINIT_FUNC -PyInit_winsound(void) -{ - PyObject *dict; - PyObject *module = PyModule_Create(&winsoundmodule); - if (module == NULL) - return NULL; - dict = PyModule_GetDict(module); - ADD_DEFINE(SND_ASYNC); ADD_DEFINE(SND_NODEFAULT); ADD_DEFINE(SND_NOSTOP); @@ -254,5 +227,27 @@ PyInit_winsound(void) ADD_DEFINE(MB_ICONEXCLAMATION); ADD_DEFINE(MB_ICONHAND); ADD_DEFINE(MB_ICONQUESTION); - return module; + +#undef ADD_DEFINE + + return 0; +} + +static PyModuleDef_Slot sound_slots[] = { + {Py_mod_exec, exec_module}, + {0, NULL} +}; + +static struct PyModuleDef winsoundmodule = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "winsound", + .m_doc = sound_module_doc, + .m_methods = sound_methods, + .m_slots = sound_slots, +}; + +PyMODINIT_FUNC +PyInit_winsound(void) +{ + return PyModuleDef_Init(&winsoundmodule); } diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj index 4f39756019e692..d897925f58c0de 100644 --- a/PCbuild/_freeze_module.vcxproj +++ b/PCbuild/_freeze_module.vcxproj @@ -177,6 +177,7 @@ <ClCompile Include="..\PC\winreg.c" /> <ClCompile Include="..\Python\_warnings.c" /> <ClCompile Include="..\Python\asdl.c" /> + <ClCompile 
Include="..\Python\assemble.c" /> <ClCompile Include="..\Python\ast.c" /> <ClCompile Include="..\Python\ast_opt.c" /> <ClCompile Include="..\Python\ast_unparse.c" /> @@ -191,6 +192,7 @@ <ClCompile Include="..\Python\dynload_win.c" /> <ClCompile Include="..\Python\errors.c" /> <ClCompile Include="..\Python\fileutils.c" /> + <ClCompile Include="..\Python\flowgraph.c" /> <ClCompile Include="..\Python\formatter_unicode.c" /> <ClCompile Include="..\Python\frame.c" /> <ClCompile Include="..\Python\future.c" /> @@ -207,6 +209,8 @@ <ClCompile Include="..\Python\importdl.c" /> <ClCompile Include="..\Python\initconfig.c" /> <ClCompile Include="..\Python\intrinsics.c" /> + <ClCompile Include="..\Python\instrumentation.c" /> + <ClCompile Include="..\Python\legacy_tracing.c" /> <ClCompile Include="..\Python\marshal.c" /> <ClCompile Include="..\Python\modsupport.c" /> <ClCompile Include="..\Python\mysnprintf.c" /> diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters index 7d7c4587b9a3f3..176935a63c4852 100644 --- a/PCbuild/_freeze_module.vcxproj.filters +++ b/PCbuild/_freeze_module.vcxproj.filters @@ -28,6 +28,9 @@ <ClCompile Include="..\Python\asdl.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Python\assemble.c"> + <Filter>Source Files</Filter> + </ClCompile> <ClCompile Include="..\Python\ast.c"> <Filter>Source Files</Filter> </ClCompile> @@ -139,6 +142,9 @@ <ClCompile Include="..\Objects\floatobject.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Python\flowgraph.c"> + <Filter>Source Files</Filter> + </ClCompile> <ClCompile Include="..\Python\formatter_unicode.c"> <Filter>Source Files</Filter> </ClCompile> @@ -208,6 +214,12 @@ <ClCompile Include="..\Python\intrinsics.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Python\instrumentation.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Python\legacy_tracing.c"> + <Filter>Source Files</Filter> + </ClCompile> <ClCompile Include="..\Objects\interpreteridobject.c"> <Filter>Source Files</Filter> </ClCompile> diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index 4cc184bfc1ac82..439cd687fda61d 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -109,6 +109,7 @@ <ClCompile Include="..\Modules\_testcapi\structmember.c" /> <ClCompile Include="..\Modules\_testcapi\exceptions.c" /> <ClCompile Include="..\Modules\_testcapi\code.c" /> + <ClCompile Include="..\Modules\_testcapi\pyos.c" /> </ItemGroup> <ItemGroup> <ResourceCompile Include="..\PC\python_nt.rc" /> diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index fbdaf04ce37cb1..0e42e4982c21ff 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -57,6 +57,9 @@ <ClCompile Include="..\Modules\_testcapi\code.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Modules\_testcapi\pyos.c"> + <Filter>Source Files</Filter> + </ClCompile> </ItemGroup> <ItemGroup> <ResourceCompile Include="..\PC\python_nt.rc"> diff --git a/PCbuild/find_python.bat b/PCbuild/find_python.bat index 11d6cba7a172c9..7af5503d80a0fc 100644 --- a/PCbuild/find_python.bat +++ b/PCbuild/find_python.bat @@ -42,7 +42,7 @@ @if NOT "%HOST_PYTHON%"=="" @%HOST_PYTHON% -Ec "import sys; assert sys.version_info[:2] >= (3, 9)" >nul 2>nul && (set PYTHON="%HOST_PYTHON%") && (set _Py_Python_Source=found as HOST_PYTHON) && goto :found @rem If py.exe finds a recent enough version, use that one -@for 
%%p in (3.10 3.9) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found +@for %%p in (3.11 3.10 3.9) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found @if NOT exist "%_Py_EXTERNALS_DIR%" mkdir "%_Py_EXTERNALS_DIR%" @set _Py_NUGET=%NUGET% diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props index 92c7849d3bcf75..36c4c269d05da9 100644 --- a/PCbuild/pyproject.props +++ b/PCbuild/pyproject.props @@ -21,6 +21,13 @@ <LinkIncremental Condition="$(Configuration) != 'Debug'">false</LinkIncremental> </PropertyGroup> + <PropertyGroup Label="MSVC Bug Workarounds" Condition="$(VCToolsVersion) != ''"> + <_VCToolsVersion>$([System.Version]::Parse(`$(VCToolsVersion)`).Major).$([System.Version]::Parse(`$(VCToolsVersion)`).Minor)</_VCToolsVersion> + + <!-- See https://developercommunity.visualstudio.com/t/Regression-in-MSVC-1433-1434-ARM64-co/10224361 --> + <MSVCHasBrokenARM64Clamping Condition="$(_VCToolsVersion) == '14.34'">true</MSVCHasBrokenARM64Clamping> + </PropertyGroup> + <PropertyGroup> <_DebugPreprocessorDefinition>NDEBUG;</_DebugPreprocessorDefinition> <_DebugPreprocessorDefinition Condition="$(Configuration) == 'Debug'">_DEBUG;</_DebugPreprocessorDefinition> @@ -50,6 +57,7 @@ <AdditionalOptions>/utf-8 %(AdditionalOptions)</AdditionalOptions> <AdditionalOptions Condition="$(PlatformToolset) == 'ClangCL'">-Wno-deprecated-non-prototype -Wno-unused-label -Wno-pointer-sign -Wno-incompatible-pointer-types-discards-qualifiers -Wno-unused-function %(AdditionalOptions)</AdditionalOptions> <AdditionalOptions Condition="$(Configuration) != 'Debug' and $(PlatformToolset) == 'ClangCL'">-flto %(AdditionalOptions)</AdditionalOptions> + <AdditionalOptions Condition="$(MSVCHasBrokenARM64Clamping) == 'true' and $(Platform) == 'ARM64'">-d2pattern-opt-disable:-932189325 %(AdditionalOptions)</AdditionalOptions> </ClCompile> <ClCompile Condition="$(Configuration) == 'Debug'"> <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion> @@ -79,6 +87,7 @@ <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGUpdate'">PGUpdate</LinkTimeCodeGeneration> <AdditionalDependencies>advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;%(AdditionalDependencies)</AdditionalDependencies> <AdditionalOptions Condition="$(Configuration) != 'Debug'">/OPT:REF,NOICF %(AdditionalOptions)</AdditionalOptions> + <AdditionalOptions Condition="$(MSVCHasBrokenARM64Clamping) == 'true' and $(Platform) == 'ARM64'">-d2:-pattern-opt-disable:-932189325 %(AdditionalOptions)</AdditionalOptions> </Link> <Lib> <LinkTimeCodeGeneration Condition="$(Configuration) == 'Release'">true</LinkTimeCodeGeneration> diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 0343d30a42cd6a..8aafcb786a6064 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -197,6 +197,7 @@ <ClInclude Include="..\Include\internal\pycore_asdl.h" /> <ClInclude Include="..\Include\internal\pycore_ast.h" /> <ClInclude Include="..\Include\internal\pycore_ast_state.h" /> + <ClInclude Include="..\Include\internal\pycore_atexit.h" /> <ClInclude Include="..\Include\internal\pycore_atomic.h" /> <ClInclude Include="..\Include\internal\pycore_atomic_funcs.h" /> <ClInclude Include="..\Include\internal\pycore_bitutils.h" /> @@ -205,6 +206,7 @@ <ClInclude Include="..\Include\internal\pycore_call.h" /> <ClInclude Include="..\Include\internal\pycore_ceval.h" /> <ClInclude 
Include="..\Include\internal\pycore_ceval_state.h" /> + <ClInclude Include="..\Include\internal\pycore_cfg.h" /> <ClInclude Include="..\Include\internal\pycore_code.h" /> <ClInclude Include="..\Include\internal\pycore_compile.h" /> <ClInclude Include="..\Include\internal\pycore_condvar.h" /> @@ -239,6 +241,7 @@ <ClInclude Include="..\Include\internal\pycore_moduleobject.h" /> <ClInclude Include="..\Include\internal\pycore_namespace.h" /> <ClInclude Include="..\Include\internal\pycore_object.h" /> + <ClInclude Include="..\Include\internal\pycore_object_state.h" /> <ClInclude Include="..\Include\internal\pycore_obmalloc.h" /> <ClInclude Include="..\Include\internal\pycore_obmalloc_init.h" /> <ClInclude Include="..\Include\internal\pycore_pathconfig.h" /> @@ -497,6 +500,7 @@ <ClCompile Include="..\Python\pyhash.c" /> <ClCompile Include="..\Python\_warnings.c" /> <ClCompile Include="..\Python\asdl.c" /> + <ClCompile Include="..\Python\assemble.c" /> <ClCompile Include="..\Python\ast.c" /> <ClCompile Include="..\Python\ast_opt.c" /> <ClCompile Include="..\Python\ast_unparse.c" /> @@ -510,6 +514,7 @@ <ClCompile Include="..\Python\dynload_win.c" /> <ClCompile Include="..\Python\errors.c" /> <ClCompile Include="..\Python\fileutils.c" /> + <ClCompile Include="..\Python\flowgraph.c" /> <ClCompile Include="..\Python\formatter_unicode.c" /> <ClCompile Include="..\Python\frame.c" /> <ClCompile Include="..\Python\frozen.c" /> @@ -527,6 +532,8 @@ <ClCompile Include="..\Python\importdl.c" /> <ClCompile Include="..\Python\initconfig.c" /> <ClCompile Include="..\Python\intrinsics.c" /> + <ClCompile Include="..\Python\instrumentation.c" /> + <ClCompile Include="..\Python\legacy_tracing.c" /> <ClCompile Include="..\Python\marshal.c" /> <ClCompile Include="..\Python\modsupport.c" /> <ClCompile Include="..\Python\mysnprintf.c" /> diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 359463e1d9af75..07476f30833372 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -498,6 +498,9 @@ <ClInclude Include="..\Include\internal\pycore_ast_state.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_atexit.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_atomic.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -621,6 +624,9 @@ <ClInclude Include="..\Include\internal\pycore_object.h"> <Filter>Include\internal</Filter> </ClInclude> + <ClInclude Include="..\Include\internal\pycore_object_state.h"> + <Filter>Include\internal</Filter> + </ClInclude> <ClInclude Include="..\Include\internal\pycore_obmalloc.h"> <Filter>Include\internal</Filter> </ClInclude> @@ -1088,6 +1094,9 @@ <ClCompile Include="..\Python\asdl.c"> <Filter>Python</Filter> </ClCompile> + <ClCompile Include="..\Python\assemble.c"> + <Filter>Python</Filter> + </ClCompile> <ClCompile Include="..\Python\ast.c"> <Filter>Python</Filter> </ClCompile> @@ -1121,6 +1130,9 @@ <ClCompile Include="..\Python\fileutils.c"> <Filter>Python</Filter> </ClCompile> + <ClCompile Include="..\Python\flowgraph.c"> + <Filter>Python</Filter> + </ClCompile> <ClCompile Include="..\Python\formatter_unicode.c"> <Filter>Python</Filter> </ClCompile> @@ -1166,6 +1178,12 @@ <ClCompile Include="..\Python\intrinsics.c"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="..\Python\instrumentation.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="..\Python\legacy_tracing.c"> + 
<Filter>Source Files</Filter> + </ClCompile> <ClCompile Include="..\Python\marshal.c"> <Filter>Python</Filter> </ClCompile> diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index 46390966892d16..0aaaed64c4037c 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -1,6 +1,7 @@ #include <Python.h> #include "pegen.h" +#include "tokenizer.h" #include "string_parser.h" #include "pycore_runtime.h" // _PyRuntime @@ -853,96 +854,6 @@ _PyPegen_seq_delete_starred_exprs(Parser *p, asdl_seq *kwargs) return new_seq; } -expr_ty -_PyPegen_concatenate_strings(Parser *p, asdl_seq *strings) -{ - Py_ssize_t len = asdl_seq_LEN(strings); - assert(len > 0); - - Token *first = asdl_seq_GET_UNTYPED(strings, 0); - Token *last = asdl_seq_GET_UNTYPED(strings, len - 1); - - int bytesmode = 0; - PyObject *bytes_str = NULL; - - FstringParser state; - _PyPegen_FstringParser_Init(&state); - - for (Py_ssize_t i = 0; i < len; i++) { - Token *t = asdl_seq_GET_UNTYPED(strings, i); - - int this_bytesmode; - int this_rawmode; - PyObject *s; - const char *fstr; - Py_ssize_t fstrlen = -1; - - if (_PyPegen_parsestr(p, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen, t) != 0) { - goto error; - } - - /* Check that we are not mixing bytes with unicode. */ - if (i != 0 && bytesmode != this_bytesmode) { - RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals"); - Py_XDECREF(s); - goto error; - } - bytesmode = this_bytesmode; - - if (fstr != NULL) { - assert(s == NULL && !bytesmode); - - int result = _PyPegen_FstringParser_ConcatFstring(p, &state, &fstr, fstr + fstrlen, - this_rawmode, 0, first, t, last); - if (result < 0) { - goto error; - } - } - else { - /* String or byte string. */ - assert(s != NULL && fstr == NULL); - assert(bytesmode ? PyBytes_CheckExact(s) : PyUnicode_CheckExact(s)); - - if (bytesmode) { - if (i == 0) { - bytes_str = s; - } - else { - PyBytes_ConcatAndDel(&bytes_str, s); - if (!bytes_str) { - goto error; - } - } - } - else { - /* This is a regular string. Concatenate it. 
*/ - if (_PyPegen_FstringParser_ConcatAndDel(&state, s) < 0) { - goto error; - } - } - } - } - - if (bytesmode) { - if (_PyArena_AddPyObject(p->arena, bytes_str) < 0) { - goto error; - } - return _PyAST_Constant(bytes_str, NULL, first->lineno, - first->col_offset, last->end_lineno, - last->end_col_offset, p->arena); - } - - return _PyPegen_FstringParser_Finish(p, &state, first, last); - -error: - Py_XDECREF(bytes_str); - _PyPegen_FstringParser_Dealloc(&state); - if (PyErr_Occurred()) { - _Pypegen_raise_decode_error(p); - } - return NULL; -} - expr_ty _PyPegen_ensure_imaginary(Parser *p, expr_ty exp) { @@ -1054,6 +965,44 @@ _PyPegen_check_legacy_stmt(Parser *p, expr_ty name) { return 0; } +static ResultTokenWithMetadata * +result_token_with_metadata(Parser *p, void *result, PyObject *metadata) +{ + ResultTokenWithMetadata *res = _PyArena_Malloc(p->arena, sizeof(ResultTokenWithMetadata)); + if (res == NULL) { + return NULL; + } + res->metadata = metadata; + res->result = result; + return res; +} + +ResultTokenWithMetadata * +_PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv) +{ + if (conv_token->lineno != conv->lineno || conv_token->end_col_offset != conv->col_offset) { + return RAISE_SYNTAX_ERROR_KNOWN_RANGE( + conv_token, conv, + "f-string: conversion type must come right after the exclamation mark" + ); + } + return result_token_with_metadata(p, conv, conv_token->metadata); +} + +ResultTokenWithMetadata * +_PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset, + int end_lineno, int end_col_offset, PyArena *arena) +{ + if (!spec) { + return NULL; + } + expr_ty res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno, end_col_offset, p->arena); + if (!res) { + return NULL; + } + return result_token_with_metadata(p, res, colon->metadata); +} + const char * _PyPegen_get_expr_name(expr_ty e) { @@ -1271,3 +1220,423 @@ _PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq "Generator expression must be parenthesized" ); } + +// Fstring stuff + +static expr_ty +_PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant) { + assert(PyUnicode_CheckExact(constant->v.Constant.value)); + + const char* bstr = PyUnicode_AsUTF8(constant->v.Constant.value); + if (bstr == NULL) { + return NULL; + } + + size_t len; + if (strcmp(bstr, "{{") == 0 || strcmp(bstr, "}}") == 0) { + len = 1; + } else { + len = strlen(bstr); + } + + is_raw = is_raw || strchr(bstr, '\\') == NULL; + PyObject *str = _PyPegen_decode_string(p, is_raw, bstr, len, NULL); + if (str == NULL) { + _Pypegen_raise_decode_error(p); + return NULL; + } + if (_PyArena_AddPyObject(p->arena, str) < 0) { + Py_DECREF(str); + return NULL; + } + return _PyAST_Constant(str, NULL, constant->lineno, constant->col_offset, + constant->end_lineno, constant->end_col_offset, + p->arena); +} + +static asdl_expr_seq * +unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) +{ + /* The parser might put multiple f-string values into an individual + * JoinedStr node at the top level due to stuff like f-string debugging + * expressions. This function flattens those and promotes them to the + * upper level. Only simplifies AST, but the compiler already takes care + * of the regular output, so this is not necessary if you are not going + * to expose the output AST to Python level. 
*/ + + Py_ssize_t i, req_size, raw_size; + + req_size = raw_size = asdl_seq_LEN(raw_expressions); + expr_ty expr; + for (i = 0; i < raw_size; i++) { + expr = asdl_seq_GET(raw_expressions, i); + if (expr->kind == JoinedStr_kind) { + req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1; + } + } + + asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena); + + Py_ssize_t raw_index, req_index = 0; + for (raw_index = 0; raw_index < raw_size; raw_index++) { + expr = asdl_seq_GET(raw_expressions, raw_index); + if (expr->kind == JoinedStr_kind) { + asdl_expr_seq *values = expr->v.JoinedStr.values; + for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) { + asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n)); + req_index++; + } + } else { + asdl_seq_SET(expressions, req_index, expr); + req_index++; + } + } + return expressions; +} + +expr_ty +_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) { + asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions); + Py_ssize_t n_items = asdl_seq_LEN(expr); + + const char* quote_str = PyBytes_AsString(a->bytes); + if (quote_str == NULL) { + return NULL; + } + int is_raw = strpbrk(quote_str, "rR") != NULL; + + asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena); + if (seq == NULL) { + return NULL; + } + + Py_ssize_t index = 0; + for (Py_ssize_t i = 0; i < n_items; i++) { + expr_ty item = asdl_seq_GET(expr, i); + if (item->kind == Constant_kind) { + item = _PyPegen_decode_fstring_part(p, is_raw, item); + if (item == NULL) { + return NULL; + } + + /* Tokenizer emits string parts even when the underlying string + might become an empty value (e.g. FSTRING_MIDDLE with the value \\n) + so we need to check for them and simplify it here. */ + if (PyUnicode_CheckExact(item->v.Constant.value) + && PyUnicode_GET_LENGTH(item->v.Constant.value) == 0) { + continue; + } + } + asdl_seq_SET(seq, index++, item); + } + + asdl_expr_seq *resized_exprs; + if (index != n_items) { + resized_exprs = _Py_asdl_expr_seq_new(index, p->arena); + if (resized_exprs == NULL) { + return NULL; + } + for (Py_ssize_t i = 0; i < index; i++) { + asdl_seq_SET(resized_exprs, i, asdl_seq_GET(seq, i)); + } + } + else { + resized_exprs = seq; + } + + return _PyAST_JoinedStr(resized_exprs, a->lineno, a->col_offset, + b->end_lineno, b->end_col_offset, + p->arena); +} + +expr_ty _PyPegen_constant_from_token(Parser* p, Token* tok) { + char* bstr = PyBytes_AsString(tok->bytes); + if (bstr == NULL) { + return NULL; + } + PyObject* str = PyUnicode_FromString(bstr); + if (str == NULL) { + return NULL; + } + if (_PyArena_AddPyObject(p->arena, str) < 0) { + Py_DECREF(str); + return NULL; + } + return _PyAST_Constant(str, NULL, tok->lineno, tok->col_offset, + tok->end_lineno, tok->end_col_offset, + p->arena); +} + +expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok) { + char* the_str = PyBytes_AsString(tok->bytes); + if (the_str == NULL) { + return NULL; + } + PyObject *s = _PyPegen_parse_string(p, tok); + if (s == NULL) { + _Pypegen_raise_decode_error(p); + return NULL; + } + if (_PyArena_AddPyObject(p->arena, s) < 0) { + Py_DECREF(s); + return NULL; + } + PyObject *kind = NULL; + if (the_str && the_str[0] == 'u') { + kind = _PyPegen_new_identifier(p, "u"); + if (kind == NULL) { + return NULL; + } + } + return _PyAST_Constant(s, kind, tok->lineno, tok->col_offset, tok->end_lineno, tok->end_col_offset, p->arena); +} + +expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, ResultTokenWithMetadata 
*conversion, + ResultTokenWithMetadata *format, Token *closing_brace, int lineno, int col_offset, + int end_lineno, int end_col_offset, PyArena *arena) { + int conversion_val = -1; + if (conversion != NULL) { + expr_ty conversion_expr = (expr_ty) conversion->result; + assert(conversion_expr->kind == Name_kind); + Py_UCS4 first = PyUnicode_READ_CHAR(conversion_expr->v.Name.id, 0); + + if (PyUnicode_GET_LENGTH(conversion_expr->v.Name.id) > 1 || + !(first == 's' || first == 'r' || first == 'a')) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion_expr, + "f-string: invalid conversion character %R: expected 's', 'r', or 'a'", + conversion_expr->v.Name.id); + return NULL; + } + + conversion_val = Py_SAFE_DOWNCAST(first, Py_UCS4, int); + } + else if (debug && !format) { + /* If no conversion is specified, use !r for debug expressions */ + conversion_val = (int)'r'; + } + + expr_ty formatted_value = _PyAST_FormattedValue( + expression, conversion_val, format ? (expr_ty) format->result : NULL, + lineno, col_offset, end_lineno, + end_col_offset, arena + ); + + if (debug) { + /* Find the non whitespace token after the "=" */ + int debug_end_line, debug_end_offset; + PyObject *debug_metadata; + + if (conversion) { + debug_end_line = ((expr_ty) conversion->result)->lineno; + debug_end_offset = ((expr_ty) conversion->result)->col_offset; + debug_metadata = conversion->metadata; + } + else if (format) { + debug_end_line = ((expr_ty) format->result)->lineno; + debug_end_offset = ((expr_ty) format->result)->col_offset + 1; + debug_metadata = format->metadata; + } + else { + debug_end_line = end_lineno; + debug_end_offset = end_col_offset; + debug_metadata = closing_brace->metadata; + } + + expr_ty debug_text = _PyAST_Constant(debug_metadata, NULL, lineno, col_offset + 1, debug_end_line, + debug_end_offset - 1, p->arena); + if (!debug_text) { + return NULL; + } + + asdl_expr_seq *values = _Py_asdl_expr_seq_new(2, arena); + asdl_seq_SET(values, 0, debug_text); + asdl_seq_SET(values, 1, formatted_value); + return _PyAST_JoinedStr(values, lineno, col_offset, debug_end_line, debug_end_offset, p->arena); + } + else { + return formatted_value; + } +} + +expr_ty +_PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings, + int lineno, int col_offset, int end_lineno, + int end_col_offset, PyArena *arena) +{ + Py_ssize_t len = asdl_seq_LEN(strings); + assert(len > 0); + + int f_string_found = 0; + int unicode_string_found = 0; + int bytes_found = 0; + + Py_ssize_t i = 0; + Py_ssize_t n_flattened_elements = 0; + for (i = 0; i < len; i++) { + expr_ty elem = asdl_seq_GET(strings, i); + if (elem->kind == Constant_kind) { + if (PyBytes_CheckExact(elem->v.Constant.value)) { + bytes_found = 1; + } else { + unicode_string_found = 1; + } + n_flattened_elements++; + } else { + n_flattened_elements += asdl_seq_LEN(elem->v.JoinedStr.values); + f_string_found = 1; + } + } + + if ((unicode_string_found || f_string_found) && bytes_found) { + RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals"); + return NULL; + } + + if (bytes_found) { + PyObject* res = PyBytes_FromString(""); + + /* Bytes literals never get a kind, but just for consistency + since they are represented as Constant nodes, we'll mirror + the same behavior as unicode strings for determining the + kind. 
*/ + PyObject* kind = asdl_seq_GET(strings, 0)->v.Constant.kind; + for (i = 0; i < len; i++) { + expr_ty elem = asdl_seq_GET(strings, i); + PyBytes_Concat(&res, elem->v.Constant.value); + } + if (!res || _PyArena_AddPyObject(arena, res) < 0) { + Py_XDECREF(res); + return NULL; + } + return _PyAST_Constant(res, kind, lineno, col_offset, end_lineno, end_col_offset, p->arena); + } + + if (!f_string_found && len == 1) { + return asdl_seq_GET(strings, 0); + } + + asdl_expr_seq* flattened = _Py_asdl_expr_seq_new(n_flattened_elements, p->arena); + if (flattened == NULL) { + return NULL; + } + + /* build flattened list */ + Py_ssize_t current_pos = 0; + Py_ssize_t j = 0; + for (i = 0; i < len; i++) { + expr_ty elem = asdl_seq_GET(strings, i); + if (elem->kind == Constant_kind) { + asdl_seq_SET(flattened, current_pos++, elem); + } else { + for (j = 0; j < asdl_seq_LEN(elem->v.JoinedStr.values); j++) { + expr_ty subvalue = asdl_seq_GET(elem->v.JoinedStr.values, j); + if (subvalue == NULL) { + return NULL; + } + asdl_seq_SET(flattened, current_pos++, subvalue); + } + } + } + + /* calculate folded element count */ + Py_ssize_t n_elements = 0; + int prev_is_constant = 0; + for (i = 0; i < n_flattened_elements; i++) { + expr_ty elem = asdl_seq_GET(flattened, i); + + /* The concatenation of a FormattedValue and an empty Constant should + lead to the FormattedValue itself. Thus, we will not take any empty + constants into account, just as in `_PyPegen_joined_str` */ + if (f_string_found && elem->kind == Constant_kind && + PyUnicode_CheckExact(elem->v.Constant.value) && + PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) + continue; + + if (!prev_is_constant || elem->kind != Constant_kind) { + n_elements++; + } + prev_is_constant = elem->kind == Constant_kind; + } + + asdl_expr_seq* values = _Py_asdl_expr_seq_new(n_elements, p->arena); + if (values == NULL) { + return NULL; + } + + /* build folded list */ + _PyUnicodeWriter writer; + current_pos = 0; + for (i = 0; i < n_flattened_elements; i++) { + expr_ty elem = asdl_seq_GET(flattened, i); + + /* if the current elem and the following are constants, + fold them and all consequent constants */ + if (elem->kind == Constant_kind) { + if (i + 1 < n_flattened_elements && + asdl_seq_GET(flattened, i + 1)->kind == Constant_kind) { + expr_ty first_elem = elem; + + /* When a string is getting concatenated, the kind of the string + is determined by the first string in the concatenation + sequence. 
+ + u"abc" "def" -> u"abcdef" + "abc" u"abc" -> "abcabc" */ + PyObject *kind = elem->v.Constant.kind; + + _PyUnicodeWriter_Init(&writer); + expr_ty last_elem = elem; + for (j = i; j < n_flattened_elements; j++) { + expr_ty current_elem = asdl_seq_GET(flattened, j); + if (current_elem->kind == Constant_kind) { + if (_PyUnicodeWriter_WriteStr( + &writer, current_elem->v.Constant.value)) { + _PyUnicodeWriter_Dealloc(&writer); + return NULL; + } + last_elem = current_elem; + } else { + break; + } + } + i = j - 1; + + PyObject *concat_str = _PyUnicodeWriter_Finish(&writer); + if (concat_str == NULL) { + _PyUnicodeWriter_Dealloc(&writer); + return NULL; + } + if (_PyArena_AddPyObject(p->arena, concat_str) < 0) { + Py_DECREF(concat_str); + return NULL; + } + elem = _PyAST_Constant(concat_str, kind, first_elem->lineno, + first_elem->col_offset, + last_elem->end_lineno, + last_elem->end_col_offset, p->arena); + if (elem == NULL) { + return NULL; + } + } + + /* Drop all empty constant strings */ + if (f_string_found && + PyUnicode_CheckExact(elem->v.Constant.value) && + PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) { + continue; + } + } + + asdl_seq_SET(values, current_pos++, elem); + } + + if (!f_string_found) { + assert(n_elements == 1); + expr_ty elem = asdl_seq_GET(values, 0); + assert(elem->kind == Constant_kind); + return elem; + } + + assert(current_pos == n_elements); + return _PyAST_JoinedStr(values, lineno, col_offset, end_lineno, end_col_offset, p->arena); +} diff --git a/Parser/parser.c index e0a88a9cc72c8b..6eb985a7d3e123 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -17,52 +17,52 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 641}, - {"as", 639}, - {"in", 650}, + {"if", 642}, + {"as", 640}, + {"in", 651}, {"or", 574}, {"is", 582}, {NULL, -1}, }, (KeywordToken[]) { - {"del", 603}, - {"def", 651}, - {"for", 649}, - {"try", 623}, + {"del", 604}, + {"def", 652}, + {"for", 650}, + {"try", 624}, {"and", 575}, {"not", 581}, {NULL, -1}, }, (KeywordToken[]) { - {"from", 607}, + {"from", 608}, {"pass", 504}, - {"with", 614}, - {"elif", 643}, - {"else", 644}, - {"None", 601}, - {"True", 600}, + {"with", 615}, + {"elif", 644}, + {"else", 645}, + {"None", 602}, + {"True", 601}, {NULL, -1}, }, (KeywordToken[]) { {"raise", 522}, {"yield", 573}, {"break", 508}, - {"class", 653}, - {"while", 646}, - {"False", 602}, + {"class", 654}, + {"while", 647}, + {"False", 603}, {NULL, -1}, }, (KeywordToken[]) { {"return", 519}, - {"import", 606}, + {"import", 607}, {"assert", 526}, {"global", 523}, - {"except", 636}, - {"lambda", 586}, + {"except", 637}, + {"lambda", 600}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 632}, + {"finally", 633}, {NULL, -1}, }, (KeywordToken[]) { @@ -224,341 +224,370 @@ static char *soft_keywords[] = { #define lambda_param_with_default_type 1144 #define lambda_param_maybe_default_type 1145 #define lambda_param_type 1146 -#define strings_type 1147 -#define list_type 1148 -#define tuple_type 1149 -#define set_type 1150 -#define dict_type 1151 -#define double_starred_kvpairs_type 1152 -#define double_starred_kvpair_type 1153 -#define kvpair_type 1154 -#define for_if_clauses_type 1155 -#define for_if_clause_type 1156 -#define listcomp_type 1157 -#define setcomp_type 1158 -#define genexp_type 1159 -#define dictcomp_type 1160 -#define arguments_type 1161 -#define args_type 1162 -#define kwargs_type 1163 -#define starred_expression_type 1164 -#define 
kwarg_or_starred_type 1165 -#define kwarg_or_double_starred_type 1166 -#define star_targets_type 1167 -#define star_targets_list_seq_type 1168 -#define star_targets_tuple_seq_type 1169 -#define star_target_type 1170 -#define target_with_star_atom_type 1171 -#define star_atom_type 1172 -#define single_target_type 1173 -#define single_subscript_attribute_target_type 1174 -#define t_primary_type 1175 // Left-recursive -#define t_lookahead_type 1176 -#define del_targets_type 1177 -#define del_target_type 1178 -#define del_t_atom_type 1179 -#define type_expressions_type 1180 -#define func_type_comment_type 1181 -#define invalid_arguments_type 1182 -#define invalid_kwarg_type 1183 -#define expression_without_invalid_type 1184 -#define invalid_legacy_expression_type 1185 -#define invalid_expression_type 1186 -#define invalid_named_expression_type 1187 -#define invalid_assignment_type 1188 -#define invalid_ann_assign_target_type 1189 -#define invalid_del_stmt_type 1190 -#define invalid_block_type 1191 -#define invalid_comprehension_type 1192 -#define invalid_dict_comprehension_type 1193 -#define invalid_parameters_type 1194 -#define invalid_default_type 1195 -#define invalid_star_etc_type 1196 -#define invalid_kwds_type 1197 -#define invalid_parameters_helper_type 1198 -#define invalid_lambda_parameters_type 1199 -#define invalid_lambda_parameters_helper_type 1200 -#define invalid_lambda_star_etc_type 1201 -#define invalid_lambda_kwds_type 1202 -#define invalid_double_type_comments_type 1203 -#define invalid_with_item_type 1204 -#define invalid_for_target_type 1205 -#define invalid_group_type 1206 -#define invalid_import_type 1207 -#define invalid_import_from_targets_type 1208 -#define invalid_with_stmt_type 1209 -#define invalid_with_stmt_indent_type 1210 -#define invalid_try_stmt_type 1211 -#define invalid_except_stmt_type 1212 -#define invalid_finally_stmt_type 1213 -#define invalid_except_stmt_indent_type 1214 -#define invalid_except_star_stmt_indent_type 1215 -#define invalid_match_stmt_type 1216 -#define invalid_case_block_type 1217 -#define invalid_as_pattern_type 1218 -#define invalid_class_pattern_type 1219 -#define invalid_class_argument_pattern_type 1220 -#define invalid_if_stmt_type 1221 -#define invalid_elif_stmt_type 1222 -#define invalid_else_stmt_type 1223 -#define invalid_while_stmt_type 1224 -#define invalid_for_stmt_type 1225 -#define invalid_def_raw_type 1226 -#define invalid_class_def_raw_type 1227 -#define invalid_double_starred_kvpairs_type 1228 -#define invalid_kvpair_type 1229 -#define invalid_starred_expression_type 1230 -#define _loop0_1_type 1231 -#define _loop0_2_type 1232 -#define _loop1_3_type 1233 -#define _loop0_5_type 1234 -#define _gather_4_type 1235 -#define _tmp_6_type 1236 -#define _tmp_7_type 1237 -#define _tmp_8_type 1238 -#define _tmp_9_type 1239 -#define _tmp_10_type 1240 -#define _tmp_11_type 1241 -#define _tmp_12_type 1242 -#define _tmp_13_type 1243 -#define _loop1_14_type 1244 -#define _tmp_15_type 1245 -#define _tmp_16_type 1246 -#define _tmp_17_type 1247 -#define _loop0_19_type 1248 -#define _gather_18_type 1249 -#define _loop0_21_type 1250 -#define _gather_20_type 1251 -#define _tmp_22_type 1252 -#define _tmp_23_type 1253 -#define _loop0_24_type 1254 -#define _loop1_25_type 1255 -#define _loop0_27_type 1256 -#define _gather_26_type 1257 -#define _tmp_28_type 1258 -#define _loop0_30_type 1259 -#define _gather_29_type 1260 -#define _tmp_31_type 1261 -#define _loop1_32_type 1262 -#define _tmp_33_type 1263 -#define _tmp_34_type 1264 -#define _tmp_35_type 
1265 -#define _loop0_36_type 1266 -#define _loop0_37_type 1267 -#define _loop0_38_type 1268 -#define _loop1_39_type 1269 -#define _loop0_40_type 1270 -#define _loop1_41_type 1271 -#define _loop1_42_type 1272 -#define _loop1_43_type 1273 -#define _loop0_44_type 1274 -#define _loop1_45_type 1275 -#define _loop0_46_type 1276 -#define _loop1_47_type 1277 -#define _loop0_48_type 1278 -#define _loop0_49_type 1279 -#define _loop1_50_type 1280 -#define _loop0_52_type 1281 -#define _gather_51_type 1282 -#define _loop0_54_type 1283 -#define _gather_53_type 1284 -#define _loop0_56_type 1285 -#define _gather_55_type 1286 -#define _loop0_58_type 1287 -#define _gather_57_type 1288 -#define _tmp_59_type 1289 -#define _loop1_60_type 1290 -#define _loop1_61_type 1291 -#define _tmp_62_type 1292 -#define _tmp_63_type 1293 -#define _loop1_64_type 1294 -#define _loop0_66_type 1295 -#define _gather_65_type 1296 -#define _tmp_67_type 1297 -#define _tmp_68_type 1298 -#define _tmp_69_type 1299 -#define _tmp_70_type 1300 -#define _loop0_72_type 1301 -#define _gather_71_type 1302 -#define _loop0_74_type 1303 -#define _gather_73_type 1304 -#define _tmp_75_type 1305 -#define _loop0_77_type 1306 -#define _gather_76_type 1307 -#define _loop0_79_type 1308 -#define _gather_78_type 1309 -#define _loop1_80_type 1310 -#define _loop1_81_type 1311 -#define _loop0_83_type 1312 -#define _gather_82_type 1313 -#define _loop1_84_type 1314 -#define _loop1_85_type 1315 -#define _loop1_86_type 1316 -#define _tmp_87_type 1317 -#define _loop0_89_type 1318 -#define _gather_88_type 1319 -#define _tmp_90_type 1320 -#define _tmp_91_type 1321 -#define _tmp_92_type 1322 -#define _tmp_93_type 1323 -#define _tmp_94_type 1324 -#define _loop0_95_type 1325 -#define _loop0_96_type 1326 -#define _loop0_97_type 1327 -#define _loop1_98_type 1328 -#define _loop0_99_type 1329 -#define _loop1_100_type 1330 -#define _loop1_101_type 1331 -#define _loop1_102_type 1332 -#define _loop0_103_type 1333 -#define _loop1_104_type 1334 -#define _loop0_105_type 1335 -#define _loop1_106_type 1336 -#define _loop0_107_type 1337 -#define _loop1_108_type 1338 -#define _loop1_109_type 1339 -#define _tmp_110_type 1340 -#define _loop0_112_type 1341 -#define _gather_111_type 1342 -#define _loop1_113_type 1343 -#define _loop0_114_type 1344 -#define _loop0_115_type 1345 -#define _tmp_116_type 1346 -#define _loop0_118_type 1347 -#define _gather_117_type 1348 -#define _tmp_119_type 1349 -#define _loop0_121_type 1350 -#define _gather_120_type 1351 -#define _loop0_123_type 1352 -#define _gather_122_type 1353 -#define _loop0_125_type 1354 -#define _gather_124_type 1355 -#define _loop0_127_type 1356 -#define _gather_126_type 1357 -#define _loop0_128_type 1358 -#define _loop0_130_type 1359 -#define _gather_129_type 1360 -#define _loop1_131_type 1361 -#define _tmp_132_type 1362 -#define _loop0_134_type 1363 -#define _gather_133_type 1364 -#define _loop0_136_type 1365 -#define _gather_135_type 1366 -#define _loop0_138_type 1367 -#define _gather_137_type 1368 -#define _loop0_140_type 1369 -#define _gather_139_type 1370 -#define _loop0_142_type 1371 -#define _gather_141_type 1372 -#define _tmp_143_type 1373 -#define _tmp_144_type 1374 -#define _tmp_145_type 1375 -#define _tmp_146_type 1376 -#define _tmp_147_type 1377 -#define _tmp_148_type 1378 -#define _tmp_149_type 1379 -#define _tmp_150_type 1380 -#define _tmp_151_type 1381 -#define _tmp_152_type 1382 -#define _tmp_153_type 1383 -#define _loop0_154_type 1384 -#define _loop0_155_type 1385 -#define _loop0_156_type 1386 -#define 
_tmp_157_type 1387 -#define _tmp_158_type 1388 -#define _tmp_159_type 1389 -#define _tmp_160_type 1390 -#define _tmp_161_type 1391 -#define _loop0_162_type 1392 -#define _loop0_163_type 1393 -#define _loop0_164_type 1394 -#define _loop1_165_type 1395 -#define _tmp_166_type 1396 -#define _loop0_167_type 1397 -#define _tmp_168_type 1398 -#define _loop0_169_type 1399 -#define _loop1_170_type 1400 -#define _tmp_171_type 1401 -#define _tmp_172_type 1402 -#define _tmp_173_type 1403 -#define _loop0_174_type 1404 -#define _tmp_175_type 1405 -#define _tmp_176_type 1406 -#define _loop1_177_type 1407 -#define _tmp_178_type 1408 -#define _loop0_179_type 1409 -#define _loop0_180_type 1410 -#define _loop0_181_type 1411 -#define _loop0_183_type 1412 -#define _gather_182_type 1413 -#define _tmp_184_type 1414 -#define _loop0_185_type 1415 -#define _tmp_186_type 1416 -#define _loop0_187_type 1417 -#define _loop1_188_type 1418 -#define _loop1_189_type 1419 -#define _tmp_190_type 1420 -#define _tmp_191_type 1421 -#define _loop0_192_type 1422 -#define _tmp_193_type 1423 -#define _tmp_194_type 1424 -#define _tmp_195_type 1425 -#define _loop0_197_type 1426 -#define _gather_196_type 1427 -#define _loop0_199_type 1428 -#define _gather_198_type 1429 -#define _loop0_201_type 1430 -#define _gather_200_type 1431 -#define _loop0_203_type 1432 -#define _gather_202_type 1433 -#define _tmp_204_type 1434 -#define _loop0_205_type 1435 -#define _loop1_206_type 1436 -#define _tmp_207_type 1437 -#define _loop0_208_type 1438 -#define _loop1_209_type 1439 -#define _tmp_210_type 1440 -#define _tmp_211_type 1441 -#define _tmp_212_type 1442 -#define _tmp_213_type 1443 -#define _tmp_214_type 1444 -#define _tmp_215_type 1445 -#define _tmp_216_type 1446 -#define _tmp_217_type 1447 -#define _tmp_218_type 1448 -#define _tmp_219_type 1449 -#define _loop0_221_type 1450 -#define _gather_220_type 1451 -#define _tmp_222_type 1452 -#define _tmp_223_type 1453 -#define _tmp_224_type 1454 -#define _tmp_225_type 1455 -#define _tmp_226_type 1456 -#define _tmp_227_type 1457 -#define _tmp_228_type 1458 -#define _tmp_229_type 1459 -#define _tmp_230_type 1460 -#define _tmp_231_type 1461 -#define _tmp_232_type 1462 -#define _tmp_233_type 1463 -#define _tmp_234_type 1464 -#define _tmp_235_type 1465 -#define _tmp_236_type 1466 -#define _tmp_237_type 1467 -#define _tmp_238_type 1468 -#define _tmp_239_type 1469 -#define _tmp_240_type 1470 -#define _tmp_241_type 1471 -#define _tmp_242_type 1472 -#define _tmp_243_type 1473 -#define _tmp_244_type 1474 -#define _tmp_245_type 1475 -#define _tmp_246_type 1476 -#define _tmp_247_type 1477 -#define _tmp_248_type 1478 -#define _tmp_249_type 1479 -#define _tmp_250_type 1480 -#define _tmp_251_type 1481 +#define fstring_middle_type 1147 +#define fstring_replacement_field_type 1148 +#define fstring_conversion_type 1149 +#define fstring_full_format_spec_type 1150 +#define fstring_format_spec_type 1151 +#define string_type 1152 +#define strings_type 1153 +#define list_type 1154 +#define tuple_type 1155 +#define set_type 1156 +#define dict_type 1157 +#define double_starred_kvpairs_type 1158 +#define double_starred_kvpair_type 1159 +#define kvpair_type 1160 +#define for_if_clauses_type 1161 +#define for_if_clause_type 1162 +#define listcomp_type 1163 +#define setcomp_type 1164 +#define genexp_type 1165 +#define dictcomp_type 1166 +#define arguments_type 1167 +#define args_type 1168 +#define kwargs_type 1169 +#define starred_expression_type 1170 +#define kwarg_or_starred_type 1171 +#define kwarg_or_double_starred_type 1172 
+#define star_targets_type 1173 +#define star_targets_list_seq_type 1174 +#define star_targets_tuple_seq_type 1175 +#define star_target_type 1176 +#define target_with_star_atom_type 1177 +#define star_atom_type 1178 +#define single_target_type 1179 +#define single_subscript_attribute_target_type 1180 +#define t_primary_type 1181 // Left-recursive +#define t_lookahead_type 1182 +#define del_targets_type 1183 +#define del_target_type 1184 +#define del_t_atom_type 1185 +#define type_expressions_type 1186 +#define func_type_comment_type 1187 +#define invalid_arguments_type 1188 +#define invalid_kwarg_type 1189 +#define expression_without_invalid_type 1190 +#define invalid_legacy_expression_type 1191 +#define invalid_expression_type 1192 +#define invalid_named_expression_type 1193 +#define invalid_assignment_type 1194 +#define invalid_ann_assign_target_type 1195 +#define invalid_del_stmt_type 1196 +#define invalid_block_type 1197 +#define invalid_comprehension_type 1198 +#define invalid_dict_comprehension_type 1199 +#define invalid_parameters_type 1200 +#define invalid_default_type 1201 +#define invalid_star_etc_type 1202 +#define invalid_kwds_type 1203 +#define invalid_parameters_helper_type 1204 +#define invalid_lambda_parameters_type 1205 +#define invalid_lambda_parameters_helper_type 1206 +#define invalid_lambda_star_etc_type 1207 +#define invalid_lambda_kwds_type 1208 +#define invalid_double_type_comments_type 1209 +#define invalid_with_item_type 1210 +#define invalid_for_target_type 1211 +#define invalid_group_type 1212 +#define invalid_import_type 1213 +#define invalid_import_from_targets_type 1214 +#define invalid_with_stmt_type 1215 +#define invalid_with_stmt_indent_type 1216 +#define invalid_try_stmt_type 1217 +#define invalid_except_stmt_type 1218 +#define invalid_finally_stmt_type 1219 +#define invalid_except_stmt_indent_type 1220 +#define invalid_except_star_stmt_indent_type 1221 +#define invalid_match_stmt_type 1222 +#define invalid_case_block_type 1223 +#define invalid_as_pattern_type 1224 +#define invalid_class_pattern_type 1225 +#define invalid_class_argument_pattern_type 1226 +#define invalid_if_stmt_type 1227 +#define invalid_elif_stmt_type 1228 +#define invalid_else_stmt_type 1229 +#define invalid_while_stmt_type 1230 +#define invalid_for_stmt_type 1231 +#define invalid_def_raw_type 1232 +#define invalid_class_def_raw_type 1233 +#define invalid_double_starred_kvpairs_type 1234 +#define invalid_kvpair_type 1235 +#define invalid_starred_expression_type 1236 +#define invalid_replacement_field_type 1237 +#define invalid_conversion_character_type 1238 +#define _loop0_1_type 1239 +#define _loop0_2_type 1240 +#define _loop0_3_type 1241 +#define _loop1_4_type 1242 +#define _loop0_6_type 1243 +#define _gather_5_type 1244 +#define _tmp_7_type 1245 +#define _tmp_8_type 1246 +#define _tmp_9_type 1247 +#define _tmp_10_type 1248 +#define _tmp_11_type 1249 +#define _tmp_12_type 1250 +#define _tmp_13_type 1251 +#define _tmp_14_type 1252 +#define _loop1_15_type 1253 +#define _tmp_16_type 1254 +#define _tmp_17_type 1255 +#define _tmp_18_type 1256 +#define _loop0_20_type 1257 +#define _gather_19_type 1258 +#define _loop0_22_type 1259 +#define _gather_21_type 1260 +#define _tmp_23_type 1261 +#define _tmp_24_type 1262 +#define _loop0_25_type 1263 +#define _loop1_26_type 1264 +#define _loop0_28_type 1265 +#define _gather_27_type 1266 +#define _tmp_29_type 1267 +#define _loop0_31_type 1268 +#define _gather_30_type 1269 +#define _tmp_32_type 1270 +#define _loop1_33_type 1271 +#define _tmp_34_type 
1272 +#define _tmp_35_type 1273 +#define _tmp_36_type 1274 +#define _loop0_37_type 1275 +#define _loop0_38_type 1276 +#define _loop0_39_type 1277 +#define _loop1_40_type 1278 +#define _loop0_41_type 1279 +#define _loop1_42_type 1280 +#define _loop1_43_type 1281 +#define _loop1_44_type 1282 +#define _loop0_45_type 1283 +#define _loop1_46_type 1284 +#define _loop0_47_type 1285 +#define _loop1_48_type 1286 +#define _loop0_49_type 1287 +#define _loop0_50_type 1288 +#define _loop1_51_type 1289 +#define _loop0_53_type 1290 +#define _gather_52_type 1291 +#define _loop0_55_type 1292 +#define _gather_54_type 1293 +#define _loop0_57_type 1294 +#define _gather_56_type 1295 +#define _loop0_59_type 1296 +#define _gather_58_type 1297 +#define _tmp_60_type 1298 +#define _loop1_61_type 1299 +#define _loop1_62_type 1300 +#define _tmp_63_type 1301 +#define _tmp_64_type 1302 +#define _loop1_65_type 1303 +#define _loop0_67_type 1304 +#define _gather_66_type 1305 +#define _tmp_68_type 1306 +#define _tmp_69_type 1307 +#define _tmp_70_type 1308 +#define _tmp_71_type 1309 +#define _loop0_73_type 1310 +#define _gather_72_type 1311 +#define _loop0_75_type 1312 +#define _gather_74_type 1313 +#define _tmp_76_type 1314 +#define _loop0_78_type 1315 +#define _gather_77_type 1316 +#define _loop0_80_type 1317 +#define _gather_79_type 1318 +#define _loop1_81_type 1319 +#define _loop1_82_type 1320 +#define _loop0_84_type 1321 +#define _gather_83_type 1322 +#define _loop1_85_type 1323 +#define _loop1_86_type 1324 +#define _loop1_87_type 1325 +#define _tmp_88_type 1326 +#define _loop0_90_type 1327 +#define _gather_89_type 1328 +#define _tmp_91_type 1329 +#define _tmp_92_type 1330 +#define _tmp_93_type 1331 +#define _tmp_94_type 1332 +#define _tmp_95_type 1333 +#define _tmp_96_type 1334 +#define _loop0_97_type 1335 +#define _loop0_98_type 1336 +#define _loop0_99_type 1337 +#define _loop1_100_type 1338 +#define _loop0_101_type 1339 +#define _loop1_102_type 1340 +#define _loop1_103_type 1341 +#define _loop1_104_type 1342 +#define _loop0_105_type 1343 +#define _loop1_106_type 1344 +#define _loop0_107_type 1345 +#define _loop1_108_type 1346 +#define _loop0_109_type 1347 +#define _loop1_110_type 1348 +#define _tmp_111_type 1349 +#define _loop0_112_type 1350 +#define _loop1_113_type 1351 +#define _tmp_114_type 1352 +#define _loop0_116_type 1353 +#define _gather_115_type 1354 +#define _loop1_117_type 1355 +#define _loop0_118_type 1356 +#define _loop0_119_type 1357 +#define _tmp_120_type 1358 +#define _loop0_122_type 1359 +#define _gather_121_type 1360 +#define _tmp_123_type 1361 +#define _loop0_125_type 1362 +#define _gather_124_type 1363 +#define _loop0_127_type 1364 +#define _gather_126_type 1365 +#define _loop0_129_type 1366 +#define _gather_128_type 1367 +#define _loop0_131_type 1368 +#define _gather_130_type 1369 +#define _loop0_132_type 1370 +#define _loop0_134_type 1371 +#define _gather_133_type 1372 +#define _loop1_135_type 1373 +#define _tmp_136_type 1374 +#define _loop0_138_type 1375 +#define _gather_137_type 1376 +#define _loop0_140_type 1377 +#define _gather_139_type 1378 +#define _loop0_142_type 1379 +#define _gather_141_type 1380 +#define _loop0_144_type 1381 +#define _gather_143_type 1382 +#define _loop0_146_type 1383 +#define _gather_145_type 1384 +#define _tmp_147_type 1385 +#define _tmp_148_type 1386 +#define _tmp_149_type 1387 +#define _tmp_150_type 1388 +#define _tmp_151_type 1389 +#define _tmp_152_type 1390 +#define _tmp_153_type 1391 +#define _tmp_154_type 1392 +#define _tmp_155_type 1393 +#define _tmp_156_type 
1394 +#define _tmp_157_type 1395 +#define _tmp_158_type 1396 +#define _loop0_159_type 1397 +#define _loop0_160_type 1398 +#define _loop0_161_type 1399 +#define _tmp_162_type 1400 +#define _tmp_163_type 1401 +#define _tmp_164_type 1402 +#define _tmp_165_type 1403 +#define _tmp_166_type 1404 +#define _loop0_167_type 1405 +#define _loop0_168_type 1406 +#define _loop0_169_type 1407 +#define _loop1_170_type 1408 +#define _tmp_171_type 1409 +#define _loop0_172_type 1410 +#define _tmp_173_type 1411 +#define _loop0_174_type 1412 +#define _loop1_175_type 1413 +#define _tmp_176_type 1414 +#define _tmp_177_type 1415 +#define _tmp_178_type 1416 +#define _loop0_179_type 1417 +#define _tmp_180_type 1418 +#define _tmp_181_type 1419 +#define _loop1_182_type 1420 +#define _tmp_183_type 1421 +#define _loop0_184_type 1422 +#define _loop0_185_type 1423 +#define _loop0_186_type 1424 +#define _loop0_188_type 1425 +#define _gather_187_type 1426 +#define _tmp_189_type 1427 +#define _loop0_190_type 1428 +#define _tmp_191_type 1429 +#define _loop0_192_type 1430 +#define _loop1_193_type 1431 +#define _loop1_194_type 1432 +#define _tmp_195_type 1433 +#define _tmp_196_type 1434 +#define _loop0_197_type 1435 +#define _tmp_198_type 1436 +#define _tmp_199_type 1437 +#define _tmp_200_type 1438 +#define _loop0_202_type 1439 +#define _gather_201_type 1440 +#define _loop0_204_type 1441 +#define _gather_203_type 1442 +#define _loop0_206_type 1443 +#define _gather_205_type 1444 +#define _loop0_208_type 1445 +#define _gather_207_type 1446 +#define _tmp_209_type 1447 +#define _loop0_210_type 1448 +#define _loop1_211_type 1449 +#define _tmp_212_type 1450 +#define _loop0_213_type 1451 +#define _loop1_214_type 1452 +#define _tmp_215_type 1453 +#define _tmp_216_type 1454 +#define _tmp_217_type 1455 +#define _tmp_218_type 1456 +#define _tmp_219_type 1457 +#define _tmp_220_type 1458 +#define _tmp_221_type 1459 +#define _tmp_222_type 1460 +#define _tmp_223_type 1461 +#define _tmp_224_type 1462 +#define _loop0_226_type 1463 +#define _gather_225_type 1464 +#define _tmp_227_type 1465 +#define _tmp_228_type 1466 +#define _tmp_229_type 1467 +#define _tmp_230_type 1468 +#define _tmp_231_type 1469 +#define _tmp_232_type 1470 +#define _tmp_233_type 1471 +#define _tmp_234_type 1472 +#define _tmp_235_type 1473 +#define _tmp_236_type 1474 +#define _tmp_237_type 1475 +#define _tmp_238_type 1476 +#define _tmp_239_type 1477 +#define _loop0_240_type 1478 +#define _tmp_241_type 1479 +#define _tmp_242_type 1480 +#define _tmp_243_type 1481 +#define _tmp_244_type 1482 +#define _tmp_245_type 1483 +#define _tmp_246_type 1484 +#define _tmp_247_type 1485 +#define _tmp_248_type 1486 +#define _tmp_249_type 1487 +#define _tmp_250_type 1488 +#define _tmp_251_type 1489 +#define _tmp_252_type 1490 +#define _tmp_253_type 1491 +#define _tmp_254_type 1492 +#define _tmp_255_type 1493 +#define _tmp_256_type 1494 +#define _tmp_257_type 1495 +#define _tmp_258_type 1496 +#define _tmp_259_type 1497 +#define _tmp_260_type 1498 +#define _tmp_261_type 1499 +#define _tmp_262_type 1500 +#define _tmp_263_type 1501 +#define _tmp_264_type 1502 +#define _tmp_265_type 1503 +#define _tmp_266_type 1504 +#define _tmp_267_type 1505 +#define _tmp_268_type 1506 +#define _tmp_269_type 1507 +#define _tmp_270_type 1508 +#define _tmp_271_type 1509 +#define _tmp_272_type 1510 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -707,6 +736,12 @@ static arg_ty lambda_param_no_default_rule(Parser *p); static NameDefaultPair* lambda_param_with_default_rule(Parser *p); 
static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p); static arg_ty lambda_param_rule(Parser *p); +static expr_ty fstring_middle_rule(Parser *p); +static expr_ty fstring_replacement_field_rule(Parser *p); +static ResultTokenWithMetadata* fstring_conversion_rule(Parser *p); +static ResultTokenWithMetadata* fstring_full_format_spec_rule(Parser *p); +static expr_ty fstring_format_spec_rule(Parser *p); +static expr_ty string_rule(Parser *p); static expr_ty strings_rule(Parser *p); static expr_ty list_rule(Parser *p); static expr_ty tuple_rule(Parser *p); @@ -791,12 +826,14 @@ static void *invalid_class_def_raw_rule(Parser *p); static void *invalid_double_starred_kvpairs_rule(Parser *p); static void *invalid_kvpair_rule(Parser *p); static void *invalid_starred_expression_rule(Parser *p); +static void *invalid_replacement_field_rule(Parser *p); +static void *invalid_conversion_character_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); -static asdl_seq *_loop1_3_rule(Parser *p); -static asdl_seq *_loop0_5_rule(Parser *p); -static asdl_seq *_gather_4_rule(Parser *p); -static void *_tmp_6_rule(Parser *p); +static asdl_seq *_loop0_3_rule(Parser *p); +static asdl_seq *_loop1_4_rule(Parser *p); +static asdl_seq *_loop0_6_rule(Parser *p); +static asdl_seq *_gather_5_rule(Parser *p); static void *_tmp_7_rule(Parser *p); static void *_tmp_8_rule(Parser *p); static void *_tmp_9_rule(Parser *p); @@ -804,139 +841,139 @@ static void *_tmp_10_rule(Parser *p); static void *_tmp_11_rule(Parser *p); static void *_tmp_12_rule(Parser *p); static void *_tmp_13_rule(Parser *p); -static asdl_seq *_loop1_14_rule(Parser *p); -static void *_tmp_15_rule(Parser *p); +static void *_tmp_14_rule(Parser *p); +static asdl_seq *_loop1_15_rule(Parser *p); static void *_tmp_16_rule(Parser *p); static void *_tmp_17_rule(Parser *p); -static asdl_seq *_loop0_19_rule(Parser *p); -static asdl_seq *_gather_18_rule(Parser *p); -static asdl_seq *_loop0_21_rule(Parser *p); -static asdl_seq *_gather_20_rule(Parser *p); -static void *_tmp_22_rule(Parser *p); +static void *_tmp_18_rule(Parser *p); +static asdl_seq *_loop0_20_rule(Parser *p); +static asdl_seq *_gather_19_rule(Parser *p); +static asdl_seq *_loop0_22_rule(Parser *p); +static asdl_seq *_gather_21_rule(Parser *p); static void *_tmp_23_rule(Parser *p); -static asdl_seq *_loop0_24_rule(Parser *p); -static asdl_seq *_loop1_25_rule(Parser *p); -static asdl_seq *_loop0_27_rule(Parser *p); -static asdl_seq *_gather_26_rule(Parser *p); -static void *_tmp_28_rule(Parser *p); -static asdl_seq *_loop0_30_rule(Parser *p); -static asdl_seq *_gather_29_rule(Parser *p); -static void *_tmp_31_rule(Parser *p); -static asdl_seq *_loop1_32_rule(Parser *p); -static void *_tmp_33_rule(Parser *p); +static void *_tmp_24_rule(Parser *p); +static asdl_seq *_loop0_25_rule(Parser *p); +static asdl_seq *_loop1_26_rule(Parser *p); +static asdl_seq *_loop0_28_rule(Parser *p); +static asdl_seq *_gather_27_rule(Parser *p); +static void *_tmp_29_rule(Parser *p); +static asdl_seq *_loop0_31_rule(Parser *p); +static asdl_seq *_gather_30_rule(Parser *p); +static void *_tmp_32_rule(Parser *p); +static asdl_seq *_loop1_33_rule(Parser *p); static void *_tmp_34_rule(Parser *p); static void *_tmp_35_rule(Parser *p); -static asdl_seq *_loop0_36_rule(Parser *p); +static void *_tmp_36_rule(Parser *p); static asdl_seq *_loop0_37_rule(Parser *p); static asdl_seq *_loop0_38_rule(Parser *p); -static asdl_seq *_loop1_39_rule(Parser *p); -static asdl_seq 
*_loop0_40_rule(Parser *p); -static asdl_seq *_loop1_41_rule(Parser *p); +static asdl_seq *_loop0_39_rule(Parser *p); +static asdl_seq *_loop1_40_rule(Parser *p); +static asdl_seq *_loop0_41_rule(Parser *p); static asdl_seq *_loop1_42_rule(Parser *p); static asdl_seq *_loop1_43_rule(Parser *p); -static asdl_seq *_loop0_44_rule(Parser *p); -static asdl_seq *_loop1_45_rule(Parser *p); -static asdl_seq *_loop0_46_rule(Parser *p); -static asdl_seq *_loop1_47_rule(Parser *p); -static asdl_seq *_loop0_48_rule(Parser *p); +static asdl_seq *_loop1_44_rule(Parser *p); +static asdl_seq *_loop0_45_rule(Parser *p); +static asdl_seq *_loop1_46_rule(Parser *p); +static asdl_seq *_loop0_47_rule(Parser *p); +static asdl_seq *_loop1_48_rule(Parser *p); static asdl_seq *_loop0_49_rule(Parser *p); -static asdl_seq *_loop1_50_rule(Parser *p); -static asdl_seq *_loop0_52_rule(Parser *p); -static asdl_seq *_gather_51_rule(Parser *p); -static asdl_seq *_loop0_54_rule(Parser *p); -static asdl_seq *_gather_53_rule(Parser *p); -static asdl_seq *_loop0_56_rule(Parser *p); -static asdl_seq *_gather_55_rule(Parser *p); -static asdl_seq *_loop0_58_rule(Parser *p); -static asdl_seq *_gather_57_rule(Parser *p); -static void *_tmp_59_rule(Parser *p); -static asdl_seq *_loop1_60_rule(Parser *p); +static asdl_seq *_loop0_50_rule(Parser *p); +static asdl_seq *_loop1_51_rule(Parser *p); +static asdl_seq *_loop0_53_rule(Parser *p); +static asdl_seq *_gather_52_rule(Parser *p); +static asdl_seq *_loop0_55_rule(Parser *p); +static asdl_seq *_gather_54_rule(Parser *p); +static asdl_seq *_loop0_57_rule(Parser *p); +static asdl_seq *_gather_56_rule(Parser *p); +static asdl_seq *_loop0_59_rule(Parser *p); +static asdl_seq *_gather_58_rule(Parser *p); +static void *_tmp_60_rule(Parser *p); static asdl_seq *_loop1_61_rule(Parser *p); -static void *_tmp_62_rule(Parser *p); +static asdl_seq *_loop1_62_rule(Parser *p); static void *_tmp_63_rule(Parser *p); -static asdl_seq *_loop1_64_rule(Parser *p); -static asdl_seq *_loop0_66_rule(Parser *p); -static asdl_seq *_gather_65_rule(Parser *p); -static void *_tmp_67_rule(Parser *p); +static void *_tmp_64_rule(Parser *p); +static asdl_seq *_loop1_65_rule(Parser *p); +static asdl_seq *_loop0_67_rule(Parser *p); +static asdl_seq *_gather_66_rule(Parser *p); static void *_tmp_68_rule(Parser *p); static void *_tmp_69_rule(Parser *p); static void *_tmp_70_rule(Parser *p); -static asdl_seq *_loop0_72_rule(Parser *p); -static asdl_seq *_gather_71_rule(Parser *p); -static asdl_seq *_loop0_74_rule(Parser *p); -static asdl_seq *_gather_73_rule(Parser *p); -static void *_tmp_75_rule(Parser *p); -static asdl_seq *_loop0_77_rule(Parser *p); -static asdl_seq *_gather_76_rule(Parser *p); -static asdl_seq *_loop0_79_rule(Parser *p); -static asdl_seq *_gather_78_rule(Parser *p); -static asdl_seq *_loop1_80_rule(Parser *p); +static void *_tmp_71_rule(Parser *p); +static asdl_seq *_loop0_73_rule(Parser *p); +static asdl_seq *_gather_72_rule(Parser *p); +static asdl_seq *_loop0_75_rule(Parser *p); +static asdl_seq *_gather_74_rule(Parser *p); +static void *_tmp_76_rule(Parser *p); +static asdl_seq *_loop0_78_rule(Parser *p); +static asdl_seq *_gather_77_rule(Parser *p); +static asdl_seq *_loop0_80_rule(Parser *p); +static asdl_seq *_gather_79_rule(Parser *p); static asdl_seq *_loop1_81_rule(Parser *p); -static asdl_seq *_loop0_83_rule(Parser *p); -static asdl_seq *_gather_82_rule(Parser *p); -static asdl_seq *_loop1_84_rule(Parser *p); +static asdl_seq *_loop1_82_rule(Parser *p); +static asdl_seq 
*_loop0_84_rule(Parser *p); +static asdl_seq *_gather_83_rule(Parser *p); static asdl_seq *_loop1_85_rule(Parser *p); static asdl_seq *_loop1_86_rule(Parser *p); -static void *_tmp_87_rule(Parser *p); -static asdl_seq *_loop0_89_rule(Parser *p); -static asdl_seq *_gather_88_rule(Parser *p); -static void *_tmp_90_rule(Parser *p); +static asdl_seq *_loop1_87_rule(Parser *p); +static void *_tmp_88_rule(Parser *p); +static asdl_seq *_loop0_90_rule(Parser *p); +static asdl_seq *_gather_89_rule(Parser *p); static void *_tmp_91_rule(Parser *p); static void *_tmp_92_rule(Parser *p); static void *_tmp_93_rule(Parser *p); static void *_tmp_94_rule(Parser *p); -static asdl_seq *_loop0_95_rule(Parser *p); -static asdl_seq *_loop0_96_rule(Parser *p); +static void *_tmp_95_rule(Parser *p); +static void *_tmp_96_rule(Parser *p); static asdl_seq *_loop0_97_rule(Parser *p); -static asdl_seq *_loop1_98_rule(Parser *p); +static asdl_seq *_loop0_98_rule(Parser *p); static asdl_seq *_loop0_99_rule(Parser *p); static asdl_seq *_loop1_100_rule(Parser *p); -static asdl_seq *_loop1_101_rule(Parser *p); +static asdl_seq *_loop0_101_rule(Parser *p); static asdl_seq *_loop1_102_rule(Parser *p); -static asdl_seq *_loop0_103_rule(Parser *p); +static asdl_seq *_loop1_103_rule(Parser *p); static asdl_seq *_loop1_104_rule(Parser *p); static asdl_seq *_loop0_105_rule(Parser *p); static asdl_seq *_loop1_106_rule(Parser *p); static asdl_seq *_loop0_107_rule(Parser *p); static asdl_seq *_loop1_108_rule(Parser *p); -static asdl_seq *_loop1_109_rule(Parser *p); -static void *_tmp_110_rule(Parser *p); +static asdl_seq *_loop0_109_rule(Parser *p); +static asdl_seq *_loop1_110_rule(Parser *p); +static void *_tmp_111_rule(Parser *p); static asdl_seq *_loop0_112_rule(Parser *p); -static asdl_seq *_gather_111_rule(Parser *p); static asdl_seq *_loop1_113_rule(Parser *p); -static asdl_seq *_loop0_114_rule(Parser *p); -static asdl_seq *_loop0_115_rule(Parser *p); -static void *_tmp_116_rule(Parser *p); +static void *_tmp_114_rule(Parser *p); +static asdl_seq *_loop0_116_rule(Parser *p); +static asdl_seq *_gather_115_rule(Parser *p); +static asdl_seq *_loop1_117_rule(Parser *p); static asdl_seq *_loop0_118_rule(Parser *p); -static asdl_seq *_gather_117_rule(Parser *p); -static void *_tmp_119_rule(Parser *p); -static asdl_seq *_loop0_121_rule(Parser *p); -static asdl_seq *_gather_120_rule(Parser *p); -static asdl_seq *_loop0_123_rule(Parser *p); -static asdl_seq *_gather_122_rule(Parser *p); +static asdl_seq *_loop0_119_rule(Parser *p); +static void *_tmp_120_rule(Parser *p); +static asdl_seq *_loop0_122_rule(Parser *p); +static asdl_seq *_gather_121_rule(Parser *p); +static void *_tmp_123_rule(Parser *p); static asdl_seq *_loop0_125_rule(Parser *p); static asdl_seq *_gather_124_rule(Parser *p); static asdl_seq *_loop0_127_rule(Parser *p); static asdl_seq *_gather_126_rule(Parser *p); -static asdl_seq *_loop0_128_rule(Parser *p); -static asdl_seq *_loop0_130_rule(Parser *p); -static asdl_seq *_gather_129_rule(Parser *p); -static asdl_seq *_loop1_131_rule(Parser *p); -static void *_tmp_132_rule(Parser *p); +static asdl_seq *_loop0_129_rule(Parser *p); +static asdl_seq *_gather_128_rule(Parser *p); +static asdl_seq *_loop0_131_rule(Parser *p); +static asdl_seq *_gather_130_rule(Parser *p); +static asdl_seq *_loop0_132_rule(Parser *p); static asdl_seq *_loop0_134_rule(Parser *p); static asdl_seq *_gather_133_rule(Parser *p); -static asdl_seq *_loop0_136_rule(Parser *p); -static asdl_seq *_gather_135_rule(Parser *p); +static asdl_seq 
*_loop1_135_rule(Parser *p); +static void *_tmp_136_rule(Parser *p); static asdl_seq *_loop0_138_rule(Parser *p); static asdl_seq *_gather_137_rule(Parser *p); static asdl_seq *_loop0_140_rule(Parser *p); static asdl_seq *_gather_139_rule(Parser *p); static asdl_seq *_loop0_142_rule(Parser *p); static asdl_seq *_gather_141_rule(Parser *p); -static void *_tmp_143_rule(Parser *p); -static void *_tmp_144_rule(Parser *p); -static void *_tmp_145_rule(Parser *p); -static void *_tmp_146_rule(Parser *p); +static asdl_seq *_loop0_144_rule(Parser *p); +static asdl_seq *_gather_143_rule(Parser *p); +static asdl_seq *_loop0_146_rule(Parser *p); +static asdl_seq *_gather_145_rule(Parser *p); static void *_tmp_147_rule(Parser *p); static void *_tmp_148_rule(Parser *p); static void *_tmp_149_rule(Parser *p); @@ -944,79 +981,79 @@ static void *_tmp_150_rule(Parser *p); static void *_tmp_151_rule(Parser *p); static void *_tmp_152_rule(Parser *p); static void *_tmp_153_rule(Parser *p); -static asdl_seq *_loop0_154_rule(Parser *p); -static asdl_seq *_loop0_155_rule(Parser *p); -static asdl_seq *_loop0_156_rule(Parser *p); +static void *_tmp_154_rule(Parser *p); +static void *_tmp_155_rule(Parser *p); +static void *_tmp_156_rule(Parser *p); static void *_tmp_157_rule(Parser *p); static void *_tmp_158_rule(Parser *p); -static void *_tmp_159_rule(Parser *p); -static void *_tmp_160_rule(Parser *p); -static void *_tmp_161_rule(Parser *p); -static asdl_seq *_loop0_162_rule(Parser *p); -static asdl_seq *_loop0_163_rule(Parser *p); -static asdl_seq *_loop0_164_rule(Parser *p); -static asdl_seq *_loop1_165_rule(Parser *p); +static asdl_seq *_loop0_159_rule(Parser *p); +static asdl_seq *_loop0_160_rule(Parser *p); +static asdl_seq *_loop0_161_rule(Parser *p); +static void *_tmp_162_rule(Parser *p); +static void *_tmp_163_rule(Parser *p); +static void *_tmp_164_rule(Parser *p); +static void *_tmp_165_rule(Parser *p); static void *_tmp_166_rule(Parser *p); static asdl_seq *_loop0_167_rule(Parser *p); -static void *_tmp_168_rule(Parser *p); +static asdl_seq *_loop0_168_rule(Parser *p); static asdl_seq *_loop0_169_rule(Parser *p); static asdl_seq *_loop1_170_rule(Parser *p); static void *_tmp_171_rule(Parser *p); -static void *_tmp_172_rule(Parser *p); +static asdl_seq *_loop0_172_rule(Parser *p); static void *_tmp_173_rule(Parser *p); static asdl_seq *_loop0_174_rule(Parser *p); -static void *_tmp_175_rule(Parser *p); +static asdl_seq *_loop1_175_rule(Parser *p); static void *_tmp_176_rule(Parser *p); -static asdl_seq *_loop1_177_rule(Parser *p); +static void *_tmp_177_rule(Parser *p); static void *_tmp_178_rule(Parser *p); static asdl_seq *_loop0_179_rule(Parser *p); -static asdl_seq *_loop0_180_rule(Parser *p); -static asdl_seq *_loop0_181_rule(Parser *p); -static asdl_seq *_loop0_183_rule(Parser *p); -static asdl_seq *_gather_182_rule(Parser *p); -static void *_tmp_184_rule(Parser *p); +static void *_tmp_180_rule(Parser *p); +static void *_tmp_181_rule(Parser *p); +static asdl_seq *_loop1_182_rule(Parser *p); +static void *_tmp_183_rule(Parser *p); +static asdl_seq *_loop0_184_rule(Parser *p); static asdl_seq *_loop0_185_rule(Parser *p); -static void *_tmp_186_rule(Parser *p); -static asdl_seq *_loop0_187_rule(Parser *p); -static asdl_seq *_loop1_188_rule(Parser *p); -static asdl_seq *_loop1_189_rule(Parser *p); -static void *_tmp_190_rule(Parser *p); +static asdl_seq *_loop0_186_rule(Parser *p); +static asdl_seq *_loop0_188_rule(Parser *p); +static asdl_seq *_gather_187_rule(Parser *p); +static void 
*_tmp_189_rule(Parser *p); +static asdl_seq *_loop0_190_rule(Parser *p); static void *_tmp_191_rule(Parser *p); static asdl_seq *_loop0_192_rule(Parser *p); -static void *_tmp_193_rule(Parser *p); -static void *_tmp_194_rule(Parser *p); +static asdl_seq *_loop1_193_rule(Parser *p); +static asdl_seq *_loop1_194_rule(Parser *p); static void *_tmp_195_rule(Parser *p); +static void *_tmp_196_rule(Parser *p); static asdl_seq *_loop0_197_rule(Parser *p); -static asdl_seq *_gather_196_rule(Parser *p); -static asdl_seq *_loop0_199_rule(Parser *p); -static asdl_seq *_gather_198_rule(Parser *p); -static asdl_seq *_loop0_201_rule(Parser *p); -static asdl_seq *_gather_200_rule(Parser *p); -static asdl_seq *_loop0_203_rule(Parser *p); -static asdl_seq *_gather_202_rule(Parser *p); -static void *_tmp_204_rule(Parser *p); -static asdl_seq *_loop0_205_rule(Parser *p); -static asdl_seq *_loop1_206_rule(Parser *p); -static void *_tmp_207_rule(Parser *p); +static void *_tmp_198_rule(Parser *p); +static void *_tmp_199_rule(Parser *p); +static void *_tmp_200_rule(Parser *p); +static asdl_seq *_loop0_202_rule(Parser *p); +static asdl_seq *_gather_201_rule(Parser *p); +static asdl_seq *_loop0_204_rule(Parser *p); +static asdl_seq *_gather_203_rule(Parser *p); +static asdl_seq *_loop0_206_rule(Parser *p); +static asdl_seq *_gather_205_rule(Parser *p); static asdl_seq *_loop0_208_rule(Parser *p); -static asdl_seq *_loop1_209_rule(Parser *p); -static void *_tmp_210_rule(Parser *p); -static void *_tmp_211_rule(Parser *p); +static asdl_seq *_gather_207_rule(Parser *p); +static void *_tmp_209_rule(Parser *p); +static asdl_seq *_loop0_210_rule(Parser *p); +static asdl_seq *_loop1_211_rule(Parser *p); static void *_tmp_212_rule(Parser *p); -static void *_tmp_213_rule(Parser *p); -static void *_tmp_214_rule(Parser *p); +static asdl_seq *_loop0_213_rule(Parser *p); +static asdl_seq *_loop1_214_rule(Parser *p); static void *_tmp_215_rule(Parser *p); static void *_tmp_216_rule(Parser *p); static void *_tmp_217_rule(Parser *p); static void *_tmp_218_rule(Parser *p); static void *_tmp_219_rule(Parser *p); -static asdl_seq *_loop0_221_rule(Parser *p); -static asdl_seq *_gather_220_rule(Parser *p); +static void *_tmp_220_rule(Parser *p); +static void *_tmp_221_rule(Parser *p); static void *_tmp_222_rule(Parser *p); static void *_tmp_223_rule(Parser *p); static void *_tmp_224_rule(Parser *p); -static void *_tmp_225_rule(Parser *p); -static void *_tmp_226_rule(Parser *p); +static asdl_seq *_loop0_226_rule(Parser *p); +static asdl_seq *_gather_225_rule(Parser *p); static void *_tmp_227_rule(Parser *p); static void *_tmp_228_rule(Parser *p); static void *_tmp_229_rule(Parser *p); @@ -1030,7 +1067,7 @@ static void *_tmp_236_rule(Parser *p); static void *_tmp_237_rule(Parser *p); static void *_tmp_238_rule(Parser *p); static void *_tmp_239_rule(Parser *p); -static void *_tmp_240_rule(Parser *p); +static asdl_seq *_loop0_240_rule(Parser *p); static void *_tmp_241_rule(Parser *p); static void *_tmp_242_rule(Parser *p); static void *_tmp_243_rule(Parser *p); @@ -1042,6 +1079,27 @@ static void *_tmp_248_rule(Parser *p); static void *_tmp_249_rule(Parser *p); static void *_tmp_250_rule(Parser *p); static void *_tmp_251_rule(Parser *p); +static void *_tmp_252_rule(Parser *p); +static void *_tmp_253_rule(Parser *p); +static void *_tmp_254_rule(Parser *p); +static void *_tmp_255_rule(Parser *p); +static void *_tmp_256_rule(Parser *p); +static void *_tmp_257_rule(Parser *p); +static void *_tmp_258_rule(Parser *p); +static void 
*_tmp_259_rule(Parser *p); +static void *_tmp_260_rule(Parser *p); +static void *_tmp_261_rule(Parser *p); +static void *_tmp_262_rule(Parser *p); +static void *_tmp_263_rule(Parser *p); +static void *_tmp_264_rule(Parser *p); +static void *_tmp_265_rule(Parser *p); +static void *_tmp_266_rule(Parser *p); +static void *_tmp_267_rule(Parser *p); +static void *_tmp_268_rule(Parser *p); +static void *_tmp_269_rule(Parser *p); +static void *_tmp_270_rule(Parser *p); +static void *_tmp_271_rule(Parser *p); +static void *_tmp_272_rule(Parser *p); // file: statements? $ @@ -1247,7 +1305,7 @@ func_type_rule(Parser *p) return _res; } -// fstring: star_expressions +// fstring: FSTRING_START fstring_middle* FSTRING_END static expr_ty fstring_rule(Parser *p) { @@ -1261,24 +1319,35 @@ fstring_rule(Parser *p) } expr_ty _res = NULL; int _mark = p->mark; - { // star_expressions + { // FSTRING_START fstring_middle* FSTRING_END if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty star_expressions_var; + D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END")); + Token * a; + asdl_seq * b; + Token * c; if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions + (a = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START' + && + (b = _loop0_3_rule(p)) // fstring_middle* + && + (c = _PyPegen_expect_token(p, FSTRING_END)) // token='FSTRING_END' ) { - D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - _res = star_expressions_var; + D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END")); + _res = _PyPegen_joined_str ( p , a , ( asdl_expr_seq* ) b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s fstring[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END")); } _res = NULL; done: @@ -1308,7 +1377,7 @@ statements_rule(Parser *p) D(fprintf(stderr, "%*c> statements[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement+")); asdl_seq * a; if ( - (a = _loop1_3_rule(p)) // statement+ + (a = _loop1_4_rule(p)) // statement+ ) { D(fprintf(stderr, "%*c+ statements[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement+")); @@ -1584,7 +1653,7 @@ simple_stmts_rule(Parser *p) asdl_stmt_seq* a; Token * newline_var; if ( - (a = (asdl_stmt_seq*)_gather_4_rule(p)) // ';'.simple_stmt+ + (a = (asdl_stmt_seq*)_gather_5_rule(p)) // ';'.simple_stmt+ && (_opt_var = _PyPegen_expect_token(p, 13), !p->error_indicator) // ';'? 
&& @@ -1731,7 +1800,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); stmt_ty import_stmt_var; if ( - _PyPegen_lookahead(1, _tmp_6_rule, p) + _PyPegen_lookahead(1, _tmp_7_rule, p) && (import_stmt_var = import_stmt_rule(p)) // import_stmt ) @@ -1806,7 +1875,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); stmt_ty del_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 603) // token='del' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 604) // token='del' && (del_stmt_var = del_stmt_rule(p)) // del_stmt ) @@ -2006,7 +2075,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); stmt_ty function_def_var; if ( - _PyPegen_lookahead(1, _tmp_7_rule, p) + _PyPegen_lookahead(1, _tmp_8_rule, p) && (function_def_var = function_def_rule(p)) // function_def ) @@ -2027,7 +2096,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 641) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 642) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2048,7 +2117,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def")); stmt_ty class_def_var; if ( - _PyPegen_lookahead(1, _tmp_8_rule, p) + _PyPegen_lookahead(1, _tmp_9_rule, p) && (class_def_var = class_def_rule(p)) // class_def ) @@ -2069,7 +2138,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); stmt_ty with_stmt_var; if ( - _PyPegen_lookahead(1, _tmp_9_rule, p) + _PyPegen_lookahead(1, _tmp_10_rule, p) && (with_stmt_var = with_stmt_rule(p)) // with_stmt ) @@ -2090,7 +2159,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); stmt_ty for_stmt_var; if ( - _PyPegen_lookahead(1, _tmp_10_rule, p) + _PyPegen_lookahead(1, _tmp_11_rule, p) && (for_stmt_var = for_stmt_rule(p)) // for_stmt ) @@ -2111,7 +2180,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 623) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 624) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2132,7 +2201,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 646) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 647) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -2215,7 +2284,7 @@ assignment_rule(Parser *p) && (b = expression_rule(p)) // expression && - (c = _tmp_11_rule(p), !p->error_indicator) // ['=' annotated_rhs] + (c = _tmp_12_rule(p), !p->error_indicator) // ['=' annotated_rhs] ) { D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", 
p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); @@ -2251,13 +2320,13 @@ assignment_rule(Parser *p) expr_ty b; void *c; if ( - (a = _tmp_12_rule(p)) // '(' single_target ')' | single_subscript_attribute_target + (a = _tmp_13_rule(p)) // '(' single_target ')' | single_subscript_attribute_target && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression && - (c = _tmp_13_rule(p), !p->error_indicator) // ['=' annotated_rhs] + (c = _tmp_14_rule(p), !p->error_indicator) // ['=' annotated_rhs] ) { D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); @@ -2292,9 +2361,9 @@ assignment_rule(Parser *p) void *b; void *tc; if ( - (a = (asdl_expr_seq*)_loop1_14_rule(p)) // ((star_targets '='))+ + (a = (asdl_expr_seq*)_loop1_15_rule(p)) // ((star_targets '='))+ && - (b = _tmp_15_rule(p)) // yield_expr | star_expressions + (b = _tmp_16_rule(p)) // yield_expr | star_expressions && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' && @@ -2340,7 +2409,7 @@ assignment_rule(Parser *p) && (_cut_var = 1) && - (c = _tmp_16_rule(p)) // yield_expr | star_expressions + (c = _tmp_17_rule(p)) // yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign ~ (yield_expr | star_expressions)")); @@ -2899,7 +2968,7 @@ raise_stmt_rule(Parser *p) && (a = expression_rule(p)) // expression && - (b = _tmp_17_rule(p), !p->error_indicator) // ['from' expression] + (b = _tmp_18_rule(p), !p->error_indicator) // ['from' expression] ) { D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); @@ -2997,7 +3066,7 @@ global_stmt_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 523)) // token='global' && - (a = (asdl_expr_seq*)_gather_18_rule(p)) // ','.NAME+ + (a = (asdl_expr_seq*)_gather_19_rule(p)) // ','.NAME+ ) { D(fprintf(stderr, "%*c+ global_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+")); @@ -3062,7 +3131,7 @@ nonlocal_stmt_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 524)) // token='nonlocal' && - (a = (asdl_expr_seq*)_gather_20_rule(p)) // ','.NAME+ + (a = (asdl_expr_seq*)_gather_21_rule(p)) // ','.NAME+ ) { D(fprintf(stderr, "%*c+ nonlocal_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+")); @@ -3125,11 +3194,11 @@ del_stmt_rule(Parser *p) Token * _keyword; asdl_expr_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 603)) // token='del' + (_keyword = _PyPegen_expect_token(p, 604)) // token='del' && (a = del_targets_rule(p)) // del_targets && - _PyPegen_lookahead(1, _tmp_22_rule, p) + _PyPegen_lookahead(1, _tmp_23_rule, p) ) { D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets &(';' | NEWLINE)")); @@ -3278,7 +3347,7 @@ assert_stmt_rule(Parser *p) && (a = expression_rule(p)) // expression && - (b = _tmp_23_rule(p), !p->error_indicator) // [',' expression] + (b = _tmp_24_rule(p), !p->error_indicator) // [',' expression] ) { D(fprintf(stderr, "%*c+ assert_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]")); @@ -3418,7 +3487,7 @@ import_name_rule(Parser *p) Token * _keyword; asdl_alias_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 
606)) // token='import' + (_keyword = _PyPegen_expect_token(p, 607)) // token='import' && (a = dotted_as_names_rule(p)) // dotted_as_names ) @@ -3488,13 +3557,13 @@ import_from_rule(Parser *p) expr_ty b; asdl_alias_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 607)) // token='from' + (_keyword = _PyPegen_expect_token(p, 608)) // token='from' && - (a = _loop0_24_rule(p)) // (('.' | '...'))* + (a = _loop0_25_rule(p)) // (('.' | '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) @@ -3532,11 +3601,11 @@ import_from_rule(Parser *p) asdl_seq * a; asdl_alias_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 607)) // token='from' + (_keyword = _PyPegen_expect_token(p, 608)) // token='from' && - (a = _loop1_25_rule(p)) // (('.' | '...'))+ + (a = _loop1_26_rule(p)) // (('.' | '...'))+ && - (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) @@ -3731,7 +3800,7 @@ import_from_as_names_rule(Parser *p) D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); asdl_alias_seq* a; if ( - (a = (asdl_alias_seq*)_gather_26_rule(p)) // ','.import_from_as_name+ + (a = (asdl_alias_seq*)_gather_27_rule(p)) // ','.import_from_as_name+ ) { D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); @@ -3787,7 +3856,7 @@ import_from_as_name_rule(Parser *p) if ( (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_28_rule(p), !p->error_indicator) // ['as' NAME] + (b = _tmp_29_rule(p), !p->error_indicator) // ['as' NAME] ) { D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); @@ -3840,7 +3909,7 @@ dotted_as_names_rule(Parser *p) D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); asdl_alias_seq* a; if ( - (a = (asdl_alias_seq*)_gather_29_rule(p)) // ','.dotted_as_name+ + (a = (asdl_alias_seq*)_gather_30_rule(p)) // ','.dotted_as_name+ ) { D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); @@ -3896,7 +3965,7 @@ dotted_as_name_rule(Parser *p) if ( (a = dotted_name_rule(p)) // dotted_name && - (b = _tmp_31_rule(p), !p->error_indicator) // ['as' NAME] + (b = _tmp_32_rule(p), !p->error_indicator) // ['as' NAME] ) { D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); @@ -4151,7 +4220,7 @@ decorators_rule(Parser *p) D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_loop1_32_rule(p)) // (('@' named_expression NEWLINE))+ + (a = (asdl_expr_seq*)_loop1_33_rule(p)) // (('@' named_expression NEWLINE))+ ) { D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); @@ -4293,11 +4362,11 @@ class_def_raw_rule(Parser *p) void *b; asdl_stmt_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='class' + (_keyword = _PyPegen_expect_token(p, 654)) // token='class' && (a = _PyPegen_name_token(p)) // NAME 
&& - (b = _tmp_33_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (b = _tmp_34_rule(p), !p->error_indicator) // ['(' arguments? ')'] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4459,7 +4528,7 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 651)) // token='def' + (_keyword = _PyPegen_expect_token(p, 652)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4469,7 +4538,7 @@ function_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_34_rule(p), !p->error_indicator) // ['->' expression] + (a = _tmp_35_rule(p), !p->error_indicator) // ['->' expression] && (_literal_2 = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -4519,7 +4588,7 @@ function_def_raw_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 651)) // token='def' + (_keyword = _PyPegen_expect_token(p, 652)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4529,7 +4598,7 @@ function_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_35_rule(p), !p->error_indicator) // ['->' expression] + (a = _tmp_36_rule(p), !p->error_indicator) // ['->' expression] && (_literal_2 = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -4656,9 +4725,9 @@ parameters_rule(Parser *p) if ( (a = slash_no_default_rule(p)) // slash_no_default && - (b = (asdl_arg_seq*)_loop0_36_rule(p)) // param_no_default* + (b = (asdl_arg_seq*)_loop0_37_rule(p)) // param_no_default* && - (c = _loop0_37_rule(p)) // param_with_default* + (c = _loop0_38_rule(p)) // param_with_default* && (d = star_etc_rule(p), !p->error_indicator) // star_etc? ) @@ -4688,7 +4757,7 @@ parameters_rule(Parser *p) if ( (a = slash_with_default_rule(p)) // slash_with_default && - (b = _loop0_38_rule(p)) // param_with_default* + (b = _loop0_39_rule(p)) // param_with_default* && (c = star_etc_rule(p), !p->error_indicator) // star_etc? ) @@ -4716,9 +4785,9 @@ parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = (asdl_arg_seq*)_loop1_39_rule(p)) // param_no_default+ + (a = (asdl_arg_seq*)_loop1_40_rule(p)) // param_no_default+ && - (b = _loop0_40_rule(p)) // param_with_default* + (b = _loop0_41_rule(p)) // param_with_default* && (c = star_etc_rule(p), !p->error_indicator) // star_etc? ) @@ -4745,7 +4814,7 @@ parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_41_rule(p)) // param_with_default+ + (a = _loop1_42_rule(p)) // param_with_default+ && (b = star_etc_rule(p), !p->error_indicator) // star_etc? 
) @@ -4817,7 +4886,7 @@ slash_no_default_rule(Parser *p) Token * _literal_1; asdl_arg_seq* a; if ( - (a = (asdl_arg_seq*)_loop1_42_rule(p)) // param_no_default+ + (a = (asdl_arg_seq*)_loop1_43_rule(p)) // param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -4846,7 +4915,7 @@ slash_no_default_rule(Parser *p) Token * _literal; asdl_arg_seq* a; if ( - (a = (asdl_arg_seq*)_loop1_43_rule(p)) // param_no_default+ + (a = (asdl_arg_seq*)_loop1_44_rule(p)) // param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -4899,9 +4968,9 @@ slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_44_rule(p)) // param_no_default* + (a = _loop0_45_rule(p)) // param_no_default* && - (b = _loop1_45_rule(p)) // param_with_default+ + (b = _loop1_46_rule(p)) // param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -4931,9 +5000,9 @@ slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_46_rule(p)) // param_no_default* + (a = _loop0_47_rule(p)) // param_no_default* && - (b = _loop1_47_rule(p)) // param_with_default+ + (b = _loop1_48_rule(p)) // param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -5012,7 +5081,7 @@ star_etc_rule(Parser *p) && (a = param_no_default_rule(p)) // param_no_default && - (b = _loop0_48_rule(p)) // param_maybe_default* + (b = _loop0_49_rule(p)) // param_maybe_default* && (c = kwds_rule(p), !p->error_indicator) // kwds? ) @@ -5045,7 +5114,7 @@ star_etc_rule(Parser *p) && (a = param_no_default_star_annotation_rule(p)) // param_no_default_star_annotation && - (b = _loop0_49_rule(p)) // param_maybe_default* + (b = _loop0_50_rule(p)) // param_maybe_default* && (c = kwds_rule(p), !p->error_indicator) // kwds? ) @@ -5078,7 +5147,7 @@ star_etc_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_50_rule(p)) // param_maybe_default+ + (b = _loop1_51_rule(p)) // param_maybe_default+ && (c = kwds_rule(p), !p->error_indicator) // kwds? 
) @@ -5871,7 +5940,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -5916,7 +5985,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6012,7 +6081,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 644)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6057,7 +6126,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 644)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6139,7 +6208,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='else' + (_keyword = _PyPegen_expect_token(p, 645)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6219,7 +6288,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='while' + (_keyword = _PyPegen_expect_token(p, 647)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6320,11 +6389,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 649)) // token='for' + (_keyword = _PyPegen_expect_token(p, 650)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' && (_cut_var = 1) && @@ -6384,11 +6453,11 @@ for_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 649)) // token='for' + (_keyword = _PyPegen_expect_token(p, 650)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' && (_cut_var = 1) && @@ -6517,11 +6586,11 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 614)) // token='with' + (_keyword = _PyPegen_expect_token(p, 615)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = (asdl_withitem_seq*)_gather_51_rule(p)) // ','.with_item+ + (a = (asdl_withitem_seq*)_gather_52_rule(p)) // ','.with_item+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -6566,9 +6635,9 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 614)) // token='with' + (_keyword = _PyPegen_expect_token(p, 615)) // token='with' && - (a = (asdl_withitem_seq*)_gather_53_rule(p)) // ','.with_item+ + (a = (asdl_withitem_seq*)_gather_54_rule(p)) // ','.with_item+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -6617,11 +6686,11 @@ with_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 614)) // token='with' + (_keyword = _PyPegen_expect_token(p, 615)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = (asdl_withitem_seq*)_gather_55_rule(p)) // ','.with_item+ + (a = (asdl_withitem_seq*)_gather_56_rule(p)) // ','.with_item+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -6669,9 +6738,9 @@ with_stmt_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 614)) // token='with' + (_keyword = _PyPegen_expect_token(p, 615)) // token='with' && - (a = (asdl_withitem_seq*)_gather_57_rule(p)) // ','.with_item+ + (a = (asdl_withitem_seq*)_gather_58_rule(p)) // ','.with_item+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -6756,11 +6825,11 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (t = star_target_rule(p)) // star_target && - _PyPegen_lookahead(1, _tmp_59_rule, p) + _PyPegen_lookahead(1, _tmp_60_rule, p) ) { D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' star_target &(',' | ')' | ':')")); @@ -6882,7 +6951,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + (_keyword = _PyPegen_expect_token(p, 624)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6926,13 +6995,13 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + (_keyword = _PyPegen_expect_token(p, 624)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && (b = block_rule(p)) // block && - (ex = (asdl_excepthandler_seq*)_loop1_60_rule(p)) // except_block+ + (ex = (asdl_excepthandler_seq*)_loop1_61_rule(p)) // except_block+ && (el = else_block_rule(p), !p->error_indicator) // else_block? && @@ -6974,13 +7043,13 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + (_keyword = _PyPegen_expect_token(p, 624)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && (b = block_rule(p)) // block && - (ex = (asdl_excepthandler_seq*)_loop1_61_rule(p)) // except_star_block+ + (ex = (asdl_excepthandler_seq*)_loop1_62_rule(p)) // except_star_block+ && (el = else_block_rule(p), !p->error_indicator) // else_block? 
&& @@ -7073,11 +7142,11 @@ except_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='except' + (_keyword = _PyPegen_expect_token(p, 637)) // token='except' && (e = expression_rule(p)) // expression && - (t = _tmp_62_rule(p), !p->error_indicator) // ['as' NAME] + (t = _tmp_63_rule(p), !p->error_indicator) // ['as' NAME] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7116,7 +7185,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='except' + (_keyword = _PyPegen_expect_token(p, 637)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7228,13 +7297,13 @@ except_star_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='except' + (_keyword = _PyPegen_expect_token(p, 637)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (e = expression_rule(p)) // expression && - (t = _tmp_63_rule(p), !p->error_indicator) // ['as' NAME] + (t = _tmp_64_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7331,7 +7400,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 633)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7406,7 +7475,7 @@ match_stmt_rule(Parser *p) && (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' && - (cases = (asdl_match_case_seq*)_loop1_64_rule(p)) // case_block+ + (cases = (asdl_match_case_seq*)_loop1_65_rule(p)) // case_block+ && (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' ) @@ -7643,7 +7712,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -7841,7 +7910,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -7924,7 +7993,7 @@ or_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> or_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+")); asdl_pattern_seq* patterns; if ( - (patterns = (asdl_pattern_seq*)_gather_65_rule(p)) // '|'.closed_pattern+ + (patterns = (asdl_pattern_seq*)_gather_66_rule(p)) // '|'.closed_pattern+ ) { D(fprintf(stderr, "%*c+ or_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+")); @@ -8179,7 +8248,7 @@ literal_pattern_rule(Parser *p) if ( (value = signed_number_rule(p)) // signed_number && - _PyPegen_lookahead(0, _tmp_67_rule, p) + _PyPegen_lookahead(0, _tmp_68_rule, p) ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')")); @@ -8278,7 +8347,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='None' + (_keyword = _PyPegen_expect_token(p, 602)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' 
', _mark, p->mark, "'None'")); @@ -8311,7 +8380,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 600)) // token='True' + (_keyword = _PyPegen_expect_token(p, 601)) // token='True' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8344,7 +8413,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='False' + (_keyword = _PyPegen_expect_token(p, 603)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -8414,7 +8483,7 @@ literal_expr_rule(Parser *p) if ( (signed_number_var = signed_number_rule(p)) // signed_number && - _PyPegen_lookahead(0, _tmp_68_rule, p) + _PyPegen_lookahead(0, _tmp_69_rule, p) ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')")); @@ -8471,7 +8540,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='None' + (_keyword = _PyPegen_expect_token(p, 602)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -8504,7 +8573,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 600)) // token='True' + (_keyword = _PyPegen_expect_token(p, 601)) // token='True' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8537,7 +8606,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='False' + (_keyword = _PyPegen_expect_token(p, 603)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -9021,7 +9090,7 @@ pattern_capture_target_rule(Parser *p) && (name = _PyPegen_name_token(p)) // NAME && - _PyPegen_lookahead(0, _tmp_69_rule, p) + _PyPegen_lookahead(0, _tmp_70_rule, p) ) { D(fprintf(stderr, "%*c+ pattern_capture_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!\"_\" NAME !('.' | '(' | '=')")); @@ -9138,7 +9207,7 @@ value_pattern_rule(Parser *p) if ( (attr = attr_rule(p)) // attr && - _PyPegen_lookahead(0, _tmp_70_rule, p) + _PyPegen_lookahead(0, _tmp_71_rule, p) ) { D(fprintf(stderr, "%*c+ value_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr !('.' | '(' | '=')")); @@ -9564,7 +9633,7 @@ maybe_sequence_pattern_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * patterns; if ( - (patterns = _gather_71_rule(p)) // ','.maybe_star_pattern+ + (patterns = _gather_72_rule(p)) // ','.maybe_star_pattern+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
) @@ -9976,13 +10045,13 @@ items_pattern_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> items_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+")); - asdl_seq * _gather_73_var; + asdl_seq * _gather_74_var; if ( - (_gather_73_var = _gather_73_rule(p)) // ','.key_value_pattern+ + (_gather_74_var = _gather_74_rule(p)) // ','.key_value_pattern+ ) { D(fprintf(stderr, "%*c+ items_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+")); - _res = _gather_73_var; + _res = _gather_74_var; goto done; } p->mark = _mark; @@ -10019,7 +10088,7 @@ key_value_pattern_rule(Parser *p) void *key; pattern_ty pattern; if ( - (key = _tmp_75_rule(p)) // literal_expr | attr + (key = _tmp_76_rule(p)) // literal_expr | attr && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -10350,7 +10419,7 @@ positional_patterns_rule(Parser *p) D(fprintf(stderr, "%*c> positional_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.pattern+")); asdl_pattern_seq* args; if ( - (args = (asdl_pattern_seq*)_gather_76_rule(p)) // ','.pattern+ + (args = (asdl_pattern_seq*)_gather_77_rule(p)) // ','.pattern+ ) { D(fprintf(stderr, "%*c+ positional_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.pattern+")); @@ -10392,13 +10461,13 @@ keyword_patterns_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> keyword_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+")); - asdl_seq * _gather_78_var; + asdl_seq * _gather_79_var; if ( - (_gather_78_var = _gather_78_rule(p)) // ','.keyword_pattern+ + (_gather_79_var = _gather_79_rule(p)) // ','.keyword_pattern+ ) { D(fprintf(stderr, "%*c+ keyword_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+")); - _res = _gather_78_var; + _res = _gather_79_var; goto done; } p->mark = _mark; @@ -10497,7 +10566,7 @@ expressions_rule(Parser *p) if ( (a = expression_rule(p)) // expression && - (b = _loop1_80_rule(p)) // ((',' expression))+ + (b = _loop1_81_rule(p)) // ((',' expression))+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -10669,11 +10738,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else' && (c = expression_rule(p)) // expression ) @@ -10780,7 +10849,7 @@ yield_expr_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 573)) // token='yield' && - (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 608)) // token='from' && (a = expression_rule(p)) // expression ) @@ -10888,7 +10957,7 @@ star_expressions_rule(Parser *p) if ( (a = star_expression_rule(p)) // star_expression && - (b = _loop1_81_rule(p)) // ((',' star_expression))+ + (b = _loop1_82_rule(p)) // ((',' star_expression))+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -11089,7 +11158,7 @@ star_named_expressions_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_82_rule(p)) // ','.star_named_expression+ + (a = (asdl_expr_seq*)_gather_83_rule(p)) // ','.star_named_expression+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
) @@ -11389,7 +11458,7 @@ disjunction_rule(Parser *p) if ( (a = conjunction_rule(p)) // conjunction && - (b = _loop1_84_rule(p)) // (('or' conjunction))+ + (b = _loop1_85_rule(p)) // (('or' conjunction))+ ) { D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); @@ -11478,7 +11547,7 @@ conjunction_rule(Parser *p) if ( (a = inversion_rule(p)) // inversion && - (b = _loop1_85_rule(p)) // (('and' inversion))+ + (b = _loop1_86_rule(p)) // (('and' inversion))+ ) { D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); @@ -11652,7 +11721,7 @@ comparison_rule(Parser *p) if ( (a = bitwise_or_rule(p)) // bitwise_or && - (b = _loop1_86_rule(p)) // compare_op_bitwise_or_pair+ + (b = _loop1_87_rule(p)) // compare_op_bitwise_or_pair+ ) { D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); @@ -11989,10 +12058,10 @@ noteq_bitwise_or_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); - void *_tmp_87_var; + void *_tmp_88_var; expr_ty a; if ( - (_tmp_87_var = _tmp_87_rule(p)) // '!=' + (_tmp_88_var = _tmp_88_rule(p)) // '!=' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12230,7 +12299,7 @@ notin_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 581)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12277,7 +12346,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword = _PyPegen_expect_token(p, 651)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -14027,7 +14096,7 @@ slices_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_88_rule(p)) // ','.(slice | starred_expression)+ + (a = (asdl_expr_seq*)_gather_89_rule(p)) // ','.(slice | starred_expression)+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -14100,7 +14169,7 @@ slice_rule(Parser *p) && (b = expression_rule(p), !p->error_indicator) // expression? && - (c = _tmp_90_rule(p), !p->error_indicator) // [':' expression?] + (c = _tmp_91_rule(p), !p->error_indicator) // [':' expression?] ) { D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? 
[':' expression?]")); @@ -14160,7 +14229,7 @@ slice_rule(Parser *p) // | 'True' // | 'False' // | 'None' -// | &STRING strings +// | &(STRING | FSTRING_START) strings // | NUMBER // | &'(' (tuple | group | genexp) // | &'[' (list | listcomp) @@ -14215,7 +14284,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 600)) // token='True' + (_keyword = _PyPegen_expect_token(p, 601)) // token='True' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -14248,7 +14317,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='False' + (_keyword = _PyPegen_expect_token(p, 603)) // token='False' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -14281,7 +14350,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='None' + (_keyword = _PyPegen_expect_token(p, 602)) // token='None' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -14306,26 +14375,26 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'")); } - { // &STRING strings + { // &(STRING | FSTRING_START) strings if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&STRING strings")); + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START) strings")); expr_ty strings_var; if ( - _PyPegen_lookahead(1, _PyPegen_string_token, p) + _PyPegen_lookahead(1, _tmp_92_rule, p) && (strings_var = strings_rule(p)) // strings ) { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&STRING strings")); + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START) strings")); _res = strings_var; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&STRING strings")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&(STRING | FSTRING_START) strings")); } { // NUMBER if (p->error_indicator) { @@ -14352,15 +14421,15 @@ atom_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); - void *_tmp_91_var; + void *_tmp_93_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' && - (_tmp_91_var = _tmp_91_rule(p)) // tuple | group | genexp + (_tmp_93_var = _tmp_93_rule(p)) // tuple | group | genexp ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); - _res = _tmp_91_var; + _res = _tmp_93_var; goto done; } p->mark = _mark; @@ -14373,15 +14442,15 @@ atom_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); - void *_tmp_92_var; + void *_tmp_94_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' && - (_tmp_92_var = _tmp_92_rule(p)) // list | listcomp + (_tmp_94_var = _tmp_94_rule(p)) // list | listcomp ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); - _res = _tmp_92_var; + _res = _tmp_94_var; goto done; } p->mark = _mark; @@ -14394,15 +14463,15 @@ atom_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); - void *_tmp_93_var; + void *_tmp_95_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' && - (_tmp_93_var = _tmp_93_rule(p)) // dict | set | dictcomp | setcomp + (_tmp_95_var = _tmp_95_rule(p)) // dict | set | dictcomp | setcomp ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); - _res = _tmp_93_var; + _res = _tmp_95_var; goto done; } p->mark = _mark; @@ -14474,7 +14543,7 @@ group_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_94_rule(p)) // yield_expr | named_expression + (a = _tmp_96_rule(p)) // yield_expr | named_expression && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -14551,7 +14620,7 @@ lambdef_rule(Parser *p) void *a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 586)) // token='lambda' + (_keyword = _PyPegen_expect_token(p, 600)) // token='lambda' && (a = lambda_params_rule(p), !p->error_indicator) // lambda_params? && @@ -14678,9 +14747,9 @@ lambda_parameters_rule(Parser *p) if ( (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default && - (b = (asdl_arg_seq*)_loop0_95_rule(p)) // lambda_param_no_default* + (b = (asdl_arg_seq*)_loop0_97_rule(p)) // lambda_param_no_default* && - (c = _loop0_96_rule(p)) // lambda_param_with_default* + (c = _loop0_98_rule(p)) // lambda_param_with_default* && (d = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc? ) @@ -14710,7 +14779,7 @@ lambda_parameters_rule(Parser *p) if ( (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default && - (b = _loop0_97_rule(p)) // lambda_param_with_default* + (b = _loop0_99_rule(p)) // lambda_param_with_default* && (c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc? 
) @@ -14738,9 +14807,9 @@ lambda_parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = (asdl_arg_seq*)_loop1_98_rule(p)) // lambda_param_no_default+ + (a = (asdl_arg_seq*)_loop1_100_rule(p)) // lambda_param_no_default+ && - (b = _loop0_99_rule(p)) // lambda_param_with_default* + (b = _loop0_101_rule(p)) // lambda_param_with_default* && (c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc? ) @@ -14767,7 +14836,7 @@ lambda_parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_100_rule(p)) // lambda_param_with_default+ + (a = _loop1_102_rule(p)) // lambda_param_with_default+ && (b = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc? ) @@ -14841,7 +14910,7 @@ lambda_slash_no_default_rule(Parser *p) Token * _literal_1; asdl_arg_seq* a; if ( - (a = (asdl_arg_seq*)_loop1_101_rule(p)) // lambda_param_no_default+ + (a = (asdl_arg_seq*)_loop1_103_rule(p)) // lambda_param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -14870,7 +14939,7 @@ lambda_slash_no_default_rule(Parser *p) Token * _literal; asdl_arg_seq* a; if ( - (a = (asdl_arg_seq*)_loop1_102_rule(p)) // lambda_param_no_default+ + (a = (asdl_arg_seq*)_loop1_104_rule(p)) // lambda_param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -14923,9 +14992,9 @@ lambda_slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_103_rule(p)) // lambda_param_no_default* + (a = _loop0_105_rule(p)) // lambda_param_no_default* && - (b = _loop1_104_rule(p)) // lambda_param_with_default+ + (b = _loop1_106_rule(p)) // lambda_param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -14955,9 +15024,9 @@ lambda_slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_105_rule(p)) // lambda_param_no_default* + (a = _loop0_107_rule(p)) // lambda_param_no_default* && - (b = _loop1_106_rule(p)) // lambda_param_with_default+ + (b = _loop1_108_rule(p)) // lambda_param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -15035,7 +15104,7 @@ lambda_star_etc_rule(Parser *p) && (a = lambda_param_no_default_rule(p)) // lambda_param_no_default && - (b = _loop0_107_rule(p)) // lambda_param_maybe_default* + (b = _loop0_109_rule(p)) // lambda_param_maybe_default* && (c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds? ) @@ -15068,7 +15137,7 @@ lambda_star_etc_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_108_rule(p)) // lambda_param_maybe_default+ + (b = _loop1_110_rule(p)) // lambda_param_maybe_default+ && (c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds? 
) @@ -15475,7 +15544,387 @@ lambda_param_rule(Parser *p) return _res; } -// strings: STRING+ +// fstring_middle: fstring_replacement_field | FSTRING_MIDDLE +static expr_ty +fstring_middle_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // fstring_replacement_field + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_middle[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field")); + expr_ty fstring_replacement_field_var; + if ( + (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field + ) + { + D(fprintf(stderr, "%*c+ fstring_middle[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field")); + _res = fstring_replacement_field_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_middle[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field")); + } + { // FSTRING_MIDDLE + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_middle[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE")); + Token * t; + if ( + (t = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE' + ) + { + D(fprintf(stderr, "%*c+ fstring_middle[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE")); + _res = _PyPegen_constant_from_token ( p , t ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_middle[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// fstring_replacement_field: +// | '{' (yield_expr | star_expressions) "="? fstring_conversion? fstring_full_format_spec? '}' +// | invalid_replacement_field +static expr_ty +fstring_replacement_field_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + p->level--; + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '{' (yield_expr | star_expressions) "="? fstring_conversion? fstring_full_format_spec? '}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'")); + Token * _literal; + void *a; + void *conversion; + void *debug_expr; + void *format; + Token * rbrace; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = _tmp_111_rule(p)) // yield_expr | star_expressions + && + (debug_expr = _PyPegen_expect_token(p, 22), !p->error_indicator) // "="? + && + (conversion = fstring_conversion_rule(p), !p->error_indicator) // fstring_conversion? + && + (format = fstring_full_format_spec_rule(p), !p->error_indicator) // fstring_full_format_spec? 
+ && + (rbrace = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + p->level--; + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_formatted_value ( p , a , debug_expr , conversion , format , rbrace , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'")); + } + if (p->call_invalid_rules) { // invalid_replacement_field + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_replacement_field")); + void *invalid_replacement_field_var; + if ( + (invalid_replacement_field_var = invalid_replacement_field_rule(p)) // invalid_replacement_field + ) + { + D(fprintf(stderr, "%*c+ fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_replacement_field")); + _res = invalid_replacement_field_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_replacement_field")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// fstring_conversion: "!" NAME +static ResultTokenWithMetadata* +fstring_conversion_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + ResultTokenWithMetadata* _res = NULL; + int _mark = p->mark; + { // "!" NAME + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_conversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "\"!\" NAME")); + expr_ty conv; + Token * conv_token; + if ( + (conv_token = _PyPegen_expect_token(p, 54)) // token='!' + && + (conv = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ fstring_conversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "\"!\" NAME")); + _res = _PyPegen_check_fstring_conversion ( p , conv_token , conv ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_conversion[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "\"!\" NAME")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// fstring_full_format_spec: ':' fstring_format_spec* +static ResultTokenWithMetadata* +fstring_full_format_spec_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + ResultTokenWithMetadata* _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + p->level--; + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // ':' fstring_format_spec* + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_full_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*")); + Token * colon; + asdl_seq * spec; + if ( + (colon = _PyPegen_expect_token(p, 11)) // token=':' + && + (spec = _loop0_112_rule(p)) // fstring_format_spec* + ) + { + D(fprintf(stderr, "%*c+ fstring_full_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + p->level--; + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_setup_full_format_spec ( p , colon , ( asdl_expr_seq* ) spec , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_full_format_spec[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' fstring_format_spec*")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// fstring_format_spec: FSTRING_MIDDLE | fstring_replacement_field +static expr_ty +fstring_format_spec_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // FSTRING_MIDDLE + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE")); + Token * t; + if ( + (t = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE' + ) + { + D(fprintf(stderr, "%*c+ fstring_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE")); + _res = _PyPegen_constant_from_token ( p , t ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_format_spec[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "FSTRING_MIDDLE")); + } + { // fstring_replacement_field + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> fstring_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field")); + expr_ty fstring_replacement_field_var; + if ( + (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field + ) + { + D(fprintf(stderr, "%*c+ fstring_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field")); + _res = fstring_replacement_field_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring_format_spec[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// string: STRING +static expr_ty +string_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // STRING + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> string[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); + Token* s; + if ( + (s = (Token*)_PyPegen_string_token(p)) // STRING + ) + { + D(fprintf(stderr, "%*c+ string[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING")); + _res = _PyPegen_constant_from_string ( p , s ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s string[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// strings: ((fstring | string))+ static expr_ty strings_rule(Parser *p) { @@ -15493,19 +15942,37 @@ strings_rule(Parser *p) return _res; } int _mark = p->mark; - { // STRING+ + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + p->level--; + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // ((fstring | string))+ if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING+")); - asdl_seq * a; + D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((fstring | string))+")); + asdl_expr_seq* a; if ( - (a = _loop1_109_rule(p)) // STRING+ + (a = (asdl_expr_seq*)_loop1_113_rule(p)) // ((fstring | string))+ ) { - D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING+")); - _res = _PyPegen_concatenate_strings ( p , a ); + D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((fstring | string))+")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + p->level--; + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_concatenate_strings ( p , a , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; @@ -15515,7 +15982,7 @@ strings_rule(Parser *p) } 
p->mark = _mark; D(fprintf(stderr, "%*c%s strings[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING+")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((fstring | string))+")); } _res = NULL; done: @@ -15627,7 +16094,7 @@ tuple_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_110_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?] + (a = _tmp_114_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?] && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -15845,7 +16312,7 @@ double_starred_kvpairs_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_111_rule(p)) // ','.double_starred_kvpair+ + (a = _gather_115_rule(p)) // ','.double_starred_kvpair+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -16007,7 +16474,7 @@ for_if_clauses_rule(Parser *p) D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); asdl_comprehension_seq* a; if ( - (a = (asdl_comprehension_seq*)_loop1_113_rule(p)) // for_if_clause+ + (a = (asdl_comprehension_seq*)_loop1_117_rule(p)) // for_if_clause+ ) { D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); @@ -16062,17 +16529,17 @@ for_if_clause_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 649)) // token='for' + (_keyword = _PyPegen_expect_token(p, 650)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' && (_cut_var = 1) && (b = disjunction_rule(p)) // disjunction && - (c = (asdl_expr_seq*)_loop0_114_rule(p)) // (('if' disjunction))* + (c = (asdl_expr_seq*)_loop0_118_rule(p)) // (('if' disjunction))* ) { D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); @@ -16105,17 +16572,17 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 649)) // token='for' + (_keyword = _PyPegen_expect_token(p, 650)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' && (_cut_var = 1) && (b = disjunction_rule(p)) // disjunction && - (c = (asdl_expr_seq*)_loop0_115_rule(p)) // (('if' disjunction))* + (c = (asdl_expr_seq*)_loop0_119_rule(p)) // (('if' disjunction))* ) { D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); @@ -16378,7 +16845,7 @@ genexp_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_116_rule(p)) // assignment_expression | expression !':=' + (a = _tmp_120_rule(p)) // assignment_expression | expression !':=' && (b = for_if_clauses_rule(p)) // for_if_clauses && @@ -16630,9 +17097,9 @@ args_rule(Parser *p) asdl_expr_seq* a; void *b; if ( - (a = (asdl_expr_seq*)_gather_117_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ + (a = (asdl_expr_seq*)_gather_121_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') 
!'=')+ && - (b = _tmp_119_rule(p), !p->error_indicator) // [',' kwargs] + (b = _tmp_123_rule(p), !p->error_indicator) // [',' kwargs] ) { D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs]")); @@ -16723,11 +17190,11 @@ kwargs_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _gather_120_rule(p)) // ','.kwarg_or_starred+ + (a = _gather_124_rule(p)) // ','.kwarg_or_starred+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _gather_122_rule(p)) // ','.kwarg_or_double_starred+ + (b = _gather_126_rule(p)) // ','.kwarg_or_double_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); @@ -16749,13 +17216,13 @@ kwargs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - asdl_seq * _gather_124_var; + asdl_seq * _gather_128_var; if ( - (_gather_124_var = _gather_124_rule(p)) // ','.kwarg_or_starred+ + (_gather_128_var = _gather_128_rule(p)) // ','.kwarg_or_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - _res = _gather_124_var; + _res = _gather_128_var; goto done; } p->mark = _mark; @@ -16768,13 +17235,13 @@ kwargs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - asdl_seq * _gather_126_var; + asdl_seq * _gather_130_var; if ( - (_gather_126_var = _gather_126_rule(p)) // ','.kwarg_or_double_starred+ + (_gather_130_var = _gather_130_rule(p)) // ','.kwarg_or_double_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - _res = _gather_126_var; + _res = _gather_130_var; goto done; } p->mark = _mark; @@ -17167,7 +17634,7 @@ star_targets_rule(Parser *p) if ( (a = star_target_rule(p)) // star_target && - (b = _loop0_128_rule(p)) // ((',' star_target))* + (b = _loop0_132_rule(p)) // ((',' star_target))* && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -17224,7 +17691,7 @@ star_targets_list_seq_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_129_rule(p)) // ','.star_target+ + (a = (asdl_expr_seq*)_gather_133_rule(p)) // ','.star_target+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -17275,7 +17742,7 @@ star_targets_tuple_seq_rule(Parser *p) if ( (a = star_target_rule(p)) // star_target && - (b = _loop1_131_rule(p)) // ((',' star_target))+ + (b = _loop1_135_rule(p)) // ((',' star_target))+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
) @@ -17364,7 +17831,7 @@ star_target_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = _tmp_132_rule(p)) // !'*' star_target + (a = _tmp_136_rule(p)) // !'*' star_target ) { D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); @@ -18295,7 +18762,7 @@ del_targets_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_133_rule(p)) // ','.del_target+ + (a = (asdl_expr_seq*)_gather_137_rule(p)) // ','.del_target+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -18656,7 +19123,7 @@ type_expressions_rule(Parser *p) expr_ty b; expr_ty c; if ( - (a = _gather_135_rule(p)) // ','.expression+ + (a = _gather_139_rule(p)) // ','.expression+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -18695,7 +19162,7 @@ type_expressions_rule(Parser *p) asdl_seq * a; expr_ty b; if ( - (a = _gather_137_rule(p)) // ','.expression+ + (a = _gather_141_rule(p)) // ','.expression+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -18728,7 +19195,7 @@ type_expressions_rule(Parser *p) asdl_seq * a; expr_ty b; if ( - (a = _gather_139_rule(p)) // ','.expression+ + (a = _gather_143_rule(p)) // ','.expression+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -18848,7 +19315,7 @@ type_expressions_rule(Parser *p) D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+")); asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_141_rule(p)) // ','.expression+ + (a = (asdl_expr_seq*)_gather_145_rule(p)) // ','.expression+ ) { D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+")); @@ -18900,7 +19367,7 @@ func_type_comment_rule(Parser *p) && (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - _PyPegen_lookahead(1, _tmp_143_rule, p) + _PyPegen_lookahead(1, _tmp_147_rule, p) ) { D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); @@ -19029,7 +19496,7 @@ invalid_arguments_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_opt_var = _tmp_144_rule(p), !p->error_indicator) // [args | expression for_if_clauses] + (_opt_var = _tmp_148_rule(p), !p->error_indicator) // [args | expression for_if_clauses] ) { D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); @@ -19089,13 +19556,13 @@ invalid_arguments_rule(Parser *p) expr_ty a; Token * b; if ( - (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [(args ',')] + (_opt_var = _tmp_149_rule(p), !p->error_indicator) // [(args ',')] && (a = _PyPegen_name_token(p)) // NAME && (b = _PyPegen_expect_token(p, 22)) // token='=' && - _PyPegen_lookahead(1, _tmp_146_rule, p) + _PyPegen_lookahead(1, _tmp_150_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); @@ -19234,7 +19701,7 @@ invalid_kwarg_rule(Parser *p) Token* a; Token * b; if ( - (a = (Token*)_tmp_147_rule(p)) // 'True' | 'False' | 'None' + (a = (Token*)_tmp_151_rule(p)) // 'True' | 'False' | 'None' && (b = _PyPegen_expect_token(p, 22)) // token='=' ) @@ -19294,7 +19761,7 @@ invalid_kwarg_rule(Parser *p) expr_ty a; Token * b; if ( - 
_PyPegen_lookahead(0, _tmp_148_rule, p) + _PyPegen_lookahead(0, _tmp_152_rule, p) && (a = expression_rule(p)) // expression && @@ -19398,11 +19865,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else' && (c = expression_rule(p)) // expression ) @@ -19530,6 +19997,7 @@ invalid_legacy_expression_rule(Parser *p) // invalid_expression: // | !(NAME STRING | SOFT_KEYWORD) disjunction expression_without_invalid // | disjunction 'if' disjunction !('else' | ':') +// | 'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field) static void * invalid_expression_rule(Parser *p) { @@ -19552,7 +20020,7 @@ invalid_expression_rule(Parser *p) expr_ty a; expr_ty b; if ( - _PyPegen_lookahead(0, _tmp_149_rule, p) + _PyPegen_lookahead(0, _tmp_153_rule, p) && (a = disjunction_rule(p)) // disjunction && @@ -19584,11 +20052,11 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (b = disjunction_rule(p)) // disjunction && - _PyPegen_lookahead(0, _tmp_150_rule, p) + _PyPegen_lookahead(0, _tmp_154_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')")); @@ -19604,6 +20072,39 @@ invalid_expression_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_expression[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')")); } + { // 'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field) + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + Token * a; + Token * b; + if ( + (a = _PyPegen_expect_token(p, 600)) // token='lambda' + && + (_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params? + && + (b = _PyPegen_expect_token(p, 11)) // token=':' + && + _PyPegen_lookahead(1, _tmp_155_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "f-string: lambda expressions are not allowed without parentheses" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_params? 
':' &(FSTRING_MIDDLE | fstring_replacement_field)")); + } _res = NULL; done: p->level--; @@ -19677,7 +20178,7 @@ invalid_named_expression_rule(Parser *p) && (b = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_151_rule, p) + _PyPegen_lookahead(0, _tmp_156_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' bitwise_or !('=' | ':=')")); @@ -19703,7 +20204,7 @@ invalid_named_expression_rule(Parser *p) Token * b; expr_ty bitwise_or_var; if ( - _PyPegen_lookahead(0, _tmp_152_rule, p) + _PyPegen_lookahead(0, _tmp_157_rule, p) && (a = bitwise_or_rule(p)) // bitwise_or && @@ -19711,7 +20212,7 @@ invalid_named_expression_rule(Parser *p) && (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_153_rule, p) + _PyPegen_lookahead(0, _tmp_158_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')")); @@ -19792,7 +20293,7 @@ invalid_assignment_rule(Parser *p) D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression")); Token * _literal; Token * _literal_1; - asdl_seq * _loop0_154_var; + asdl_seq * _loop0_159_var; expr_ty a; expr_ty expression_var; if ( @@ -19800,7 +20301,7 @@ invalid_assignment_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_loop0_154_var = _loop0_154_rule(p)) // star_named_expressions* + (_loop0_159_var = _loop0_159_rule(p)) // star_named_expressions* && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -19857,10 +20358,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); Token * _literal; - asdl_seq * _loop0_155_var; + asdl_seq * _loop0_160_var; expr_ty a; if ( - (_loop0_155_var = _loop0_155_rule(p)) // ((star_targets '='))* + (_loop0_160_var = _loop0_160_rule(p)) // ((star_targets '='))* && (a = star_expressions_rule(p)) // star_expressions && @@ -19887,10 +20388,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); Token * _literal; - asdl_seq * _loop0_156_var; + asdl_seq * _loop0_161_var; expr_ty a; if ( - (_loop0_156_var = _loop0_156_rule(p)) // ((star_targets '='))* + (_loop0_161_var = _loop0_161_rule(p)) // ((star_targets '='))* && (a = yield_expr_rule(p)) // yield_expr && @@ -19916,7 +20417,7 @@ invalid_assignment_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_157_var; + void *_tmp_162_var; expr_ty a; AugOperator* augassign_var; if ( @@ -19924,7 +20425,7 @@ invalid_assignment_rule(Parser *p) && (augassign_var = augassign_rule(p)) // augassign && - (_tmp_157_var = _tmp_157_rule(p)) // yield_expr | star_expressions + (_tmp_162_var = _tmp_162_rule(p)) // yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); @@ -20057,7 +20558,7 @@ invalid_del_stmt_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = 
_PyPegen_expect_token(p, 603)) // token='del' + (_keyword = _PyPegen_expect_token(p, 604)) // token='del' && (a = star_expressions_rule(p)) // star_expressions ) @@ -20150,11 +20651,11 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_158_var; + void *_tmp_163_var; expr_ty a; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_158_var = _tmp_158_rule(p)) // '[' | '(' | '{' + (_tmp_163_var = _tmp_163_rule(p)) // '[' | '(' | '{' && (a = starred_expression_rule(p)) // starred_expression && @@ -20181,12 +20682,12 @@ invalid_comprehension_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses")); Token * _literal; - void *_tmp_159_var; + void *_tmp_164_var; expr_ty a; asdl_expr_seq* b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_159_var = _tmp_159_rule(p)) // '[' | '{' + (_tmp_164_var = _tmp_164_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -20216,12 +20717,12 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' for_if_clauses")); - void *_tmp_160_var; + void *_tmp_165_var; expr_ty a; Token * b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_160_var = _tmp_160_rule(p)) // '[' | '{' + (_tmp_165_var = _tmp_165_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -20358,13 +20859,13 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slash_no_default | slash_with_default) param_maybe_default* '/'")); - asdl_seq * _loop0_162_var; - void *_tmp_161_var; + asdl_seq * _loop0_167_var; + void *_tmp_166_var; Token * a; if ( - (_tmp_161_var = _tmp_161_rule(p)) // slash_no_default | slash_with_default + (_tmp_166_var = _tmp_166_rule(p)) // slash_no_default | slash_with_default && - (_loop0_162_var = _loop0_162_rule(p)) // param_maybe_default* + (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20388,7 +20889,7 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default? param_no_default* invalid_parameters_helper param_no_default")); - asdl_seq * _loop0_163_var; + asdl_seq * _loop0_168_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -20396,7 +20897,7 @@ invalid_parameters_rule(Parser *p) if ( (_opt_var = slash_no_default_rule(p), !p->error_indicator) // slash_no_default? && - (_loop0_163_var = _loop0_163_rule(p)) // param_no_default* + (_loop0_168_var = _loop0_168_rule(p)) // param_no_default* && (invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper && @@ -20422,18 +20923,18 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* '(' param_no_default+ ','? 
')'")); - asdl_seq * _loop0_164_var; - asdl_seq * _loop1_165_var; + asdl_seq * _loop0_169_var; + asdl_seq * _loop1_170_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_164_var = _loop0_164_rule(p)) // param_no_default* + (_loop0_169_var = _loop0_169_rule(p)) // param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_loop1_165_var = _loop1_165_rule(p)) // param_no_default+ + (_loop1_170_var = _loop1_170_rule(p)) // param_no_default+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -20460,22 +20961,22 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_167_var; - asdl_seq * _loop0_169_var; + asdl_seq * _loop0_172_var; + asdl_seq * _loop0_174_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_168_var; + void *_tmp_173_var; Token * a; if ( - (_opt_var = _tmp_166_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] + (_opt_var = _tmp_171_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] && - (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default* + (_loop0_172_var = _loop0_172_rule(p)) // param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_168_var = _tmp_168_rule(p)) // ',' | param_no_default + (_tmp_173_var = _tmp_173_rule(p)) // ',' | param_no_default && - (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default* + (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20500,10 +21001,10 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_170_var; + asdl_seq * _loop1_175_var; Token * a; if ( - (_loop1_170_var = _loop1_170_rule(p)) // param_maybe_default+ + (_loop1_175_var = _loop1_175_rule(p)) // param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -20553,7 +21054,7 @@ invalid_default_rule(Parser *p) if ( (a = _PyPegen_expect_token(p, 22)) // token='=' && - _PyPegen_lookahead(1, _tmp_171_rule, p) + _PyPegen_lookahead(1, _tmp_176_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' &(')' | ',')")); @@ -20599,12 +21100,12 @@ invalid_star_etc_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); - void *_tmp_172_var; + void *_tmp_177_var; Token * a; if ( (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_172_var = _tmp_172_rule(p)) // ')' | ',' (')' | '**') + (_tmp_177_var = _tmp_177_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -20687,20 +21188,20 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_174_var; - void *_tmp_173_var; - void *_tmp_175_var; + asdl_seq * _loop0_179_var; + void *_tmp_178_var; + void 
*_tmp_180_var; Token * a; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_173_var = _tmp_173_rule(p)) // param_no_default | ',' + (_tmp_178_var = _tmp_178_rule(p)) // param_no_default | ',' && - (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default* + (_loop0_179_var = _loop0_179_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_175_var = _tmp_175_rule(p)) // param_no_default | ',' + (_tmp_180_var = _tmp_180_rule(p)) // param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')")); @@ -20816,7 +21317,7 @@ invalid_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_176_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_181_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param ',' ('*' | '**' | '/')")); @@ -20882,13 +21383,13 @@ invalid_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_177_var; + asdl_seq * _loop1_182_var; if ( - (_loop1_177_var = _loop1_177_rule(p)) // param_with_default+ + (_loop1_182_var = _loop1_182_rule(p)) // param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_177_var; + _res = _loop1_182_var; goto done; } p->mark = _mark; @@ -20954,13 +21455,13 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/'")); - asdl_seq * _loop0_179_var; - void *_tmp_178_var; + asdl_seq * _loop0_184_var; + void *_tmp_183_var; Token * a; if ( - (_tmp_178_var = _tmp_178_rule(p)) // lambda_slash_no_default | lambda_slash_with_default + (_tmp_183_var = _tmp_183_rule(p)) // lambda_slash_no_default | lambda_slash_with_default && - (_loop0_179_var = _loop0_179_rule(p)) // lambda_param_maybe_default* + (_loop0_184_var = _loop0_184_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -20984,7 +21485,7 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default")); - asdl_seq * _loop0_180_var; + asdl_seq * _loop0_185_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -20992,7 +21493,7 @@ invalid_lambda_parameters_rule(Parser *p) if ( (_opt_var = lambda_slash_no_default_rule(p), !p->error_indicator) // lambda_slash_no_default? && - (_loop0_180_var = _loop0_180_rule(p)) // lambda_param_no_default* + (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_no_default* && (invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper && @@ -21018,18 +21519,18 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* '(' ','.lambda_param+ ','? 
')'")); - asdl_seq * _gather_182_var; - asdl_seq * _loop0_181_var; + asdl_seq * _gather_187_var; + asdl_seq * _loop0_186_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_181_var = _loop0_181_rule(p)) // lambda_param_no_default* + (_loop0_186_var = _loop0_186_rule(p)) // lambda_param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_182_var = _gather_182_rule(p)) // ','.lambda_param+ + (_gather_187_var = _gather_187_rule(p)) // ','.lambda_param+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -21056,22 +21557,22 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_185_var; - asdl_seq * _loop0_187_var; + asdl_seq * _loop0_190_var; + asdl_seq * _loop0_192_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_186_var; + void *_tmp_191_var; Token * a; if ( - (_opt_var = _tmp_184_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] + (_opt_var = _tmp_189_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] && - (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_maybe_default* + (_loop0_190_var = _loop0_190_rule(p)) // lambda_param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_186_var = _tmp_186_rule(p)) // ',' | lambda_param_no_default + (_tmp_191_var = _tmp_191_rule(p)) // ',' | lambda_param_no_default && - (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_maybe_default* + (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -21096,10 +21597,10 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_188_var; + asdl_seq * _loop1_193_var; Token * a; if ( - (_loop1_188_var = _loop1_188_rule(p)) // lambda_param_maybe_default+ + (_loop1_193_var = _loop1_193_rule(p)) // lambda_param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -21171,13 +21672,13 @@ invalid_lambda_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - asdl_seq * _loop1_189_var; + asdl_seq * _loop1_194_var; if ( - (_loop1_189_var = _loop1_189_rule(p)) // lambda_param_with_default+ + (_loop1_194_var = _loop1_194_rule(p)) // lambda_param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_lambda_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - _res = _loop1_189_var; + _res = _loop1_194_var; goto done; } p->mark = _mark; @@ -21214,11 +21715,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_190_var; + void *_tmp_195_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_190_var = _tmp_190_rule(p)) // ':' | ',' (':' | '**') + (_tmp_195_var = _tmp_195_rule(p)) // ':' | ',' (':' | 
'**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -21271,20 +21772,20 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_192_var; - void *_tmp_191_var; - void *_tmp_193_var; + asdl_seq * _loop0_197_var; + void *_tmp_196_var; + void *_tmp_198_var; Token * a; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_191_var = _tmp_191_rule(p)) // lambda_param_no_default | ',' + (_tmp_196_var = _tmp_196_rule(p)) // lambda_param_no_default | ',' && - (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default* + (_loop0_197_var = _loop0_197_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_193_var = _tmp_193_rule(p)) // lambda_param_no_default | ',' + (_tmp_198_var = _tmp_198_rule(p)) // lambda_param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); @@ -21403,7 +21904,7 @@ invalid_lambda_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_194_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_199_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param ',' ('*' | '**' | '/')")); @@ -21507,11 +22008,11 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (a = expression_rule(p)) // expression && - _PyPegen_lookahead(1, _tmp_195_rule, p) + _PyPegen_lookahead(1, _tmp_200_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression &(',' | ')' | ':')")); @@ -21560,7 +22061,7 @@ invalid_for_target_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 649)) // token='for' + (_keyword = _PyPegen_expect_token(p, 650)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -21692,11 +22193,11 @@ invalid_import_rule(Parser *p) expr_ty dotted_name_var; expr_ty dotted_name_var_1; if ( - (a = _PyPegen_expect_token(p, 606)) // token='import' + (a = _PyPegen_expect_token(p, 607)) // token='import' && (dotted_name_var = dotted_name_rule(p)) // dotted_name && - (_keyword = _PyPegen_expect_token(p, 607)) // token='from' + (_keyword = _PyPegen_expect_token(p, 608)) // token='from' && (dotted_name_var_1 = dotted_name_rule(p)) // dotted_name ) @@ -21792,7 +22293,7 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE")); - asdl_seq * _gather_196_var; + asdl_seq * _gather_201_var; Token * _keyword; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -21800,9 +22301,9 @@ invalid_with_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? 
&& - (_keyword = _PyPegen_expect_token(p, 614)) // token='with' + (_keyword = _PyPegen_expect_token(p, 615)) // token='with' && - (_gather_196_var = _gather_196_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_201_var = _gather_201_rule(p)) // ','.(expression ['as' star_target])+ && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -21826,7 +22327,7 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); - asdl_seq * _gather_198_var; + asdl_seq * _gather_203_var; Token * _keyword; Token * _literal; Token * _literal_1; @@ -21838,11 +22339,11 @@ invalid_with_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (_keyword = _PyPegen_expect_token(p, 614)) // token='with' + (_keyword = _PyPegen_expect_token(p, 615)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_198_var = _gather_198_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_203_var = _gather_203_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? && @@ -21892,7 +22393,7 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); - asdl_seq * _gather_200_var; + asdl_seq * _gather_205_var; Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings @@ -21901,9 +22402,9 @@ invalid_with_stmt_indent_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 614)) // token='with' + (a = _PyPegen_expect_token(p, 615)) // token='with' && - (_gather_200_var = _gather_200_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -21931,7 +22432,7 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); - asdl_seq * _gather_202_var; + asdl_seq * _gather_207_var; Token * _literal; Token * _literal_1; Token * _literal_2; @@ -21944,11 +22445,11 @@ invalid_with_stmt_indent_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 614)) // token='with' + (a = _PyPegen_expect_token(p, 615)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_202_var = _gather_202_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_207_var = _gather_207_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -22008,7 +22509,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 623)) // token='try' + (a = _PyPegen_expect_token(p, 624)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22040,13 +22541,13 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + (_keyword = _PyPegen_expect_token(p, 624)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (block_var = block_rule(p)) // block && - _PyPegen_lookahead(0, _tmp_204_rule, p) + _PyPegen_lookahead(0, _tmp_209_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block !('except' | 'finally')")); @@ -22071,29 +22572,29 @@ invalid_try_stmt_rule(Parser *p) Token * _keyword; Token * _literal; Token * _literal_1; - asdl_seq * _loop0_205_var; - asdl_seq * _loop1_206_var; + asdl_seq * _loop0_210_var; + asdl_seq * _loop1_211_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + (_keyword = _PyPegen_expect_token(p, 624)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (_loop0_205_var = _loop0_205_rule(p)) // block* + (_loop0_210_var = _loop0_210_rule(p)) // block* && - (_loop1_206_var = _loop1_206_rule(p)) // except_block+ + (_loop1_211_var = _loop1_211_rule(p)) // except_block+ && - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && (b = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_207_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -22120,23 +22621,23 @@ invalid_try_stmt_rule(Parser *p) Token * _keyword; Token * _literal; Token * _literal_1; - asdl_seq * _loop0_208_var; - asdl_seq * _loop1_209_var; + asdl_seq * _loop0_213_var; + asdl_seq * _loop1_214_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; if ( - (_keyword = _PyPegen_expect_token(p, 623)) // token='try' + (_keyword = _PyPegen_expect_token(p, 624)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (_loop0_208_var = _loop0_208_rule(p)) // block* + (_loop0_213_var = _loop0_213_rule(p)) // block* && - (_loop1_209_var = _loop1_209_rule(p)) // except_star_block+ + (_loop1_214_var = _loop1_214_rule(p)) // except_star_block+ && - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && - (_opt_var = _tmp_210_rule(p), !p->error_indicator) // [expression ['as' NAME]] + (_opt_var = _tmp_215_rule(p), !p->error_indicator) // [expression ['as' NAME]] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -22194,7 +22695,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty a; expr_ty expressions_var; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='except' + (_keyword = _PyPegen_expect_token(p, 637)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? 
&& @@ -22204,7 +22705,7 @@ invalid_except_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_opt_var_1 = _tmp_211_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_216_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -22236,13 +22737,13 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? && (expression_var = expression_rule(p)) // expression && - (_opt_var_1 = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_217_rule(p), !p->error_indicator) // ['as' NAME] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -22269,7 +22770,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -22294,14 +22795,14 @@ invalid_except_stmt_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); Token * _literal; - void *_tmp_213_var; + void *_tmp_218_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_213_var = _tmp_213_rule(p)) // NEWLINE | ':' + (_tmp_218_var = _tmp_218_rule(p)) // NEWLINE | ':' ) { D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); @@ -22347,7 +22848,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 632)) // token='finally' + (a = _PyPegen_expect_token(p, 633)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22404,11 +22905,11 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_214_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['as' NAME] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22440,7 +22941,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22497,13 +22998,13 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 636)) // token='except' + (a = _PyPegen_expect_token(p, 637)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_215_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_220_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22739,7 +23240,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // 
or_pattern && - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -22769,7 +23270,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p) && @@ -22872,7 +23373,7 @@ invalid_class_argument_pattern_rule(Parser *p) asdl_pattern_seq* a; asdl_seq* keyword_patterns_var; if ( - (_opt_var = _tmp_216_rule(p), !p->error_indicator) // [positional_patterns ','] + (_opt_var = _tmp_221_rule(p), !p->error_indicator) // [positional_patterns ','] && (keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns && @@ -22926,7 +23427,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22957,7 +23458,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 641)) // token='if' + (a = _PyPegen_expect_token(p, 642)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -23013,7 +23514,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 643)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 644)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23044,7 +23545,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 643)) // token='elif' + (a = _PyPegen_expect_token(p, 644)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23098,7 +23599,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 644)) // token='else' + (a = _PyPegen_expect_token(p, 645)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23152,7 +23653,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 646)) // token='while' + (_keyword = _PyPegen_expect_token(p, 647)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23183,7 +23684,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 646)) // token='while' + (a = _PyPegen_expect_token(p, 647)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23245,11 +23746,11 @@ invalid_for_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? 
&& - (_keyword = _PyPegen_expect_token(p, 649)) // token='for' + (_keyword = _PyPegen_expect_token(p, 650)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -23286,11 +23787,11 @@ invalid_for_stmt_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 649)) // token='for' + (a = _PyPegen_expect_token(p, 650)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 650)) // token='in' + (_keyword = _PyPegen_expect_token(p, 651)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -23356,7 +23857,7 @@ invalid_def_raw_rule(Parser *p) if ( (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? && - (a = _PyPegen_expect_token(p, 651)) // token='def' + (a = _PyPegen_expect_token(p, 652)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -23366,7 +23867,7 @@ invalid_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (_opt_var_2 = _tmp_217_rule(p), !p->error_indicator) // ['->' expression] + (_opt_var_2 = _tmp_222_rule(p), !p->error_indicator) // ['->' expression] && (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23422,11 +23923,11 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='class' + (_keyword = _PyPegen_expect_token(p, 654)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && - (_opt_var = _tmp_218_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var = _tmp_223_rule(p), !p->error_indicator) // ['(' arguments? ')'] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23457,11 +23958,11 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 653)) // token='class' + (a = _PyPegen_expect_token(p, 654)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && - (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var = _tmp_224_rule(p), !p->error_indicator) // ['(' arguments? 
')'] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23512,11 +24013,11 @@ invalid_double_starred_kvpairs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - asdl_seq * _gather_220_var; + asdl_seq * _gather_225_var; Token * _literal; void *invalid_kvpair_var; if ( - (_gather_220_var = _gather_220_rule(p)) // ','.double_starred_kvpair+ + (_gather_225_var = _gather_225_rule(p)) // ','.double_starred_kvpair+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -23524,7 +24025,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - _res = _PyPegen_dummy_name(p, _gather_220_var, _literal, invalid_kvpair_var); + _res = _PyPegen_dummy_name(p, _gather_225_var, _literal, invalid_kvpair_var); goto done; } p->mark = _mark; @@ -23577,7 +24078,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_222_rule, p) + _PyPegen_lookahead(1, _tmp_227_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -23688,7 +24189,7 @@ invalid_kvpair_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_223_rule, p) + _PyPegen_lookahead(1, _tmp_228_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -23763,6 +24264,450 @@ invalid_starred_expression_rule(Parser *p) return _res; } +// invalid_replacement_field: +// | '{' '=' +// | '{' '!' +// | '{' ':' +// | '{' '}' +// | '{' !(yield_expr | star_expressions) +// | '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') +// | '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}') +// | '{' (yield_expr | star_expressions) '='? invalid_conversion_character +// | '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') +// | '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}' +// | '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}' +static void * +invalid_replacement_field_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '{' '=' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '='")); + Token * _literal; + Token * a; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '='")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '='" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '='")); + } + { // '{' '!' 
+ if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '!'")); + Token * _literal; + Token * a; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = _PyPegen_expect_token(p, 54)) // token='!' + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '!'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '!'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '!'")); + } + { // '{' ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' ':'")); + Token * _literal; + Token * a; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before ':'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' ':'")); + } + { // '{' '}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '}'")); + Token * _literal; + Token * a; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '}'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '}'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '}'")); + } + { // '{' !(yield_expr | star_expressions) + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' !(yield_expr | star_expressions)")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + _PyPegen_lookahead(0, _tmp_229_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' !(yield_expr | star_expressions)")); + _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting a valid expression after '{'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' !(yield_expr | star_expressions)")); + } + { // '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')")); + Token * _literal; + void *_tmp_230_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (_tmp_230_var = _tmp_230_rule(p)) // yield_expr | star_expressions + && + _PyPegen_lookahead(0, _tmp_231_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')")); + _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '=', or '!', or ':', or '}'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')")); + } + { // '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}') + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')")); + Token * _literal; + Token * _literal_1; + void *_tmp_232_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (_tmp_232_var = _tmp_232_rule(p)) // yield_expr | star_expressions + && + (_literal_1 = _PyPegen_expect_token(p, 22)) // token='=' + && + _PyPegen_lookahead(0, _tmp_233_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')")); + _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '!', or ':', or '}'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')")); + } + { // '{' (yield_expr | star_expressions) '='? invalid_conversion_character + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character")); + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + void *_tmp_234_var; + void *invalid_conversion_character_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (_tmp_234_var = _tmp_234_rule(p)) // yield_expr | star_expressions + && + (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? + && + (invalid_conversion_character_var = invalid_conversion_character_rule(p)) // invalid_conversion_character + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? 
invalid_conversion_character")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_234_var, _opt_var, invalid_conversion_character_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character")); + } + { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')")); + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + void *_opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + void *_tmp_235_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (_tmp_235_var = _tmp_235_rule(p)) // yield_expr | star_expressions + && + (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? + && + (_opt_var_1 = _tmp_236_rule(p), !p->error_indicator) // ['!' NAME] + && + _PyPegen_lookahead(0, _tmp_237_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')")); + _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting ':' or '}'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')")); + } + { // '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'")); + Token * _literal; + Token * _literal_1; + asdl_seq * _loop0_240_var; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + void *_opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + void *_tmp_238_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (_tmp_238_var = _tmp_238_rule(p)) // yield_expr | star_expressions + && + (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? + && + (_opt_var_1 = _tmp_239_rule(p), !p->error_indicator) // ['!' NAME] + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + && + (_loop0_240_var = _loop0_240_rule(p)) // fstring_format_spec* + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}' + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'")); + _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}', or format specs" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'")); + } + { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'")); + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + void *_opt_var_1; + UNUSED(_opt_var_1); // Silence compiler warnings + void *_tmp_241_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (_tmp_241_var = _tmp_241_rule(p)) // yield_expr | star_expressions + && + (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? + && + (_opt_var_1 = _tmp_242_rule(p), !p->error_indicator) // ['!' NAME] + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}' + ) + { + D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'")); + _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}'" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// invalid_conversion_character: '!' &(':' | '}') | '!' !NAME +static void * +invalid_conversion_character_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '!' &(':' | '}') + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_conversion_character[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 54)) // token='!' + && + _PyPegen_lookahead(1, _tmp_243_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')")); + _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: missing conversion character" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_conversion_character[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' &(':' | '}')")); + } + { // '!' !NAME + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_conversion_character[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' !NAME")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 54)) // token='!' + && + _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' 
!NAME")); + _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: invalid conversion character" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_conversion_character[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' !NAME")); + } + _res = NULL; + done: + p->level--; + return _res; +} + // _loop0_1: NEWLINE static asdl_seq * _loop0_1_rule(Parser *p) @@ -23899,9 +24844,77 @@ _loop0_2_rule(Parser *p) return _seq; } -// _loop1_3: statement +// _loop0_3: fstring_middle static asdl_seq * -_loop1_3_rule(Parser *p) +_loop0_3_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // fstring_middle + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_middle")); + expr_ty fstring_middle_var; + while ( + (fstring_middle_var = fstring_middle_rule(p)) // fstring_middle + ) + { + _res = fstring_middle_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_3[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_middle")); + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + p->level--; + return _seq; +} + +// _loop1_4: statement +static asdl_seq * +_loop1_4_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -23927,7 +24940,7 @@ _loop1_3_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement")); + D(fprintf(stderr, "%*c> _loop1_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement")); asdl_stmt_seq* statement_var; while ( (statement_var = statement_rule(p)) // statement @@ -23950,7 +24963,7 @@ _loop1_3_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_3[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_4[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "statement")); } if (_n == 0 || p->error_indicator) { @@ -23972,9 +24985,9 @@ _loop1_3_rule(Parser *p) return _seq; } -// _loop0_5: ';' simple_stmt +// _loop0_6: ';' simple_stmt static asdl_seq * -_loop0_5_rule(Parser *p) +_loop0_6_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24000,7 +25013,7 @@ _loop0_5_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' simple_stmt")); + D(fprintf(stderr, "%*c> _loop0_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' simple_stmt")); Token * _literal; stmt_ty elem; while ( @@ -24032,7 +25045,7 @@ _loop0_5_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_5[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_6[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';' simple_stmt")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -24049,9 +25062,9 @@ _loop0_5_rule(Parser *p) return _seq; } -// _gather_4: simple_stmt _loop0_5 +// _gather_5: simple_stmt _loop0_6 static asdl_seq * -_gather_4_rule(Parser *p) +_gather_5_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24063,27 +25076,27 @@ _gather_4_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // simple_stmt _loop0_5 + { // simple_stmt _loop0_6 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_5")); + D(fprintf(stderr, "%*c> _gather_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_6")); stmt_ty elem; asdl_seq * seq; if ( (elem = simple_stmt_rule(p)) // simple_stmt && - (seq = _loop0_5_rule(p)) // _loop0_5 + (seq = _loop0_6_rule(p)) // _loop0_6 ) { - D(fprintf(stderr, "%*c+ _gather_4[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_5")); + D(fprintf(stderr, "%*c+ _gather_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_6")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_4[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt _loop0_5")); + D(fprintf(stderr, "%*c%s _gather_5[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "simple_stmt _loop0_6")); } _res = NULL; done: @@ -24091,9 +25104,9 @@ _gather_4_rule(Parser *p) return _res; } -// _tmp_6: 'import' | 'from' +// _tmp_7: 'import' | 'from' static void * -_tmp_6_rule(Parser *p) +_tmp_7_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24110,18 +25123,18 @@ _tmp_6_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); + D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 606)) // token='import' + (_keyword = _PyPegen_expect_token(p, 607)) // token='import' ) { - D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); + D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_6[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import'")); } { // 'from' @@ -24129,18 +25142,18 @@ _tmp_6_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); + D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 607)) // token='from' + (_keyword = _PyPegen_expect_token(p, 608)) // token='from' ) { - D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); + D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_6[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from'")); } _res = NULL; @@ -24149,9 +25162,9 @@ _tmp_6_rule(Parser *p) return _res; } -// _tmp_7: 'def' | '@' | ASYNC +// _tmp_8: 'def' | '@' | ASYNC static void * -_tmp_7_rule(Parser *p) +_tmp_8_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24168,18 +25181,18 @@ _tmp_7_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); + D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 651)) // token='def' + (_keyword = _PyPegen_expect_token(p, 652)) // token='def' ) { - D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); + D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'def'")); } { // '@' @@ -24187,18 +25200,18 @@ _tmp_7_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); + D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { - D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); + D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'")); } { // ASYNC @@ -24206,18 +25219,18 @@ _tmp_7_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); + D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); + D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); _res = async_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); } _res = NULL; @@ -24226,9 +25239,9 @@ _tmp_7_rule(Parser *p) return _res; } -// _tmp_8: 'class' | '@' +// _tmp_9: 'class' | '@' static void * -_tmp_8_rule(Parser *p) +_tmp_9_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24245,18 +25258,18 @@ _tmp_8_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); + D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='class' + (_keyword = _PyPegen_expect_token(p, 654)) // token='class' ) { - D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); + D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class'")); } { // '@' @@ -24264,18 +25277,18 @@ _tmp_8_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); + D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { - D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); + D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@'")); } _res = NULL; @@ -24284,9 +25297,9 @@ _tmp_8_rule(Parser *p) return _res; } -// _tmp_9: 'with' | ASYNC +// _tmp_10: 'with' | ASYNC static void * -_tmp_9_rule(Parser *p) +_tmp_10_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24303,18 +25316,18 @@ _tmp_9_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); + D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 614)) // token='with' + (_keyword = _PyPegen_expect_token(p, 615)) // token='with' ) { - D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); + D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with'")); } { // ASYNC @@ -24322,18 +25335,18 @@ _tmp_9_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); + D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); + D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); _res = async_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); } _res = NULL; @@ -24342,9 +25355,9 @@ _tmp_9_rule(Parser *p) return _res; } -// _tmp_10: 'for' | ASYNC +// _tmp_11: 'for' | ASYNC static void * -_tmp_10_rule(Parser *p) +_tmp_11_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24361,18 +25374,18 @@ _tmp_10_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); + D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 649)) // token='for' + (_keyword = _PyPegen_expect_token(p, 650)) // token='for' ) { - D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); + D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'for'")); } { // ASYNC @@ -24380,18 +25393,18 @@ _tmp_10_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); + D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); Token * async_var; if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); + D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); _res = async_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); } _res = NULL; @@ -24400,9 +25413,9 @@ _tmp_10_rule(Parser *p) return _res; } -// _tmp_11: '=' annotated_rhs +// _tmp_12: '=' annotated_rhs static void * -_tmp_11_rule(Parser *p) +_tmp_12_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24419,7 +25432,7 @@ _tmp_11_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); Token * _literal; expr_ty d; if ( @@ -24428,7 +25441,7 @@ _tmp_11_rule(Parser *p) (d = annotated_rhs_rule(p)) // annotated_rhs ) { - D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); _res = d; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24438,7 +25451,7 @@ _tmp_11_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs")); } _res = NULL; @@ -24447,9 +25460,9 @@ _tmp_11_rule(Parser *p) return _res; } -// _tmp_12: '(' single_target ')' | single_subscript_attribute_target +// _tmp_13: '(' single_target ')' | single_subscript_attribute_target static void * -_tmp_12_rule(Parser *p) +_tmp_13_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24466,7 +25479,7 @@ _tmp_12_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); + D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); Token * _literal; Token * _literal_1; expr_ty b; @@ -24478,7 +25491,7 @@ _tmp_12_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); + D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); _res = b; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24488,7 +25501,7 @@ _tmp_12_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' single_target ')'")); } { // single_subscript_attribute_target @@ -24496,18 +25509,18 @@ _tmp_12_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); + D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); expr_ty single_subscript_attribute_target_var; if ( (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { - D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); + D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); _res = single_subscript_attribute_target_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target")); } _res = NULL; @@ -24516,9 +25529,9 @@ _tmp_12_rule(Parser *p) return _res; } -// _tmp_13: '=' annotated_rhs +// _tmp_14: '=' annotated_rhs static void * -_tmp_13_rule(Parser *p) +_tmp_14_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24535,7 +25548,7 @@ _tmp_13_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); Token * _literal; expr_ty d; if ( @@ -24544,7 +25557,7 @@ _tmp_13_rule(Parser *p) (d = annotated_rhs_rule(p)) // annotated_rhs ) { - D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); _res = d; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24554,7 +25567,7 @@ _tmp_13_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' annotated_rhs")); } _res = NULL; @@ -24563,9 +25576,9 @@ _tmp_13_rule(Parser *p) return _res; } -// _loop1_14: (star_targets '=') +// _loop1_15: (star_targets '=') static asdl_seq * -_loop1_14_rule(Parser *p) +_loop1_15_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24591,13 +25604,13 @@ _loop1_14_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_224_var; + D(fprintf(stderr, "%*c> _loop1_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_244_var; while ( - (_tmp_224_var = _tmp_224_rule(p)) // star_targets '=' + (_tmp_244_var = _tmp_244_rule(p)) // star_targets '=' ) { - _res = _tmp_224_var; + _res = _tmp_244_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -24614,7 +25627,7 @@ _loop1_14_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_14[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_15[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } if (_n == 0 || p->error_indicator) { @@ -24636,9 +25649,9 @@ _loop1_14_rule(Parser *p) return _seq; } -// _tmp_15: yield_expr | star_expressions +// _tmp_16: yield_expr | star_expressions static void * -_tmp_15_rule(Parser *p) +_tmp_16_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24655,18 +25668,18 @@ _tmp_15_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -24674,18 +25687,18 @@ _tmp_15_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -24694,9 +25707,9 @@ _tmp_15_rule(Parser *p) return _res; } -// _tmp_16: yield_expr | star_expressions +// _tmp_17: yield_expr | star_expressions static void * -_tmp_16_rule(Parser *p) +_tmp_17_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24713,18 +25726,18 @@ _tmp_16_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -24732,18 +25745,18 @@ _tmp_16_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -24752,9 +25765,9 @@ _tmp_16_rule(Parser *p) return _res; } -// _tmp_17: 'from' expression +// _tmp_18: 'from' expression static void * -_tmp_17_rule(Parser *p) +_tmp_18_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24771,16 +25784,16 @@ _tmp_17_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression")); + D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 607)) // token='from' + (_keyword = _PyPegen_expect_token(p, 608)) // token='from' && (z = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression")); + D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24790,7 +25803,7 @@ _tmp_17_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'from' expression")); } _res = NULL; @@ -24799,9 +25812,9 @@ _tmp_17_rule(Parser *p) return _res; } -// _loop0_19: ',' NAME +// _loop0_20: ',' NAME static asdl_seq * -_loop0_19_rule(Parser *p) +_loop0_20_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24827,7 +25840,7 @@ _loop0_19_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); + D(fprintf(stderr, "%*c> _loop0_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); Token * _literal; expr_ty elem; while ( @@ -24859,7 +25872,7 @@ _loop0_19_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_19[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_20[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' NAME")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -24876,9 +25889,9 @@ _loop0_19_rule(Parser *p) return _seq; } -// _gather_18: NAME _loop0_19 +// _gather_19: NAME _loop0_20 static asdl_seq * -_gather_18_rule(Parser *p) +_gather_19_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24890,27 +25903,27 @@ _gather_18_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // NAME _loop0_19 + { // NAME _loop0_20 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_19")); + D(fprintf(stderr, "%*c> _gather_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_20")); expr_ty elem; asdl_seq * seq; if ( (elem = _PyPegen_name_token(p)) // NAME && - (seq = _loop0_19_rule(p)) // _loop0_19 + (seq = _loop0_20_rule(p)) // _loop0_20 ) { - D(fprintf(stderr, "%*c+ _gather_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_19")); + D(fprintf(stderr, "%*c+ _gather_19[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_20")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_18[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_19")); + D(fprintf(stderr, "%*c%s _gather_19[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_20")); } _res = NULL; done: @@ -24918,9 +25931,9 @@ _gather_18_rule(Parser *p) return _res; } -// _loop0_21: ',' NAME +// _loop0_22: ',' NAME static asdl_seq * -_loop0_21_rule(Parser *p) +_loop0_22_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -24946,7 +25959,7 @@ _loop0_21_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); + D(fprintf(stderr, "%*c> _loop0_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); Token * _literal; expr_ty elem; while ( @@ -24978,7 +25991,7 @@ _loop0_21_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_21[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_22[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' NAME")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -24995,9 +26008,9 @@ _loop0_21_rule(Parser *p) return _seq; } -// _gather_20: NAME _loop0_21 +// _gather_21: NAME _loop0_22 static asdl_seq * -_gather_20_rule(Parser *p) +_gather_21_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25009,27 +26022,27 @@ _gather_20_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // NAME _loop0_21 + { // NAME _loop0_22 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_21")); + D(fprintf(stderr, "%*c> _gather_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_22")); expr_ty elem; asdl_seq * seq; if ( (elem = _PyPegen_name_token(p)) // NAME && - (seq = _loop0_21_rule(p)) // _loop0_21 + (seq = _loop0_22_rule(p)) // _loop0_22 ) { - D(fprintf(stderr, "%*c+ _gather_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_21")); + D(fprintf(stderr, "%*c+ _gather_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_22")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_20[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_21")); + D(fprintf(stderr, "%*c%s _gather_21[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_22")); } _res = NULL; done: @@ -25037,9 +26050,9 @@ _gather_20_rule(Parser *p) return _res; } -// _tmp_22: ';' | NEWLINE +// _tmp_23: ';' | NEWLINE static void * -_tmp_22_rule(Parser *p) +_tmp_23_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25056,18 +26069,18 @@ _tmp_22_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'")); + D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 13)) // token=';' ) { - D(fprintf(stderr, "%*c+ _tmp_22[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'")); + D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_22[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'")); } { // NEWLINE @@ -25075,18 +26088,18 @@ _tmp_22_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_22[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); _res = newline_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_22[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); } _res = NULL; @@ -25095,9 +26108,9 @@ _tmp_22_rule(Parser *p) return _res; } -// _tmp_23: ',' expression +// _tmp_24: ',' expression static void * -_tmp_23_rule(Parser *p) +_tmp_24_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25114,7 +26127,7 @@ _tmp_23_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty z; if ( @@ -25123,7 +26136,7 @@ _tmp_23_rule(Parser *p) (z = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -25133,7 +26146,7 @@ _tmp_23_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -25142,9 +26155,9 @@ _tmp_23_rule(Parser *p) return _res; } -// _loop0_24: ('.' | '...') +// _loop0_25: ('.' | '...') static asdl_seq * -_loop0_24_rule(Parser *p) +_loop0_25_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25170,13 +26183,13 @@ _loop0_24_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_225_var; + D(fprintf(stderr, "%*c> _loop0_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); + void *_tmp_245_var; while ( - (_tmp_225_var = _tmp_225_rule(p)) // '.' | '...' + (_tmp_245_var = _tmp_245_rule(p)) // '.' | '...' ) { - _res = _tmp_225_var; + _res = _tmp_245_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -25193,7 +26206,7 @@ _loop0_24_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_24[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_25[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -25210,9 +26223,9 @@ _loop0_24_rule(Parser *p) return _seq; } -// _loop1_25: ('.' | '...') +// _loop1_26: ('.' | '...') static asdl_seq * -_loop1_25_rule(Parser *p) +_loop1_26_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25238,13 +26251,13 @@ _loop1_25_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_226_var; + D(fprintf(stderr, "%*c> _loop1_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); + void *_tmp_246_var; while ( - (_tmp_226_var = _tmp_226_rule(p)) // '.' | '...' + (_tmp_246_var = _tmp_246_rule(p)) // '.' | '...' 
) { - _res = _tmp_226_var; + _res = _tmp_246_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -25261,7 +26274,7 @@ _loop1_25_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_25[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_26[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')")); } if (_n == 0 || p->error_indicator) { @@ -25283,9 +26296,9 @@ _loop1_25_rule(Parser *p) return _seq; } -// _loop0_27: ',' import_from_as_name +// _loop0_28: ',' import_from_as_name static asdl_seq * -_loop0_27_rule(Parser *p) +_loop0_28_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25311,7 +26324,7 @@ _loop0_27_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name")); + D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name")); Token * _literal; alias_ty elem; while ( @@ -25343,7 +26356,7 @@ _loop0_27_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_27[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' import_from_as_name")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -25360,9 +26373,9 @@ _loop0_27_rule(Parser *p) return _seq; } -// _gather_26: import_from_as_name _loop0_27 +// _gather_27: import_from_as_name _loop0_28 static asdl_seq * -_gather_26_rule(Parser *p) +_gather_27_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25374,27 +26387,27 @@ _gather_26_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // import_from_as_name _loop0_27 + { // import_from_as_name _loop0_28 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_27")); + D(fprintf(stderr, "%*c> _gather_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_28")); alias_ty elem; asdl_seq * seq; if ( (elem = import_from_as_name_rule(p)) // import_from_as_name && - (seq = _loop0_27_rule(p)) // _loop0_27 + (seq = _loop0_28_rule(p)) // _loop0_28 ) { - D(fprintf(stderr, "%*c+ _gather_26[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_27")); + D(fprintf(stderr, "%*c+ _gather_27[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_28")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_26[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_27")); + D(fprintf(stderr, "%*c%s _gather_27[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from_as_name _loop0_28")); } _res = NULL; done: @@ -25402,9 +26415,9 @@ _gather_26_rule(Parser *p) return _res; } -// _tmp_28: 'as' NAME +// _tmp_29: 'as' NAME static void * -_tmp_28_rule(Parser *p) +_tmp_29_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25421,16 +26434,16 @@ _tmp_28_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_28[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -25440,7 +26453,7 @@ _tmp_28_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_28[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_29[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -25449,9 +26462,9 @@ _tmp_28_rule(Parser *p) return _res; } -// _loop0_30: ',' dotted_as_name +// _loop0_31: ',' dotted_as_name static asdl_seq * -_loop0_30_rule(Parser *p) +_loop0_31_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25477,7 +26490,7 @@ _loop0_30_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name")); + D(fprintf(stderr, "%*c> _loop0_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name")); Token * _literal; alias_ty elem; while ( @@ -25509,7 +26522,7 @@ _loop0_30_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_30[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_31[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' dotted_as_name")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -25526,9 +26539,9 @@ _loop0_30_rule(Parser *p) return _seq; } -// _gather_29: dotted_as_name _loop0_30 +// _gather_30: dotted_as_name _loop0_31 static asdl_seq * -_gather_29_rule(Parser *p) +_gather_30_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25540,27 +26553,27 @@ _gather_29_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // dotted_as_name _loop0_30 + { // dotted_as_name _loop0_31 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_30")); + D(fprintf(stderr, "%*c> _gather_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_31")); alias_ty elem; asdl_seq * seq; if ( (elem = dotted_as_name_rule(p)) // dotted_as_name && - (seq = _loop0_30_rule(p)) // _loop0_30 + (seq = _loop0_31_rule(p)) // _loop0_31 ) { - D(fprintf(stderr, "%*c+ _gather_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_30")); + D(fprintf(stderr, "%*c+ _gather_30[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_31")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_29[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_30")); + D(fprintf(stderr, "%*c%s _gather_30[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_31")); } _res = NULL; done: @@ -25568,9 +26581,9 @@ _gather_29_rule(Parser *p) return _res; } -// _tmp_31: 'as' NAME +// _tmp_32: 'as' NAME static void * -_tmp_31_rule(Parser *p) +_tmp_32_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25587,16 +26600,16 @@ _tmp_31_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_31[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_32[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -25606,7 +26619,7 @@ _tmp_31_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_31[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_32[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -25615,9 +26628,9 @@ _tmp_31_rule(Parser *p) return _res; } -// _loop1_32: ('@' named_expression NEWLINE) +// _loop1_33: ('@' named_expression NEWLINE) static asdl_seq * -_loop1_32_rule(Parser *p) +_loop1_33_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25643,13 +26656,13 @@ _loop1_32_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_227_var; + D(fprintf(stderr, "%*c> _loop1_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); + void *_tmp_247_var; while ( - (_tmp_227_var = _tmp_227_rule(p)) // '@' named_expression NEWLINE + (_tmp_247_var = _tmp_247_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_227_var; + _res = _tmp_247_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -25666,7 +26679,7 @@ _loop1_32_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_32[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_33[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('@' named_expression NEWLINE)")); } if (_n == 0 || p->error_indicator) { @@ -25688,9 +26701,9 @@ _loop1_32_rule(Parser *p) return _seq; } -// _tmp_33: '(' arguments? ')' +// _tmp_34: '(' arguments? ')' static void * -_tmp_33_rule(Parser *p) +_tmp_34_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25707,7 +26720,7 @@ _tmp_33_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *z; @@ -25719,7 +26732,7 @@ _tmp_33_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_33[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -25729,7 +26742,7 @@ _tmp_33_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_33[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? 
')'")); } _res = NULL; @@ -25738,9 +26751,9 @@ _tmp_33_rule(Parser *p) return _res; } -// _tmp_34: '->' expression +// _tmp_35: '->' expression static void * -_tmp_34_rule(Parser *p) +_tmp_35_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25757,7 +26770,7 @@ _tmp_34_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty z; if ( @@ -25766,7 +26779,7 @@ _tmp_34_rule(Parser *p) (z = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -25776,7 +26789,7 @@ _tmp_34_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -25785,9 +26798,9 @@ _tmp_34_rule(Parser *p) return _res; } -// _tmp_35: '->' expression +// _tmp_36: '->' expression static void * -_tmp_35_rule(Parser *p) +_tmp_36_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25804,7 +26817,7 @@ _tmp_35_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty z; if ( @@ -25813,7 +26826,7 @@ _tmp_35_rule(Parser *p) (z = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_36[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -25823,7 +26836,7 @@ _tmp_35_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_36[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -25832,9 +26845,9 @@ _tmp_35_rule(Parser *p) return _res; } -// _loop0_36: param_no_default +// _loop0_37: param_no_default static asdl_seq * -_loop0_36_rule(Parser *p) +_loop0_37_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25860,7 +26873,7 @@ _loop0_36_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -25883,7 +26896,7 @@ _loop0_36_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_36[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_37[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -25900,9 +26913,9 @@ _loop0_36_rule(Parser *p) return _seq; } -// _loop0_37: param_with_default +// _loop0_38: param_with_default static asdl_seq * -_loop0_37_rule(Parser *p) +_loop0_38_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25928,7 +26941,7 @@ _loop0_37_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop0_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -25951,7 +26964,7 @@ _loop0_37_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_37[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_38[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -25968,9 +26981,9 @@ _loop0_37_rule(Parser *p) return _seq; } -// _loop0_38: param_with_default +// _loop0_39: param_with_default static asdl_seq * -_loop0_38_rule(Parser *p) +_loop0_39_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -25996,7 +27009,7 @@ _loop0_38_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -26019,7 +27032,7 @@ _loop0_38_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_38[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -26036,9 +27049,9 @@ _loop0_38_rule(Parser *p) return _seq; } -// _loop1_39: param_no_default +// _loop1_40: param_no_default static asdl_seq * -_loop1_39_rule(Parser *p) +_loop1_40_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26064,7 +27077,7 @@ _loop1_39_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -26087,7 +27100,7 @@ _loop1_39_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_39[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_40[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -26109,9 +27122,9 @@ _loop1_39_rule(Parser *p) return _seq; } -// _loop0_40: param_with_default +// _loop0_41: param_with_default static asdl_seq * -_loop0_40_rule(Parser *p) +_loop0_41_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26137,7 +27150,7 @@ _loop0_40_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop0_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -26160,7 +27173,7 @@ _loop0_40_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_40[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_41[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -26177,9 +27190,9 @@ _loop0_40_rule(Parser *p) return _seq; } -// _loop1_41: param_with_default +// _loop1_42: param_with_default static asdl_seq * -_loop1_41_rule(Parser *p) +_loop1_42_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26205,7 +27218,7 @@ _loop1_41_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -26228,7 +27241,7 @@ _loop1_41_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_41[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_42[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -26250,9 +27263,9 @@ _loop1_41_rule(Parser *p) return _seq; } -// _loop1_42: param_no_default +// _loop1_43: param_no_default static asdl_seq * -_loop1_42_rule(Parser *p) +_loop1_43_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26278,7 +27291,7 @@ _loop1_42_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -26301,7 +27314,7 @@ _loop1_42_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_42[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_43[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -26323,9 +27336,9 @@ _loop1_42_rule(Parser *p) return _seq; } -// _loop1_43: param_no_default +// _loop1_44: param_no_default static asdl_seq * -_loop1_43_rule(Parser *p) +_loop1_44_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26351,7 +27364,7 @@ _loop1_43_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -26374,7 +27387,7 @@ _loop1_43_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_43[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_44[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -26396,9 +27409,9 @@ _loop1_43_rule(Parser *p) return _seq; } -// _loop0_44: param_no_default +// _loop0_45: param_no_default static asdl_seq * -_loop0_44_rule(Parser *p) +_loop0_45_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26424,7 +27437,7 @@ _loop0_44_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -26447,7 +27460,7 @@ _loop0_44_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_44[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -26464,9 +27477,9 @@ _loop0_44_rule(Parser *p) return _seq; } -// _loop1_45: param_with_default +// _loop1_46: param_with_default static asdl_seq * -_loop1_45_rule(Parser *p) +_loop1_46_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26492,7 +27505,7 @@ _loop1_45_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -26515,7 +27528,7 @@ _loop1_45_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_45[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_46[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -26537,9 +27550,9 @@ _loop1_45_rule(Parser *p) return _seq; } -// _loop0_46: param_no_default +// _loop0_47: param_no_default static asdl_seq * -_loop0_46_rule(Parser *p) +_loop0_47_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26565,7 +27578,7 @@ _loop0_46_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -26588,7 +27601,7 @@ _loop0_46_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_46[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_47[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -26605,9 +27618,9 @@ _loop0_46_rule(Parser *p) return _seq; } -// _loop1_47: param_with_default +// _loop1_48: param_with_default static asdl_seq * -_loop1_47_rule(Parser *p) +_loop1_48_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26633,7 +27646,7 @@ _loop1_47_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -26656,7 +27669,7 @@ _loop1_47_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_47[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_48[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -26678,9 +27691,9 @@ _loop1_47_rule(Parser *p) return _seq; } -// _loop0_48: param_maybe_default +// _loop0_49: param_maybe_default static asdl_seq * -_loop0_48_rule(Parser *p) +_loop0_49_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26706,7 +27719,7 @@ _loop0_48_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -26729,7 +27742,7 @@ _loop0_48_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_48[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_49[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -26746,9 +27759,9 @@ _loop0_48_rule(Parser *p) return _seq; } -// _loop0_49: param_maybe_default +// _loop0_50: param_maybe_default static asdl_seq * -_loop0_49_rule(Parser *p) +_loop0_50_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26774,7 +27787,7 @@ _loop0_49_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -26797,7 +27810,7 @@ _loop0_49_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_49[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_50[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -26814,9 +27827,9 @@ _loop0_49_rule(Parser *p) return _seq; } -// _loop1_50: param_maybe_default +// _loop1_51: param_maybe_default static asdl_seq * -_loop1_50_rule(Parser *p) +_loop1_51_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26842,7 +27855,7 @@ _loop1_50_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -26865,7 +27878,7 @@ _loop1_50_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_50[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_51[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -26887,9 +27900,9 @@ _loop1_50_rule(Parser *p) return _seq; } -// _loop0_52: ',' with_item +// _loop0_53: ',' with_item static asdl_seq * -_loop0_52_rule(Parser *p) +_loop0_53_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26915,7 +27928,7 @@ _loop0_52_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -26947,7 +27960,7 @@ _loop0_52_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_52[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -26964,9 +27977,9 @@ _loop0_52_rule(Parser *p) return _seq; } -// _gather_51: with_item _loop0_52 +// _gather_52: with_item _loop0_53 static asdl_seq * -_gather_51_rule(Parser *p) +_gather_52_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -26978,27 +27991,27 @@ _gather_51_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_52 + { // with_item _loop0_53 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_52")); + D(fprintf(stderr, "%*c> _gather_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_53")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_52_rule(p)) // _loop0_52 + (seq = _loop0_53_rule(p)) // _loop0_53 ) { - D(fprintf(stderr, "%*c+ _gather_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_52")); + D(fprintf(stderr, "%*c+ _gather_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_53")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_51[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_52")); + D(fprintf(stderr, "%*c%s _gather_52[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_53")); } _res = NULL; done: @@ -27006,9 +28019,9 @@ _gather_51_rule(Parser *p) return _res; } -// _loop0_54: ',' with_item +// _loop0_55: ',' with_item static asdl_seq * -_loop0_54_rule(Parser *p) +_loop0_55_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27034,7 +28047,7 @@ _loop0_54_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -27066,7 +28079,7 @@ _loop0_54_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -27083,9 +28096,9 @@ _loop0_54_rule(Parser *p) return _seq; } -// _gather_53: with_item _loop0_54 +// _gather_54: with_item _loop0_55 static asdl_seq * -_gather_53_rule(Parser *p) +_gather_54_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27097,27 +28110,27 @@ _gather_53_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_54 + { // with_item _loop0_55 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_54")); + D(fprintf(stderr, "%*c> _gather_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_55")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_54_rule(p)) // _loop0_54 + (seq = _loop0_55_rule(p)) // _loop0_55 ) { - D(fprintf(stderr, "%*c+ _gather_53[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_54")); + D(fprintf(stderr, "%*c+ _gather_54[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_55")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_53[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_54")); + D(fprintf(stderr, "%*c%s _gather_54[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_55")); } _res = NULL; done: @@ -27125,9 +28138,9 @@ _gather_53_rule(Parser *p) return _res; } -// _loop0_56: ',' with_item +// _loop0_57: ',' with_item static asdl_seq * -_loop0_56_rule(Parser *p) +_loop0_57_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27153,7 +28166,7 @@ _loop0_56_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -27185,7 +28198,7 @@ _loop0_56_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_56[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -27202,9 +28215,9 @@ _loop0_56_rule(Parser *p) return _seq; } -// _gather_55: with_item _loop0_56 +// _gather_56: with_item _loop0_57 static asdl_seq * -_gather_55_rule(Parser *p) +_gather_56_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27216,27 +28229,27 @@ _gather_55_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_56 + { // with_item _loop0_57 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_56")); + D(fprintf(stderr, "%*c> _gather_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_57")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_56_rule(p)) // _loop0_56 + (seq = _loop0_57_rule(p)) // _loop0_57 ) { - D(fprintf(stderr, "%*c+ _gather_55[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_56")); + D(fprintf(stderr, "%*c+ _gather_56[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_57")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_55[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_56")); + D(fprintf(stderr, "%*c%s _gather_56[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_57")); } _res = NULL; done: @@ -27244,9 +28257,9 @@ _gather_55_rule(Parser *p) return _res; } -// _loop0_58: ',' with_item +// _loop0_59: ',' with_item static asdl_seq * -_loop0_58_rule(Parser *p) +_loop0_59_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27272,7 +28285,7 @@ _loop0_58_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -27304,7 +28317,7 @@ _loop0_58_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_58[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_59[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -27321,9 +28334,9 @@ _loop0_58_rule(Parser *p) return _seq; } -// _gather_57: with_item _loop0_58 +// _gather_58: with_item _loop0_59 static asdl_seq * -_gather_57_rule(Parser *p) +_gather_58_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27335,27 +28348,27 @@ _gather_57_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_58 + { // with_item _loop0_59 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_58")); + D(fprintf(stderr, "%*c> _gather_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_59")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_58_rule(p)) // _loop0_58 + (seq = _loop0_59_rule(p)) // _loop0_59 ) { - D(fprintf(stderr, "%*c+ _gather_57[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_58")); + D(fprintf(stderr, "%*c+ _gather_58[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_59")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_57[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_58")); + D(fprintf(stderr, "%*c%s _gather_58[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_59")); } _res = NULL; done: @@ -27363,9 +28376,9 @@ _gather_57_rule(Parser *p) return _res; } -// _tmp_59: ',' | ')' | ':' +// _tmp_60: ',' | ')' | ':' static void * -_tmp_59_rule(Parser *p) +_tmp_60_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27382,18 +28395,18 @@ _tmp_59_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // ')' @@ -27401,18 +28414,18 @@ _tmp_59_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); } { // ':' @@ -27420,18 +28433,18 @@ _tmp_59_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -27440,9 +28453,9 @@ _tmp_59_rule(Parser *p) return _res; } -// _loop1_60: except_block +// _loop1_61: except_block static asdl_seq * -_loop1_60_rule(Parser *p) +_loop1_61_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27468,7 +28481,7 @@ _loop1_60_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); + D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block @@ -27491,7 +28504,7 @@ _loop1_60_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block")); } if (_n == 0 || p->error_indicator) { @@ -27513,9 +28526,9 @@ _loop1_60_rule(Parser *p) return _seq; } -// _loop1_61: except_star_block +// _loop1_62: except_star_block static asdl_seq * -_loop1_61_rule(Parser *p) +_loop1_62_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27541,7 +28554,7 @@ _loop1_61_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); + D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); excepthandler_ty except_star_block_var; while ( (except_star_block_var = except_star_block_rule(p)) // except_star_block @@ -27564,7 +28577,7 @@ _loop1_61_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_star_block")); } if (_n == 0 || p->error_indicator) { @@ -27586,9 +28599,9 @@ _loop1_61_rule(Parser *p) return _seq; } -// _tmp_62: 'as' NAME +// _tmp_63: 'as' NAME static void * -_tmp_62_rule(Parser *p) +_tmp_63_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27605,16 +28618,16 @@ _tmp_62_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_62[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_63[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -27624,7 +28637,7 @@ _tmp_62_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_62[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_63[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -27633,9 +28646,9 @@ _tmp_62_rule(Parser *p) return _res; } -// _tmp_63: 'as' NAME +// _tmp_64: 'as' NAME static void * -_tmp_63_rule(Parser *p) +_tmp_64_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27652,16 +28665,16 @@ _tmp_63_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_63[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_64[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -27671,7 +28684,7 @@ _tmp_63_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_63[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_64[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -27680,9 +28693,9 @@ _tmp_63_rule(Parser *p) return _res; } -// _loop1_64: case_block +// _loop1_65: case_block static asdl_seq * -_loop1_64_rule(Parser *p) +_loop1_65_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27708,7 +28721,7 @@ _loop1_64_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block")); + D(fprintf(stderr, "%*c> _loop1_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block")); match_case_ty case_block_var; while ( (case_block_var = case_block_rule(p)) // case_block @@ -27731,7 +28744,7 @@ _loop1_64_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_64[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_65[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "case_block")); } if (_n == 0 || p->error_indicator) { @@ -27753,9 +28766,9 @@ _loop1_64_rule(Parser *p) return _seq; } -// _loop0_66: '|' closed_pattern +// _loop0_67: '|' closed_pattern static asdl_seq * -_loop0_66_rule(Parser *p) +_loop0_67_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27781,7 +28794,7 @@ _loop0_66_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern")); + D(fprintf(stderr, "%*c> _loop0_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern")); Token * _literal; pattern_ty elem; while ( @@ -27813,7 +28826,7 @@ _loop0_66_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_66[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_67[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'|' closed_pattern")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -27830,9 +28843,9 @@ _loop0_66_rule(Parser *p) return _seq; } -// _gather_65: closed_pattern _loop0_66 +// _gather_66: closed_pattern _loop0_67 static asdl_seq * -_gather_65_rule(Parser *p) +_gather_66_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -27844,85 +28857,27 @@ _gather_65_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // closed_pattern _loop0_66 + { // closed_pattern _loop0_67 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_66")); + D(fprintf(stderr, "%*c> _gather_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_67")); pattern_ty elem; asdl_seq * seq; if ( (elem = closed_pattern_rule(p)) // closed_pattern && - (seq = _loop0_66_rule(p)) // _loop0_66 + (seq = _loop0_67_rule(p)) // _loop0_67 ) { - D(fprintf(stderr, "%*c+ _gather_65[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_66")); + D(fprintf(stderr, "%*c+ _gather_66[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_67")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_65[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_66")); - } - _res = NULL; - done: - p->level--; - return _res; -} - -// _tmp_67: '+' | '-' -static void * -_tmp_67_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '+' - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 14)) // token='+' - ) - { - D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'+'")); - } - { // '-' - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 15)) // token='-' - ) - { - D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'")); + D(fprintf(stderr, "%*c%s _gather_66[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_67")); } _res = NULL; done: @@ -27988,7 +28943,7 @@ _tmp_68_rule(Parser *p) return _res; } -// _tmp_69: '.' | '(' | '=' +// _tmp_69: '+' | '-' static void * _tmp_69_rule(Parser *p) { @@ -28002,62 +28957,43 @@ _tmp_69_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // '.' - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - ) - { - D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); - } - { // '(' + { // '+' if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'")); Token * _literal; if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' + (_literal = _PyPegen_expect_token(p, 14)) // token='+' ) { - D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'")); _res = _literal; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'")); } - { // '=' + { // '-' if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'")); Token * _literal; if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' + (_literal = _PyPegen_expect_token(p, 15)) // token='-' ) { - D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'")); _res = _literal; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'")); } _res = NULL; done: @@ -28142,9 +29078,86 @@ _tmp_70_rule(Parser *p) return _res; } -// _loop0_72: ',' maybe_star_pattern +// _tmp_71: '.' 
| '(' | '=' +static void * +_tmp_71_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '.' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + ) + { + D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); + } + { // '(' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + ) + { + D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); + } + { // '=' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _loop0_73: ',' maybe_star_pattern static asdl_seq * -_loop0_72_rule(Parser *p) +_loop0_73_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28170,7 +29183,7 @@ _loop0_72_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern")); + D(fprintf(stderr, "%*c> _loop0_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern")); Token * _literal; pattern_ty elem; while ( @@ -28202,7 +29215,7 @@ _loop0_72_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_72[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_73[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' maybe_star_pattern")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -28219,9 +29232,9 @@ _loop0_72_rule(Parser *p) return _seq; } -// _gather_71: maybe_star_pattern _loop0_72 +// _gather_72: maybe_star_pattern _loop0_73 static asdl_seq * -_gather_71_rule(Parser *p) +_gather_72_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28233,27 +29246,27 @@ _gather_71_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // maybe_star_pattern _loop0_72 + { // maybe_star_pattern _loop0_73 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_72")); + D(fprintf(stderr, "%*c> _gather_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_73")); pattern_ty elem; asdl_seq * seq; if ( (elem = maybe_star_pattern_rule(p)) // maybe_star_pattern && - (seq = _loop0_72_rule(p)) // _loop0_72 + (seq = _loop0_73_rule(p)) // _loop0_73 ) { - D(fprintf(stderr, "%*c+ _gather_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_72")); + D(fprintf(stderr, "%*c+ _gather_72[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_73")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_71[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_72")); + D(fprintf(stderr, "%*c%s _gather_72[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_73")); } _res = NULL; done: @@ -28261,9 +29274,9 @@ _gather_71_rule(Parser *p) return _res; } -// _loop0_74: ',' key_value_pattern +// _loop0_75: ',' key_value_pattern static asdl_seq * -_loop0_74_rule(Parser *p) +_loop0_75_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28289,7 +29302,7 @@ _loop0_74_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern")); + D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern")); Token * _literal; KeyPatternPair* elem; while ( @@ -28321,7 +29334,7 @@ _loop0_74_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_74[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' key_value_pattern")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -28338,9 +29351,9 @@ _loop0_74_rule(Parser *p) return _seq; } -// _gather_73: key_value_pattern _loop0_74 +// _gather_74: key_value_pattern _loop0_75 static asdl_seq * -_gather_73_rule(Parser *p) +_gather_74_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28352,27 +29365,27 @@ _gather_73_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // key_value_pattern _loop0_74 + { // key_value_pattern _loop0_75 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_74")); + D(fprintf(stderr, "%*c> _gather_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_75")); KeyPatternPair* elem; asdl_seq * seq; if ( (elem = key_value_pattern_rule(p)) // key_value_pattern && - (seq = _loop0_74_rule(p)) // _loop0_74 + (seq = _loop0_75_rule(p)) // _loop0_75 ) { - D(fprintf(stderr, "%*c+ _gather_73[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_74")); + D(fprintf(stderr, "%*c+ _gather_74[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_75")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_73[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_74")); + D(fprintf(stderr, "%*c%s _gather_74[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_75")); } _res = NULL; done: @@ -28380,9 +29393,9 @@ _gather_73_rule(Parser *p) return _res; } -// _tmp_75: literal_expr | attr +// _tmp_76: literal_expr | attr static void * -_tmp_75_rule(Parser *p) +_tmp_76_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28399,18 +29412,18 @@ _tmp_75_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr")); + D(fprintf(stderr, "%*c> _tmp_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr")); expr_ty literal_expr_var; if ( (literal_expr_var = literal_expr_rule(p)) // literal_expr ) { - D(fprintf(stderr, "%*c+ _tmp_75[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr")); + D(fprintf(stderr, "%*c+ _tmp_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr")); _res = literal_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_75[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_76[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "literal_expr")); } { // attr @@ -28418,18 +29431,18 @@ _tmp_75_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr")); + D(fprintf(stderr, "%*c> _tmp_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr")); expr_ty attr_var; if ( (attr_var = attr_rule(p)) // attr ) { - D(fprintf(stderr, "%*c+ _tmp_75[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr")); + D(fprintf(stderr, "%*c+ _tmp_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr")); _res = attr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_75[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_76[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "attr")); } _res = NULL; @@ -28438,9 +29451,9 @@ _tmp_75_rule(Parser *p) return _res; } -// _loop0_77: ',' pattern +// _loop0_78: ',' pattern static asdl_seq * -_loop0_77_rule(Parser *p) +_loop0_78_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28466,7 +29479,7 @@ _loop0_77_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern")); + D(fprintf(stderr, "%*c> _loop0_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern")); Token * _literal; pattern_ty elem; while ( @@ -28498,7 +29511,7 @@ _loop0_77_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_78[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' pattern")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -28515,9 +29528,9 @@ _loop0_77_rule(Parser *p) return _seq; } -// _gather_76: pattern _loop0_77 +// _gather_77: pattern _loop0_78 static asdl_seq * -_gather_76_rule(Parser *p) +_gather_77_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28529,27 +29542,27 @@ _gather_76_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // pattern _loop0_77 + { // pattern _loop0_78 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_77")); + D(fprintf(stderr, "%*c> _gather_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_78")); pattern_ty elem; asdl_seq * seq; if ( (elem = pattern_rule(p)) // pattern && - (seq = _loop0_77_rule(p)) // _loop0_77 + (seq = _loop0_78_rule(p)) // _loop0_78 ) { - D(fprintf(stderr, "%*c+ _gather_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_77")); + D(fprintf(stderr, "%*c+ _gather_77[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_78")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_76[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pattern _loop0_77")); + D(fprintf(stderr, "%*c%s _gather_77[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "pattern _loop0_78")); } _res = NULL; done: @@ -28557,9 +29570,9 @@ _gather_76_rule(Parser *p) return _res; } -// _loop0_79: ',' keyword_pattern +// _loop0_80: ',' keyword_pattern static asdl_seq * -_loop0_79_rule(Parser *p) +_loop0_80_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28585,7 +29598,7 @@ _loop0_79_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern")); + D(fprintf(stderr, "%*c> _loop0_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern")); Token * _literal; KeyPatternPair* elem; while ( @@ -28617,7 +29630,7 @@ _loop0_79_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_80[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' keyword_pattern")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -28634,9 +29647,9 @@ _loop0_79_rule(Parser *p) return _seq; } -// _gather_78: keyword_pattern _loop0_79 +// _gather_79: keyword_pattern _loop0_80 static asdl_seq * -_gather_78_rule(Parser *p) +_gather_79_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28648,27 +29661,27 @@ _gather_78_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // keyword_pattern _loop0_79 + { // keyword_pattern _loop0_80 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_79")); + D(fprintf(stderr, "%*c> _gather_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_80")); KeyPatternPair* elem; asdl_seq * seq; if ( (elem = keyword_pattern_rule(p)) // keyword_pattern && - (seq = _loop0_79_rule(p)) // _loop0_79 + (seq = _loop0_80_rule(p)) // _loop0_80 ) { - D(fprintf(stderr, "%*c+ _gather_78[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_79")); + D(fprintf(stderr, "%*c+ _gather_79[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_80")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_78[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "keyword_pattern _loop0_79")); + D(fprintf(stderr, "%*c%s _gather_79[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "keyword_pattern _loop0_80")); } _res = NULL; done: @@ -28676,9 +29689,9 @@ _gather_78_rule(Parser *p) return _res; } -// _loop1_80: (',' expression) +// _loop1_81: (',' expression) static asdl_seq * -_loop1_80_rule(Parser *p) +_loop1_81_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28704,13 +29717,13 @@ _loop1_80_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_228_var; + D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); + void *_tmp_248_var; while ( - (_tmp_228_var = _tmp_228_rule(p)) // ',' expression + (_tmp_248_var = _tmp_248_rule(p)) // ',' expression ) { - _res = _tmp_228_var; + _res = _tmp_248_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28727,7 +29740,7 @@ _loop1_80_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' expression)")); } if (_n == 0 || p->error_indicator) { @@ -28749,9 +29762,9 @@ _loop1_80_rule(Parser *p) return _seq; } -// _loop1_81: (',' star_expression) +// _loop1_82: (',' star_expression) static asdl_seq * -_loop1_81_rule(Parser *p) +_loop1_82_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28777,13 +29790,13 @@ _loop1_81_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_229_var; + D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); + void *_tmp_249_var; while ( - (_tmp_229_var = _tmp_229_rule(p)) // ',' star_expression + (_tmp_249_var = _tmp_249_rule(p)) // ',' star_expression ) { - _res = _tmp_229_var; + _res = _tmp_249_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28800,7 +29813,7 @@ _loop1_81_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_expression)")); } if (_n == 0 || p->error_indicator) { @@ -28822,9 +29835,9 @@ _loop1_81_rule(Parser *p) return _seq; } -// _loop0_83: ',' star_named_expression +// _loop0_84: ',' star_named_expression static asdl_seq * -_loop0_83_rule(Parser *p) +_loop0_84_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28850,7 +29863,7 @@ _loop0_83_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression")); + D(fprintf(stderr, "%*c> _loop0_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression")); Token * _literal; expr_ty elem; while ( @@ -28882,7 +29895,7 @@ _loop0_83_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_84[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_named_expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -28899,9 +29912,9 @@ _loop0_83_rule(Parser *p) return _seq; } -// _gather_82: star_named_expression _loop0_83 +// _gather_83: star_named_expression _loop0_84 static asdl_seq * -_gather_82_rule(Parser *p) +_gather_83_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28913,27 +29926,27 @@ _gather_82_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // star_named_expression _loop0_83 + { // star_named_expression _loop0_84 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_83")); + D(fprintf(stderr, "%*c> _gather_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_84")); expr_ty elem; asdl_seq * seq; if ( (elem = star_named_expression_rule(p)) // star_named_expression && - (seq = _loop0_83_rule(p)) // _loop0_83 + (seq = _loop0_84_rule(p)) // _loop0_84 ) { - D(fprintf(stderr, "%*c+ _gather_82[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_83")); + D(fprintf(stderr, "%*c+ _gather_83[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_84")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_82[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_83")); + D(fprintf(stderr, "%*c%s _gather_83[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_84")); } _res = NULL; done: @@ -28941,9 +29954,9 @@ _gather_82_rule(Parser *p) return _res; } -// _loop1_84: ('or' conjunction) +// _loop1_85: ('or' conjunction) static asdl_seq * -_loop1_84_rule(Parser *p) +_loop1_85_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -28969,13 +29982,13 @@ _loop1_84_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_230_var; + D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); + void *_tmp_250_var; while ( - (_tmp_230_var = _tmp_230_rule(p)) // 'or' conjunction + (_tmp_250_var = _tmp_250_rule(p)) // 'or' conjunction ) { - _res = _tmp_230_var; + _res = _tmp_250_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28992,7 +30005,7 @@ _loop1_84_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('or' conjunction)")); } if (_n == 0 || p->error_indicator) { @@ -29014,9 +30027,9 @@ _loop1_84_rule(Parser *p) return _seq; } -// _loop1_85: ('and' inversion) +// _loop1_86: ('and' inversion) static asdl_seq * -_loop1_85_rule(Parser *p) +_loop1_86_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29042,13 +30055,13 @@ _loop1_85_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_231_var; + D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); + void *_tmp_251_var; while ( - (_tmp_231_var = _tmp_231_rule(p)) // 'and' inversion + (_tmp_251_var = _tmp_251_rule(p)) // 'and' inversion ) { - _res = _tmp_231_var; + _res = _tmp_251_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -29065,7 +30078,7 @@ _loop1_85_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('and' inversion)")); } if (_n == 0 || p->error_indicator) { @@ -29087,9 +30100,9 @@ _loop1_85_rule(Parser *p) return _seq; } -// _loop1_86: compare_op_bitwise_or_pair +// _loop1_87: compare_op_bitwise_or_pair static asdl_seq * -_loop1_86_rule(Parser *p) +_loop1_87_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29115,7 +30128,7 @@ _loop1_86_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair")); + D(fprintf(stderr, "%*c> _loop1_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair")); CmpopExprPair* compare_op_bitwise_or_pair_var; while ( (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair @@ -29138,7 +30151,7 @@ _loop1_86_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_87[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compare_op_bitwise_or_pair")); } if (_n == 0 || p->error_indicator) { @@ -29160,9 +30173,9 @@ _loop1_86_rule(Parser *p) return _seq; } -// _tmp_87: '!=' +// _tmp_88: '!=' static void * -_tmp_87_rule(Parser *p) +_tmp_88_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29179,13 +30192,13 @@ _tmp_87_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='")); + D(fprintf(stderr, "%*c> _tmp_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='")); Token * tok; if ( (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { - D(fprintf(stderr, "%*c+ _tmp_87[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='")); + D(fprintf(stderr, "%*c+ _tmp_88[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='")); _res = _PyPegen_check_barry_as_flufl ( p , tok ) ? 
NULL : tok; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -29195,7 +30208,7 @@ _tmp_87_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_87[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_88[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!='")); } _res = NULL; @@ -29204,9 +30217,9 @@ _tmp_87_rule(Parser *p) return _res; } -// _loop0_89: ',' (slice | starred_expression) +// _loop0_90: ',' (slice | starred_expression) static asdl_seq * -_loop0_89_rule(Parser *p) +_loop0_90_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29232,13 +30245,13 @@ _loop0_89_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)")); + D(fprintf(stderr, "%*c> _loop0_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_232_rule(p)) // slice | starred_expression + (elem = _tmp_252_rule(p)) // slice | starred_expression ) { _res = elem; @@ -29264,7 +30277,7 @@ _loop0_89_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_89[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_90[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (slice | starred_expression)")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -29281,9 +30294,9 @@ _loop0_89_rule(Parser *p) return _seq; } -// _gather_88: (slice | starred_expression) _loop0_89 +// _gather_89: (slice | starred_expression) _loop0_90 static asdl_seq * -_gather_88_rule(Parser *p) +_gather_89_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29295,27 +30308,27 @@ _gather_88_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (slice | starred_expression) _loop0_89 + { // (slice | starred_expression) _loop0_90 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_89")); + D(fprintf(stderr, "%*c> _gather_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_90")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_232_rule(p)) // slice | starred_expression + (elem = _tmp_252_rule(p)) // slice | starred_expression && - (seq = _loop0_89_rule(p)) // _loop0_89 + (seq = _loop0_90_rule(p)) // _loop0_90 ) { - D(fprintf(stderr, "%*c+ _gather_88[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_89")); + D(fprintf(stderr, "%*c+ _gather_89[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_90")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_88[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_89")); + D(fprintf(stderr, "%*c%s _gather_89[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_90")); } _res = NULL; done: @@ -29323,9 +30336,9 @@ _gather_88_rule(Parser *p) return _res; } -// _tmp_90: ':' expression? +// _tmp_91: ':' expression? 
static void * -_tmp_90_rule(Parser *p) +_tmp_91_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29342,7 +30355,7 @@ _tmp_90_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?")); + D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?")); Token * _literal; void *d; if ( @@ -29351,7 +30364,7 @@ _tmp_90_rule(Parser *p) (d = expression_rule(p), !p->error_indicator) // expression? ) { - D(fprintf(stderr, "%*c+ _tmp_90[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?")); + D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?")); _res = d; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -29361,7 +30374,7 @@ _tmp_90_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_90[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?")); } _res = NULL; @@ -29370,9 +30383,67 @@ _tmp_90_rule(Parser *p) return _res; } -// _tmp_91: tuple | group | genexp +// _tmp_92: STRING | FSTRING_START static void * -_tmp_91_rule(Parser *p) +_tmp_92_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // STRING + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); + expr_ty string_var; + if ( + (string_var = _PyPegen_string_token(p)) // STRING + ) + { + D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING")); + _res = string_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING")); + } + { // FSTRING_START + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START")); + Token * fstring_start_var; + if ( + (fstring_start_var = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START' + ) + { + D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START")); + _res = fstring_start_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "FSTRING_START")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_93: tuple | group | genexp +static void * +_tmp_93_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29389,18 +30460,18 @@ _tmp_91_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); _res = tuple_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); } { // group @@ -29408,18 +30479,18 @@ _tmp_91_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group")); + D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group")); expr_ty group_var; if ( (group_var = group_rule(p)) // group ) { - D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group")); + D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group")); _res = group_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "group")); } { // genexp @@ -29427,18 +30498,18 @@ _tmp_91_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp ) { - D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); _res = genexp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "genexp")); } _res = NULL; @@ -29447,9 +30518,9 @@ _tmp_91_rule(Parser *p) return _res; } -// _tmp_92: list | listcomp +// _tmp_94: list | listcomp static void * -_tmp_92_rule(Parser *p) +_tmp_94_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29466,18 +30537,18 @@ _tmp_92_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); _res = list_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list")); } { // listcomp @@ -29485,18 +30556,18 @@ _tmp_92_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp")); + D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp")); expr_ty listcomp_var; if ( (listcomp_var = listcomp_rule(p)) // listcomp ) { - D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp")); + D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp")); _res = listcomp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "listcomp")); } _res = NULL; @@ -29505,9 +30576,9 @@ _tmp_92_rule(Parser *p) return _res; } -// _tmp_93: dict | set | dictcomp | setcomp +// _tmp_95: dict | set | dictcomp | setcomp static void * -_tmp_93_rule(Parser *p) +_tmp_95_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29524,18 +30595,18 @@ _tmp_93_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict")); + D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict")); expr_ty dict_var; if ( (dict_var = dict_rule(p)) // dict ) { - D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict")); + D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict")); _res = dict_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "dict")); } { // set @@ -29543,18 +30614,18 @@ _tmp_93_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set")); + D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set")); expr_ty set_var; if ( (set_var = set_rule(p)) // set ) { - D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set")); + D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set")); _res = set_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set")); } { // dictcomp @@ -29562,18 +30633,18 @@ _tmp_93_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp")); + D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp")); expr_ty dictcomp_var; if ( (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { - D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp")); + D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp")); _res = dictcomp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dictcomp")); } { // setcomp @@ -29581,18 +30652,18 @@ _tmp_93_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp")); + D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp")); expr_ty setcomp_var; if ( (setcomp_var = setcomp_rule(p)) // setcomp ) { - D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp")); + D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp")); _res = setcomp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "setcomp")); } _res = NULL; @@ -29601,9 +30672,9 @@ _tmp_93_rule(Parser *p) return _res; } -// _tmp_94: yield_expr | named_expression +// _tmp_96: yield_expr | named_expression static void * -_tmp_94_rule(Parser *p) +_tmp_96_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29620,18 +30691,18 @@ _tmp_94_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // named_expression @@ -29639,18 +30710,18 @@ _tmp_94_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); expr_ty named_expression_var; if ( (named_expression_var = named_expression_rule(p)) // named_expression ) { - D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); _res = named_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression")); } _res = NULL; @@ -29659,143 +30730,7 @@ _tmp_94_rule(Parser *p) return _res; } -// _loop0_95: lambda_param_no_default -static asdl_seq * -_loop0_95_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - Py_ssize_t _children_capacity = 1; - Py_ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_95[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); - } - asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); - PyMem_Free(_children); - p->level--; - return _seq; -} - -// _loop0_96: lambda_param_with_default -static asdl_seq * -_loop0_96_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - Py_ssize_t _children_capacity = 1; - Py_ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_96[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); - } - asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); - PyMem_Free(_children); - p->level--; - return _seq; -} - -// _loop0_97: lambda_param_with_default +// _loop0_97: lambda_param_no_default static asdl_seq * _loop0_97_rule(Parser *p) { @@ -29818,18 +30753,18 @@ _loop0_97_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_with_default + { // lambda_param_no_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; + D(fprintf(stderr, "%*c> _loop0_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - _res = lambda_param_with_default_var; + _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -29847,7 +30782,7 @@ _loop0_97_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s _loop0_97[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -29863,9 +30798,9 @@ _loop0_97_rule(Parser *p) return _seq; } -// _loop1_98: lambda_param_no_default +// _loop0_98: lambda_param_with_default static asdl_seq * -_loop1_98_rule(Parser *p) +_loop0_98_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -29886,18 +30821,18 @@ _loop1_98_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_no_default + { // lambda_param_with_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; + D(fprintf(stderr, "%*c> _loop0_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - _res = lambda_param_no_default_var; + _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -29914,13 +30849,8 @@ _loop1_98_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_98[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; + D(fprintf(stderr, "%*c%s _loop0_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -30004,7 +30934,7 @@ _loop0_99_rule(Parser *p) return _seq; } -// _loop1_100: lambda_param_with_default +// _loop1_100: lambda_param_no_default static asdl_seq * _loop1_100_rule(Parser *p) { @@ -30027,18 +30957,18 @@ _loop1_100_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_with_default + { // lambda_param_no_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; + D(fprintf(stderr, "%*c> _loop1_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - _res = lambda_param_with_default_var; + _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30056,7 +30986,7 @@ _loop1_100_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s _loop1_100[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); @@ -30077,9 +31007,9 @@ _loop1_100_rule(Parser *p) return _seq; } -// _loop1_101: lambda_param_no_default +// _loop0_101: lambda_param_with_default static asdl_seq * -_loop1_101_rule(Parser *p) +_loop0_101_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -30100,18 +31030,18 @@ _loop1_101_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_no_default + { // lambda_param_with_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; + D(fprintf(stderr, "%*c> _loop0_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - _res = lambda_param_no_default_var; + _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30128,13 +31058,8 @@ _loop1_101_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_101[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; + D(fprintf(stderr, "%*c%s _loop0_101[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -30150,7 +31075,7 @@ _loop1_101_rule(Parser *p) return _seq; } -// _loop1_102: lambda_param_no_default +// _loop1_102: lambda_param_with_default static asdl_seq * _loop1_102_rule(Parser *p) { @@ -30173,18 +31098,18 @@ _loop1_102_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_no_default + { // lambda_param_with_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; + D(fprintf(stderr, "%*c> _loop1_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - _res = lambda_param_no_default_var; + _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30202,7 +31127,7 @@ _loop1_102_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s _loop1_102[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); @@ -30223,9 +31148,9 @@ _loop1_102_rule(Parser *p) return _seq; } -// _loop0_103: lambda_param_no_default +// _loop1_103: lambda_param_no_default static asdl_seq * -_loop0_103_rule(Parser *p) +_loop1_103_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -30251,7 +31176,7 @@ _loop0_103_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop1_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -30274,9 +31199,14 @@ _loop0_103_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_103[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_103[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; + } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { PyMem_Free(_children); @@ -30291,7 +31221,7 @@ _loop0_103_rule(Parser *p) return _seq; } -// _loop1_104: lambda_param_with_default +// _loop1_104: lambda_param_no_default static asdl_seq * _loop1_104_rule(Parser *p) { @@ -30314,18 +31244,18 @@ _loop1_104_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_with_default + { // lambda_param_no_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; + D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - _res = lambda_param_with_default_var; + _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30343,7 +31273,7 @@ _loop1_104_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s _loop1_104[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); @@ -30505,7 +31435,7 @@ _loop1_106_rule(Parser *p) return _seq; } -// _loop0_107: lambda_param_maybe_default +// _loop0_107: lambda_param_no_default static asdl_seq * _loop0_107_rule(Parser *p) { @@ -30528,18 +31458,18 @@ _loop0_107_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_maybe_default + { // lambda_param_no_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); - NameDefaultPair* lambda_param_maybe_default_var; + D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - _res = lambda_param_maybe_default_var; + _res = lambda_param_no_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30557,7 +31487,7 @@ _loop0_107_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s _loop0_107[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -30573,7 +31503,7 @@ _loop0_107_rule(Parser *p) return _seq; } -// _loop1_108: lambda_param_maybe_default +// _loop1_108: lambda_param_with_default static asdl_seq * _loop1_108_rule(Parser *p) { @@ -30596,18 +31526,18 @@ _loop1_108_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // lambda_param_maybe_default + { // lambda_param_with_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); - NameDefaultPair* lambda_param_maybe_default_var; + D(fprintf(stderr, "%*c> _loop1_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - _res = lambda_param_maybe_default_var; + _res = lambda_param_with_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30625,7 +31555,7 @@ _loop1_108_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s _loop1_108[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { PyMem_Free(_children); @@ -30646,9 +31576,9 @@ _loop1_108_rule(Parser *p) return _seq; } -// _loop1_109: STRING +// _loop0_109: lambda_param_maybe_default static asdl_seq * -_loop1_109_rule(Parser *p) +_loop0_109_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -30669,18 +31599,18 @@ _loop1_109_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // STRING + { // lambda_param_maybe_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); - expr_ty string_var; + D(fprintf(stderr, "%*c> _loop0_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + NameDefaultPair* lambda_param_maybe_default_var; while ( - (string_var = _PyPegen_string_token(p)) // STRING + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - _res = string_var; + _res = lambda_param_maybe_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30697,13 +31627,8 @@ _loop1_109_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_109[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; + D(fprintf(stderr, "%*c%s _loop0_109[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -30719,59 +31644,9 @@ _loop1_109_rule(Parser *p) return _seq; } -// _tmp_110: star_named_expression ',' star_named_expressions? -static void * -_tmp_110_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // star_named_expression ',' star_named_expressions? - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); - Token * _literal; - expr_ty y; - void *z; - if ( - (y = star_named_expression_rule(p)) // star_named_expression - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (z = star_named_expressions_rule(p), !p->error_indicator) // star_named_expressions? - ) - { - D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); - _res = _PyPegen_seq_insert_in_front ( p , y , z ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - p->level--; - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?")); - } - _res = NULL; - done: - p->level--; - return _res; -} - -// _loop0_112: ',' double_starred_kvpair +// _loop1_110: lambda_param_maybe_default static asdl_seq * -_loop0_112_rule(Parser *p) +_loop1_110_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -30792,27 +31667,18 @@ _loop0_112_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // ',' double_starred_kvpair + { // lambda_param_maybe_default if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); - Token * _literal; - KeyValuePair* elem; + D(fprintf(stderr, "%*c> _loop1_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + NameDefaultPair* lambda_param_maybe_default_var; while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - p->level--; - return NULL; - } + _res = lambda_param_maybe_default_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30829,8 +31695,13 @@ _loop0_112_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair")); + D(fprintf(stderr, "%*c%s _loop1_110[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -30846,9 +31717,9 @@ _loop0_112_rule(Parser *p) return _seq; } -// _gather_111: double_starred_kvpair _loop0_112 -static asdl_seq * -_gather_111_rule(Parser *p) +// _tmp_111: yield_expr | star_expressions +static void * +_tmp_111_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -30858,112 +31729,55 @@ _gather_111_rule(Parser *p) p->level--; return NULL; } - asdl_seq * _res = NULL; + void * _res = NULL; int _mark = p->mark; - { // double_starred_kvpair _loop0_112 + { // yield_expr if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_112")); - KeyValuePair* elem; - asdl_seq * seq; + D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; if ( - (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair - && - (seq = _loop0_112_rule(p)) // _loop0_112 + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _gather_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_112")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); + D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_111[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "double_starred_kvpair _loop0_112")); - } - _res = NULL; - done: - p->level--; - return _res; -} - -// _loop1_113: for_if_clause -static asdl_seq * -_loop1_113_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - p->error_indicator = 1; - PyErr_NoMemory(); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; + D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } - Py_ssize_t _children_capacity = 1; - Py_ssize_t _n = 0; - { // for_if_clause + { // star_expressions if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause")); - comprehension_ty for_if_clause_var; - while ( - (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause + D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - _res = for_if_clause_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; + D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_113[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "for_if_clause")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - p->level--; - return NULL; - } - asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - p->level--; - return NULL; + D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } - for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); - PyMem_Free(_children); + _res = NULL; + done: p->level--; - return _seq; + return _res; } -// _loop0_114: ('if' disjunction) +// _loop0_112: fstring_format_spec static asdl_seq * -_loop0_114_rule(Parser *p) +_loop0_112_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -30984,18 +31798,18 @@ _loop0_114_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // ('if' disjunction) + { // fstring_format_spec if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_233_var; + D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec")); + expr_ty fstring_format_spec_var; while ( - (_tmp_233_var = _tmp_233_rule(p)) // 'if' disjunction + (fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec ) { - _res = _tmp_233_var; + _res = fstring_format_spec_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -31012,8 +31826,8 @@ _loop0_114_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_114[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)")); + D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_format_spec")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -31029,9 +31843,9 @@ _loop0_114_rule(Parser *p) return _seq; } -// _loop0_115: ('if' disjunction) +// _loop1_113: (fstring | string) static asdl_seq * -_loop0_115_rule(Parser *p) +_loop1_113_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31052,18 +31866,18 @@ _loop0_115_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // ('if' disjunction) + { // (fstring | string) if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_234_var; + D(fprintf(stderr, "%*c> _loop1_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(fstring | string)")); + void *_tmp_253_var; while ( - (_tmp_234_var = _tmp_234_rule(p)) // 'if' disjunction + (_tmp_253_var = _tmp_253_rule(p)) // fstring | string ) { - _res = _tmp_234_var; + _res = _tmp_253_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -31080,8 +31894,13 @@ _loop0_115_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_115[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)")); + D(fprintf(stderr, "%*c%s _loop1_113[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(fstring | string)")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -31097,9 +31916,9 @@ _loop0_115_rule(Parser *p) return _seq; } -// _tmp_116: assignment_expression | expression !':=' +// _tmp_114: star_named_expression ',' star_named_expressions? 
static void * -_tmp_116_rule(Parser *p) +_tmp_114_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31111,45 +31930,35 @@ _tmp_116_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // assignment_expression - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); - expr_ty assignment_expression_var; - if ( - (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); - _res = assignment_expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression")); - } - { // expression !':=' + { // star_named_expression ',' star_named_expressions? if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); - expr_ty expression_var; + D(fprintf(stderr, "%*c> _tmp_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); + Token * _literal; + expr_ty y; + void *z; if ( - (expression_var = expression_rule(p)) // expression + (y = star_named_expression_rule(p)) // star_named_expression && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (z = star_named_expressions_rule(p), !p->error_indicator) // star_named_expressions? ) { - D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); - _res = expression_var; + D(fprintf(stderr, "%*c+ _tmp_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); + _res = _PyPegen_seq_insert_in_front ( p , y , z ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c%s _tmp_114[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?")); } _res = NULL; done: @@ -31157,9 +31966,9 @@ _tmp_116_rule(Parser *p) return _res; } -// _loop0_118: ',' (starred_expression | (assignment_expression | expression !':=') !'=') +// _loop0_116: ',' double_starred_kvpair static asdl_seq * -_loop0_118_rule(Parser *p) +_loop0_116_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31180,18 +31989,18 @@ _loop0_118_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // ',' (starred_expression | (assignment_expression | expression !':=') !'=') + { // ',' double_starred_kvpair if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); + D(fprintf(stderr, "%*c> _loop0_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); Token * _literal; - void *elem; + KeyValuePair* elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair ) { _res = elem; @@ -31217,8 +32026,8 @@ _loop0_118_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); + D(fprintf(stderr, "%*c%s _loop0_116[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -31234,10 +32043,9 @@ _loop0_118_rule(Parser *p) return _seq; } -// _gather_117: -// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118 +// _gather_115: double_starred_kvpair _loop0_116 static asdl_seq * -_gather_117_rule(Parser *p) +_gather_115_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31249,27 +32057,27 @@ _gather_117_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118 + { // double_starred_kvpair _loop0_116 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118")); - void *elem; + D(fprintf(stderr, "%*c> _gather_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_116")); + KeyValuePair* elem; asdl_seq * seq; if ( - (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair && - (seq = _loop0_118_rule(p)) // _loop0_118 + (seq = _loop0_116_rule(p)) // _loop0_116 ) { - D(fprintf(stderr, "%*c+ _gather_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118")); + D(fprintf(stderr, "%*c+ _gather_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_116")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_117[%d-%d]: %s failed!\n", p->level, ' ', - 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118")); + D(fprintf(stderr, "%*c%s _gather_115[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_116")); } _res = NULL; done: @@ -31277,9 +32085,9 @@ _gather_117_rule(Parser *p) return _res; } -// _tmp_119: ',' kwargs -static void * -_tmp_119_rule(Parser *p) +// _loop1_117: for_if_clause +static asdl_seq * +_loop1_117_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31289,44 +32097,70 @@ _tmp_119_rule(Parser *p) p->level--; return NULL; } - void * _res = NULL; + void *_res = NULL; int _mark = p->mark; - { // ',' kwargs + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // for_if_clause if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs")); - Token * _literal; - asdl_seq* k; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (k = kwargs_rule(p)) // kwargs + D(fprintf(stderr, "%*c> _loop1_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause")); + comprehension_ty for_if_clause_var; + while ( + (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause ) { - D(fprintf(stderr, "%*c+ _tmp_119[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs")); - _res = k; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - p->level--; - return NULL; + _res = for_if_clause_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; } - goto done; + _children[_n++] = _res; + _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_119[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwargs")); + D(fprintf(stderr, "%*c%s _loop1_117[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause")); } - _res = NULL; - done: + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + p->level--; + return NULL; + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); p->level--; - return _res; + return _seq; } -// _loop0_121: ',' kwarg_or_starred +// _loop0_118: ('if' disjunction) static asdl_seq * -_loop0_121_rule(Parser *p) +_loop0_118_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31347,27 +32181,18 @@ _loop0_121_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // ',' kwarg_or_starred + { // ('if' disjunction) if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); - Token * _literal; - KeywordOrStarred* elem; + D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); + void *_tmp_254_var; while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + (_tmp_254_var = _tmp_254_rule(p)) // 'if' disjunction ) { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - p->level--; - return NULL; - } + _res = _tmp_254_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -31384,8 +32209,8 @@ _loop0_121_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_121[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); + D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('if' disjunction)")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -31401,9 +32226,9 @@ _loop0_121_rule(Parser *p) return _seq; } -// _gather_120: kwarg_or_starred _loop0_121 +// _loop0_119: ('if' disjunction) static asdl_seq * -_gather_120_rule(Parser *p) +_loop0_119_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31413,29 +32238,115 @@ _gather_120_rule(Parser *p) p->level--; return NULL; } - asdl_seq * _res = NULL; + void *_res = NULL; int _mark = p->mark; - { // kwarg_or_starred _loop0_121 + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // ('if' disjunction) if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_121")); - KeywordOrStarred* elem; - asdl_seq * seq; + D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); + void *_tmp_255_var; + while ( + (_tmp_255_var = _tmp_255_rule(p)) // 'if' disjunction + ) + { + _res = _tmp_255_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)")); + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + p->level--; + return _seq; +} + +// _tmp_120: assignment_expression | expression !':=' +static void * +_tmp_120_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // assignment_expression + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + expr_ty assignment_expression_var; if ( - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + _res = assignment_expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "assignment_expression")); + } + { // expression !':=' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + expr_ty expression_var; + if ( + (expression_var = expression_rule(p)) // expression && - (seq = _loop0_121_rule(p)) // _loop0_121 + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _gather_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_121")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); + D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_120[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_121")); + D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); } _res = NULL; done: @@ -31443,9 +32354,9 @@ _gather_120_rule(Parser *p) return _res; } -// _loop0_123: ',' kwarg_or_double_starred +// _loop0_122: ',' (starred_expression | (assignment_expression | expression !':=') !'=') static asdl_seq * -_loop0_123_rule(Parser *p) +_loop0_122_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31466,18 +32377,18 @@ _loop0_123_rule(Parser *p) } Py_ssize_t _children_capacity = 1; Py_ssize_t _n = 0; - { // ',' kwarg_or_double_starred + { // ',' (starred_expression | (assignment_expression | expression !':=') !'=') if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + D(fprintf(stderr, "%*c> _loop0_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); Token * _literal; - KeywordOrStarred* elem; + void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + (elem = _tmp_256_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' ) { _res = elem; @@ -31503,8 +32414,8 @@ _loop0_123_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_123[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); + D(fprintf(stderr, "%*c%s _loop0_122[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); if (!_seq) { @@ -31520,9 +32431,10 @@ _loop0_123_rule(Parser *p) return _seq; } -// _gather_122: kwarg_or_double_starred _loop0_123 +// _gather_121: +// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122 static asdl_seq * -_gather_122_rule(Parser *p) +_gather_121_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31534,27 +32446,74 @@ _gather_122_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_double_starred _loop0_123 + { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_123")); - KeywordOrStarred* elem; + D(fprintf(stderr, "%*c> _gather_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122")); + void *elem; asdl_seq * seq; if ( - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + (elem = _tmp_256_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' && - (seq = _loop0_123_rule(p)) // _loop0_123 + (seq = _loop0_122_rule(p)) // _loop0_122 ) { - D(fprintf(stderr, "%*c+ _gather_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_123")); + D(fprintf(stderr, "%*c+ _gather_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_122[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_123")); + D(fprintf(stderr, "%*c%s _gather_121[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_123: ',' kwargs +static void * +_tmp_123_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' kwargs + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs")); + Token * _literal; + asdl_seq* k; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (k = kwargs_rule(p)) // kwargs + ) + { + D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs")); + _res = k; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwargs")); } _res = NULL; done: @@ -31800,9 +32759,247 @@ _gather_126_rule(Parser *p) return _res; } -// _loop0_128: (',' star_target) +// _loop0_129: ',' kwarg_or_starred static asdl_seq * -_loop0_128_rule(Parser *p) +_loop0_129_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // ',' kwarg_or_starred + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); + Token * _literal; + KeywordOrStarred* elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + p->level--; + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + p->level--; + return _seq; +} + +// _gather_128: kwarg_or_starred _loop0_129 +static asdl_seq * +_gather_128_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // kwarg_or_starred _loop0_129 + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _gather_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_129")); + KeywordOrStarred* elem; + asdl_seq * seq; + if ( + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + && + (seq = _loop0_129_rule(p)) // _loop0_129 + ) + { + D(fprintf(stderr, "%*c+ _gather_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_129")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_128[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_129")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _loop0_131: ',' kwarg_or_double_starred +static asdl_seq * +_loop0_131_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // ',' kwarg_or_double_starred + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + Token * _literal; + KeywordOrStarred* elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + p->level--; + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_131[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + p->level--; + return _seq; +} + +// _gather_130: kwarg_or_double_starred _loop0_131 +static asdl_seq * +_gather_130_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // kwarg_or_double_starred _loop0_131 + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _gather_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_131")); + KeywordOrStarred* elem; + asdl_seq * seq; + if ( + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + && + (seq = _loop0_131_rule(p)) // _loop0_131 + ) + { + D(fprintf(stderr, "%*c+ _gather_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_131")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_130[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_131")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _loop0_132: (',' star_target) +static asdl_seq * +_loop0_132_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31828,13 +33025,13 @@ _loop0_128_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_236_var; + D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); + void *_tmp_257_var; while ( - (_tmp_236_var = _tmp_236_rule(p)) // ',' star_target + (_tmp_257_var = _tmp_257_rule(p)) // ',' star_target ) { - _res = _tmp_236_var; + _res = _tmp_257_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -31851,7 +33048,7 @@ _loop0_128_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -31868,9 +33065,9 @@ _loop0_128_rule(Parser *p) return _seq; } -// _loop0_130: ',' star_target +// _loop0_134: ',' star_target static asdl_seq * -_loop0_130_rule(Parser *p) +_loop0_134_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31896,7 +33093,7 @@ _loop0_130_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty elem; while ( @@ -31928,7 +33125,7 @@ _loop0_130_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_130[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -31945,9 +33142,9 @@ _loop0_130_rule(Parser *p) return _seq; } -// _gather_129: star_target _loop0_130 +// _gather_133: star_target _loop0_134 static asdl_seq * -_gather_129_rule(Parser *p) +_gather_133_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -31959,27 +33156,27 @@ _gather_129_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // star_target _loop0_130 + { // star_target _loop0_134 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_130")); + D(fprintf(stderr, "%*c> _gather_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_134")); expr_ty elem; asdl_seq * seq; if ( (elem = star_target_rule(p)) // star_target && - (seq = _loop0_130_rule(p)) // _loop0_130 + (seq = _loop0_134_rule(p)) // _loop0_134 ) { - D(fprintf(stderr, "%*c+ _gather_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_130")); + D(fprintf(stderr, "%*c+ _gather_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_134")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_129[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_130")); + D(fprintf(stderr, "%*c%s _gather_133[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_134")); } _res = NULL; done: @@ -31987,9 +33184,9 @@ _gather_129_rule(Parser *p) return _res; } -// _loop1_131: (',' star_target) +// _loop1_135: (',' star_target) static asdl_seq * -_loop1_131_rule(Parser *p) +_loop1_135_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32015,13 +33212,13 @@ _loop1_131_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_237_var; + D(fprintf(stderr, "%*c> _loop1_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); + void *_tmp_258_var; while ( - (_tmp_237_var = _tmp_237_rule(p)) // ',' star_target + (_tmp_258_var = _tmp_258_rule(p)) // ',' star_target ) { - _res = _tmp_237_var; + _res = _tmp_258_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32038,7 +33235,7 @@ _loop1_131_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_135[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' star_target)")); } if (_n == 0 || p->error_indicator) { @@ -32060,9 +33257,9 @@ _loop1_131_rule(Parser *p) return _seq; } -// _tmp_132: !'*' star_target +// _tmp_136: !'*' star_target static void * -_tmp_132_rule(Parser *p) +_tmp_136_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32079,7 +33276,7 @@ _tmp_132_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); expr_ty star_target_var; if ( _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' @@ -32087,12 +33284,12 @@ _tmp_132_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); _res = star_target_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!'*' star_target")); } _res = NULL; @@ -32101,9 +33298,9 @@ _tmp_132_rule(Parser *p) return _res; } -// _loop0_134: ',' del_target +// _loop0_138: ',' del_target static asdl_seq * -_loop0_134_rule(Parser *p) +_loop0_138_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32129,7 +33326,7 @@ _loop0_134_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); + D(fprintf(stderr, "%*c> _loop0_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); Token * _literal; expr_ty elem; while ( @@ -32161,7 +33358,7 @@ _loop0_134_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' del_target")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -32178,9 +33375,9 @@ _loop0_134_rule(Parser *p) return _seq; } -// _gather_133: del_target _loop0_134 +// _gather_137: del_target _loop0_138 static asdl_seq * -_gather_133_rule(Parser *p) +_gather_137_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32192,27 +33389,27 @@ _gather_133_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // del_target _loop0_134 + { // del_target _loop0_138 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_134")); + D(fprintf(stderr, "%*c> _gather_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_138")); expr_ty elem; asdl_seq * seq; if ( (elem = del_target_rule(p)) // del_target && - (seq = _loop0_134_rule(p)) // _loop0_134 + (seq = _loop0_138_rule(p)) // _loop0_138 ) { - D(fprintf(stderr, "%*c+ _gather_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_134")); + D(fprintf(stderr, "%*c+ _gather_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_138")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_133[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_134")); + D(fprintf(stderr, "%*c%s _gather_137[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_138")); } _res = NULL; done: @@ -32220,9 +33417,9 @@ _gather_133_rule(Parser *p) return _res; } -// _loop0_136: ',' expression +// _loop0_140: ',' expression static asdl_seq * -_loop0_136_rule(Parser *p) +_loop0_140_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32248,7 +33445,7 @@ _loop0_136_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -32280,7 +33477,7 @@ _loop0_136_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -32297,9 +33494,9 @@ _loop0_136_rule(Parser *p) return _seq; } -// _gather_135: expression _loop0_136 +// _gather_139: expression _loop0_140 static asdl_seq * -_gather_135_rule(Parser *p) +_gather_139_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32311,27 +33508,27 @@ _gather_135_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_136 + { // expression _loop0_140 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_136")); + D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_140")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_136_rule(p)) // _loop0_136 + (seq = _loop0_140_rule(p)) // _loop0_140 ) { - D(fprintf(stderr, "%*c+ _gather_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_136")); + D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_140")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_135[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_136")); + D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_140")); } _res = NULL; done: @@ -32339,9 +33536,9 @@ _gather_135_rule(Parser *p) return _res; } -// _loop0_138: ',' expression +// _loop0_142: ',' expression static asdl_seq * -_loop0_138_rule(Parser *p) +_loop0_142_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32367,7 +33564,7 @@ _loop0_138_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -32399,7 +33596,7 @@ _loop0_138_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_142[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -32416,9 +33613,9 @@ _loop0_138_rule(Parser *p) return _seq; } -// _gather_137: expression _loop0_138 +// _gather_141: expression _loop0_142 static asdl_seq * -_gather_137_rule(Parser *p) +_gather_141_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32430,27 +33627,27 @@ _gather_137_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_138 + { // expression _loop0_142 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_138")); + D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_142")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_138_rule(p)) // _loop0_138 + (seq = _loop0_142_rule(p)) // _loop0_142 ) { - D(fprintf(stderr, "%*c+ _gather_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_138")); + D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_142")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_137[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_138")); + D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_142")); } _res = NULL; done: @@ -32458,9 +33655,9 @@ _gather_137_rule(Parser *p) return _res; } -// _loop0_140: ',' expression +// _loop0_144: ',' expression static asdl_seq * -_loop0_140_rule(Parser *p) +_loop0_144_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32486,7 +33683,7 @@ _loop0_140_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -32518,7 +33715,7 @@ _loop0_140_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_144[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -32535,9 +33732,9 @@ _loop0_140_rule(Parser *p) return _seq; } -// _gather_139: expression _loop0_140 +// _gather_143: expression _loop0_144 static asdl_seq * -_gather_139_rule(Parser *p) +_gather_143_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32549,27 +33746,27 @@ _gather_139_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_140 + { // expression _loop0_144 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_140")); + D(fprintf(stderr, "%*c> _gather_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_144")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_140_rule(p)) // _loop0_140 + (seq = _loop0_144_rule(p)) // _loop0_144 ) { - D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_140")); + D(fprintf(stderr, "%*c+ _gather_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_144")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_140")); + D(fprintf(stderr, "%*c%s _gather_143[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_144")); } _res = NULL; done: @@ -32577,9 +33774,9 @@ _gather_139_rule(Parser *p) return _res; } -// _loop0_142: ',' expression +// _loop0_146: ',' expression static asdl_seq * -_loop0_142_rule(Parser *p) +_loop0_146_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32605,7 +33802,7 @@ _loop0_142_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -32637,7 +33834,7 @@ _loop0_142_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_142[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_146[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -32654,9 +33851,9 @@ _loop0_142_rule(Parser *p) return _seq; } -// _gather_141: expression _loop0_142 +// _gather_145: expression _loop0_146 static asdl_seq * -_gather_141_rule(Parser *p) +_gather_145_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32668,27 +33865,27 @@ _gather_141_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_142 + { // expression _loop0_146 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_142")); + D(fprintf(stderr, "%*c> _gather_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_146")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_142_rule(p)) // _loop0_142 + (seq = _loop0_146_rule(p)) // _loop0_146 ) { - D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_142")); + D(fprintf(stderr, "%*c+ _gather_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_146")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_142")); + D(fprintf(stderr, "%*c%s _gather_145[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_146")); } _res = NULL; done: @@ -32696,9 +33893,9 @@ _gather_141_rule(Parser *p) return _res; } -// _tmp_143: NEWLINE INDENT +// _tmp_147: NEWLINE INDENT static void * -_tmp_143_rule(Parser *p) +_tmp_147_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32715,7 +33912,7 @@ _tmp_143_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); Token * indent_var; Token * newline_var; if ( @@ -32724,12 +33921,12 @@ _tmp_143_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); _res = _PyPegen_dummy_name(p, newline_var, indent_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE INDENT")); } _res = NULL; @@ -32738,9 +33935,9 @@ _tmp_143_rule(Parser *p) return _res; } -// _tmp_144: args | expression for_if_clauses +// _tmp_148: args | expression for_if_clauses static void * -_tmp_144_rule(Parser *p) +_tmp_148_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32757,18 +33954,18 @@ _tmp_144_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); expr_ty args_var; if ( (args_var = args_rule(p)) // args ) { - D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); _res = args_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args")); } { // expression for_if_clauses @@ -32776,7 +33973,7 @@ _tmp_144_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); expr_ty expression_var; asdl_comprehension_seq* for_if_clauses_var; if ( @@ -32785,12 +33982,12 @@ _tmp_144_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses")); } _res = NULL; @@ -32799,9 +33996,9 @@ _tmp_144_rule(Parser *p) return _res; } -// _tmp_145: args ',' +// _tmp_149: args ',' static void * -_tmp_145_rule(Parser *p) +_tmp_149_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32818,7 +34015,7 @@ _tmp_145_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','")); Token * _literal; expr_ty args_var; if ( @@ -32827,12 +34024,12 @@ _tmp_145_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','")); _res = _PyPegen_dummy_name(p, args_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ','")); } _res = NULL; @@ -32841,9 +34038,9 @@ _tmp_145_rule(Parser *p) return _res; } -// _tmp_146: ',' | ')' +// _tmp_150: ',' | ')' static void * -_tmp_146_rule(Parser *p) +_tmp_150_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32860,18 +34057,18 @@ _tmp_146_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // ')' @@ -32879,18 +34076,18 @@ _tmp_146_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } _res = NULL; @@ -32899,9 +34096,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: 'True' | 'False' | 'None' +// _tmp_151: 'True' | 'False' | 'None' static void * -_tmp_147_rule(Parser *p) +_tmp_151_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32918,18 +34115,18 @@ _tmp_147_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 600)) // token='True' + (_keyword = _PyPegen_expect_token(p, 601)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'True'")); } { // 'False' @@ -32937,18 +34134,18 @@ _tmp_147_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='False' + (_keyword = _PyPegen_expect_token(p, 603)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } { // 'None' @@ -32956,18 +34153,18 @@ _tmp_147_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='None' + (_keyword = _PyPegen_expect_token(p, 602)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'")); } _res = NULL; @@ -32976,9 +34173,9 @@ _tmp_147_rule(Parser *p) return _res; } -// _tmp_148: NAME '=' +// _tmp_152: NAME '=' static void * -_tmp_148_rule(Parser *p) +_tmp_152_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -32995,7 +34192,7 @@ _tmp_148_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); Token * _literal; expr_ty name_var; if ( @@ -33004,12 +34201,12 @@ _tmp_148_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); _res = _PyPegen_dummy_name(p, name_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME '='")); } _res = NULL; @@ -33018,9 +34215,9 @@ _tmp_148_rule(Parser *p) return _res; } -// _tmp_149: NAME STRING | SOFT_KEYWORD +// _tmp_153: NAME STRING | SOFT_KEYWORD static void * -_tmp_149_rule(Parser *p) +_tmp_153_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33037,7 +34234,7 @@ _tmp_149_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); expr_ty name_var; expr_ty string_var; if ( @@ -33046,12 +34243,12 @@ _tmp_149_rule(Parser *p) (string_var = _PyPegen_string_token(p)) // STRING ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); _res = _PyPegen_dummy_name(p, name_var, string_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME STRING")); } { // SOFT_KEYWORD @@ -33059,18 +34256,18 @@ _tmp_149_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); expr_ty soft_keyword_var; if ( (soft_keyword_var = _PyPegen_soft_keyword_token(p)) // SOFT_KEYWORD ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); _res = soft_keyword_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "SOFT_KEYWORD")); } _res = NULL; @@ -33079,9 +34276,9 @@ _tmp_149_rule(Parser *p) return _res; } -// _tmp_150: 'else' | ':' +// _tmp_154: 'else' | ':' static void * -_tmp_150_rule(Parser *p) +_tmp_154_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33098,18 +34295,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='else' + (_keyword = _PyPegen_expect_token(p, 645)) // token='else' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'else'")); } { // ':' @@ -33117,18 +34314,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -33137,9 +34334,67 @@ _tmp_150_rule(Parser *p) return _res; } -// _tmp_151: '=' | ':=' +// _tmp_155: FSTRING_MIDDLE | fstring_replacement_field static void * -_tmp_151_rule(Parser *p) +_tmp_155_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // FSTRING_MIDDLE + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE")); + Token * fstring_middle_var; + if ( + (fstring_middle_var = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE' + ) + { + D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE")); + _res = fstring_middle_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE")); + } + { // fstring_replacement_field + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field")); + expr_ty fstring_replacement_field_var; + if ( + (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field + ) + { + D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field")); + _res = fstring_replacement_field_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "fstring_replacement_field")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_156: '=' | ':=' +static void * +_tmp_156_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33156,18 +34411,18 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); } { // ':=' @@ -33175,18 +34430,18 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='")); } _res = NULL; @@ -33195,9 +34450,9 @@ _tmp_151_rule(Parser *p) return _res; } -// _tmp_152: list | tuple | genexp | 'True' | 'None' | 'False' +// _tmp_157: list | tuple | genexp | 'True' | 'None' | 'False' static void * -_tmp_152_rule(Parser *p) +_tmp_157_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33214,18 +34469,18 @@ _tmp_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); _res = list_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "list")); } { // tuple @@ -33233,18 +34488,18 @@ _tmp_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); _res = tuple_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); } { // genexp @@ -33252,18 +34507,18 @@ _tmp_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp ) { - D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); _res = genexp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp")); } { // 'True' @@ -33271,18 +34526,18 @@ _tmp_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 600)) // token='True' + (_keyword = _PyPegen_expect_token(p, 601)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); } { // 'None' @@ -33290,18 +34545,18 @@ _tmp_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='None' + (_keyword = _PyPegen_expect_token(p, 602)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'None'")); } { // 'False' @@ -33309,18 +34564,18 @@ _tmp_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='False' + (_keyword = _PyPegen_expect_token(p, 603)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } _res = NULL; @@ -33329,9 +34584,9 @@ _tmp_152_rule(Parser *p) return _res; } -// _tmp_153: '=' | ':=' +// _tmp_158: '=' | ':=' static void * -_tmp_153_rule(Parser *p) +_tmp_158_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33348,18 +34603,18 @@ _tmp_153_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); } { // ':=' @@ -33367,18 +34622,18 @@ _tmp_153_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':='")); } _res = NULL; @@ -33387,9 +34642,9 @@ _tmp_153_rule(Parser *p) return _res; } -// _loop0_154: star_named_expressions +// _loop0_159: star_named_expressions static asdl_seq * -_loop0_154_rule(Parser *p) +_loop0_159_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33415,7 +34670,7 @@ _loop0_154_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); + D(fprintf(stderr, "%*c> _loop0_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); asdl_expr_seq* star_named_expressions_var; while ( (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions @@ -33438,7 +34693,7 @@ _loop0_154_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_159[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expressions")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33455,9 +34710,9 @@ _loop0_154_rule(Parser *p) return _seq; } -// _loop0_155: (star_targets '=') +// _loop0_160: (star_targets '=') static asdl_seq * -_loop0_155_rule(Parser *p) +_loop0_160_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33483,13 +34738,13 @@ _loop0_155_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_238_var; + D(fprintf(stderr, "%*c> _loop0_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_259_var; while ( - (_tmp_238_var = _tmp_238_rule(p)) // star_targets '=' + (_tmp_259_var = _tmp_259_rule(p)) // star_targets '=' ) { - _res = _tmp_238_var; + _res = _tmp_259_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33506,7 +34761,7 @@ _loop0_155_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33523,9 +34778,9 @@ _loop0_155_rule(Parser *p) return _seq; } -// _loop0_156: (star_targets '=') +// _loop0_161: (star_targets '=') static asdl_seq * -_loop0_156_rule(Parser *p) +_loop0_161_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33551,13 +34806,13 @@ _loop0_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_239_var; + D(fprintf(stderr, "%*c> _loop0_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_260_var; while ( - (_tmp_239_var = _tmp_239_rule(p)) // star_targets '=' + (_tmp_260_var = _tmp_260_rule(p)) // star_targets '=' ) { - _res = _tmp_239_var; + _res = _tmp_260_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33574,7 +34829,7 @@ _loop0_156_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_161[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33591,9 +34846,9 @@ _loop0_156_rule(Parser *p) return _seq; } -// _tmp_157: yield_expr | star_expressions +// _tmp_162: yield_expr | star_expressions static void * -_tmp_157_rule(Parser *p) +_tmp_162_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33610,18 +34865,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -33629,18 +34884,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -33649,9 +34904,9 @@ _tmp_157_rule(Parser *p) return _res; } -// _tmp_158: '[' | '(' | '{' +// _tmp_163: '[' | '(' | '{' static void * -_tmp_158_rule(Parser *p) +_tmp_163_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33668,18 +34923,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '(' @@ -33687,18 +34942,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); } { // '{' @@ -33706,18 +34961,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33726,9 +34981,9 @@ _tmp_158_rule(Parser *p) return _res; } -// _tmp_159: '[' | '{' +// _tmp_164: '[' | '{' static void * -_tmp_159_rule(Parser *p) +_tmp_164_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33745,18 +35000,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '{' @@ -33764,18 +35019,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33784,9 +35039,9 @@ _tmp_159_rule(Parser *p) return _res; } -// _tmp_160: '[' | '{' +// _tmp_165: '[' | '{' static void * -_tmp_160_rule(Parser *p) +_tmp_165_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33803,18 +35058,18 @@ _tmp_160_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'['")); } { // '{' @@ -33822,18 +35077,18 @@ _tmp_160_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -33842,9 +35097,9 @@ _tmp_160_rule(Parser *p) return _res; } -// _tmp_161: slash_no_default | slash_with_default +// _tmp_166: slash_no_default | slash_with_default static void * -_tmp_161_rule(Parser *p) +_tmp_166_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33861,18 +35116,18 @@ _tmp_161_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -33880,18 +35135,18 @@ _tmp_161_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -33900,9 +35155,9 @@ _tmp_161_rule(Parser *p) return _res; } -// _loop0_162: param_maybe_default +// _loop0_167: param_maybe_default static asdl_seq * -_loop0_162_rule(Parser *p) +_loop0_167_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33928,7 +35183,7 @@ _loop0_162_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -33951,7 +35206,7 @@ _loop0_162_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33968,9 +35223,9 @@ _loop0_162_rule(Parser *p) return _seq; } -// _loop0_163: param_no_default +// _loop0_168: param_no_default static asdl_seq * -_loop0_163_rule(Parser *p) +_loop0_168_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -33996,7 +35251,7 @@ _loop0_163_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -34019,7 +35274,7 @@ _loop0_163_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_163[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_168[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34036,9 +35291,9 @@ _loop0_163_rule(Parser *p) return _seq; } -// _loop0_164: param_no_default +// _loop0_169: param_no_default static asdl_seq * -_loop0_164_rule(Parser *p) +_loop0_169_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34064,7 +35319,7 @@ _loop0_164_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -34087,7 +35342,7 @@ _loop0_164_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_164[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34104,9 +35359,9 @@ _loop0_164_rule(Parser *p) return _seq; } -// _loop1_165: param_no_default +// _loop1_170: param_no_default static asdl_seq * -_loop1_165_rule(Parser *p) +_loop1_170_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34132,7 +35387,7 @@ _loop1_165_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -34155,7 +35410,7 @@ _loop1_165_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_165[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -34177,9 +35432,9 @@ _loop1_165_rule(Parser *p) return _seq; } -// _tmp_166: slash_no_default | slash_with_default +// _tmp_171: slash_no_default | slash_with_default static void * -_tmp_166_rule(Parser *p) +_tmp_171_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34196,18 +35451,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -34215,18 +35470,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -34235,9 +35490,9 @@ _tmp_166_rule(Parser *p) return _res; } -// _loop0_167: param_maybe_default +// _loop0_172: param_maybe_default static asdl_seq * -_loop0_167_rule(Parser *p) +_loop0_172_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34263,7 +35518,7 @@ _loop0_167_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34286,7 +35541,7 @@ _loop0_167_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_172[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34303,9 +35558,9 @@ _loop0_167_rule(Parser *p) return _seq; } -// _tmp_168: ',' | param_no_default +// _tmp_173: ',' | param_no_default static void * -_tmp_168_rule(Parser *p) +_tmp_173_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34322,18 +35577,18 @@ _tmp_168_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // param_no_default @@ -34341,18 +35596,18 @@ _tmp_168_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } _res = NULL; @@ -34361,9 +35616,9 @@ _tmp_168_rule(Parser *p) return _res; } -// _loop0_169: param_maybe_default +// _loop0_174: param_maybe_default static asdl_seq * -_loop0_169_rule(Parser *p) +_loop0_174_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34389,7 +35644,7 @@ _loop0_169_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34412,7 +35667,7 @@ _loop0_169_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34429,9 +35684,9 @@ _loop0_169_rule(Parser *p) return _seq; } -// _loop1_170: param_maybe_default +// _loop1_175: param_maybe_default static asdl_seq * -_loop1_170_rule(Parser *p) +_loop1_175_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34457,7 +35712,7 @@ _loop1_170_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34480,7 +35735,7 @@ _loop1_170_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_175[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -34502,9 +35757,9 @@ _loop1_170_rule(Parser *p) return _seq; } -// _tmp_171: ')' | ',' +// _tmp_176: ')' | ',' static void * -_tmp_171_rule(Parser *p) +_tmp_176_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34521,18 +35776,18 @@ _tmp_171_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); } { // ',' @@ -34540,18 +35795,18 @@ _tmp_171_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -34560,9 +35815,9 @@ _tmp_171_rule(Parser *p) return _res; } -// _tmp_172: ')' | ',' (')' | '**') +// _tmp_177: ')' | ',' (')' | '**') static void * -_tmp_172_rule(Parser *p) +_tmp_177_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34579,18 +35834,18 @@ _tmp_172_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_177[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_177[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -34598,21 +35853,21 @@ _tmp_172_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_240_var; + void *_tmp_261_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_240_var = _tmp_240_rule(p)) // ')' | '**' + (_tmp_261_var = _tmp_261_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_240_var); + D(fprintf(stderr, "%*c+ _tmp_177[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_261_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_177[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -34621,9 +35876,9 @@ _tmp_172_rule(Parser *p) return _res; } -// _tmp_173: param_no_default | ',' +// _tmp_178: param_no_default | ',' static void * -_tmp_173_rule(Parser *p) +_tmp_178_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34640,18 +35895,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -34659,18 +35914,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -34679,9 +35934,9 @@ _tmp_173_rule(Parser *p) return _res; } -// _loop0_174: param_maybe_default +// _loop0_179: param_maybe_default static asdl_seq * -_loop0_174_rule(Parser *p) +_loop0_179_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34707,7 +35962,7 @@ _loop0_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -34730,7 +35985,7 @@ _loop0_174_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34747,9 +36002,9 @@ _loop0_174_rule(Parser *p) return _seq; } -// _tmp_175: param_no_default | ',' +// _tmp_180: param_no_default | ',' static void * -_tmp_175_rule(Parser *p) +_tmp_180_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34766,18 +36021,18 @@ _tmp_175_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -34785,18 +36040,18 @@ _tmp_175_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -34805,9 +36060,9 @@ _tmp_175_rule(Parser *p) return _res; } -// _tmp_176: '*' | '**' | '/' +// _tmp_181: '*' | '**' | '/' static void * -_tmp_176_rule(Parser *p) +_tmp_181_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34824,18 +36079,18 @@ _tmp_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*'")); } { // '**' @@ -34843,18 +36098,18 @@ _tmp_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -34862,18 +36117,18 @@ _tmp_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -34882,9 +36137,9 @@ _tmp_176_rule(Parser *p) return _res; } -// _loop1_177: param_with_default +// _loop1_182: param_with_default static asdl_seq * -_loop1_177_rule(Parser *p) +_loop1_182_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34910,7 +36165,7 @@ _loop1_177_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -34933,7 +36188,7 @@ _loop1_177_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_177[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_182[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -34955,9 +36210,9 @@ _loop1_177_rule(Parser *p) return _seq; } -// _tmp_178: lambda_slash_no_default | lambda_slash_with_default +// _tmp_183: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_178_rule(Parser *p) +_tmp_183_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -34974,18 +36229,18 @@ _tmp_178_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -34993,18 +36248,18 @@ _tmp_178_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -35013,9 +36268,9 @@ _tmp_178_rule(Parser *p) return _res; } -// _loop0_179: lambda_param_maybe_default +// _loop0_184: lambda_param_maybe_default static asdl_seq * -_loop0_179_rule(Parser *p) +_loop0_184_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35041,7 +36296,7 @@ _loop0_179_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35064,7 +36319,7 @@ _loop0_179_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35081,9 +36336,9 @@ _loop0_179_rule(Parser *p) return _seq; } -// _loop0_180: lambda_param_no_default +// _loop0_185: lambda_param_no_default static asdl_seq * -_loop0_180_rule(Parser *p) +_loop0_185_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35109,7 +36364,7 @@ _loop0_180_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -35132,7 +36387,7 @@ _loop0_180_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_180[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35149,9 +36404,9 @@ _loop0_180_rule(Parser *p) return _seq; } -// _loop0_181: lambda_param_no_default +// _loop0_186: lambda_param_no_default static asdl_seq * -_loop0_181_rule(Parser *p) +_loop0_186_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35177,7 +36432,7 @@ _loop0_181_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -35200,7 +36455,7 @@ _loop0_181_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_186[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35217,9 +36472,9 @@ _loop0_181_rule(Parser *p) return _seq; } -// _loop0_183: ',' lambda_param +// _loop0_188: ',' lambda_param static asdl_seq * -_loop0_183_rule(Parser *p) +_loop0_188_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35245,7 +36500,7 @@ _loop0_183_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); + D(fprintf(stderr, "%*c> _loop0_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); Token * _literal; arg_ty elem; while ( @@ -35277,7 +36532,7 @@ _loop0_183_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_183[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_188[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' lambda_param")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35294,9 +36549,9 @@ _loop0_183_rule(Parser *p) return _seq; } -// _gather_182: lambda_param _loop0_183 +// _gather_187: lambda_param _loop0_188 static asdl_seq * -_gather_182_rule(Parser *p) +_gather_187_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35308,27 +36563,27 @@ _gather_182_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // lambda_param _loop0_183 + { // lambda_param _loop0_188 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183")); + D(fprintf(stderr, "%*c> _gather_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_188")); arg_ty elem; asdl_seq * seq; if ( (elem = lambda_param_rule(p)) // lambda_param && - (seq = _loop0_183_rule(p)) // _loop0_183 + (seq = _loop0_188_rule(p)) // _loop0_188 ) { - D(fprintf(stderr, "%*c+ _gather_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183")); + D(fprintf(stderr, "%*c+ _gather_187[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_188")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_182[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_183")); + D(fprintf(stderr, "%*c%s _gather_187[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_188")); } _res = NULL; done: @@ -35336,9 +36591,9 @@ _gather_182_rule(Parser *p) return _res; } -// _tmp_184: lambda_slash_no_default | lambda_slash_with_default +// _tmp_189: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_184_rule(Parser *p) +_tmp_189_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35355,18 +36610,18 @@ _tmp_184_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -35374,18 +36629,18 @@ _tmp_184_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -35394,9 +36649,9 @@ _tmp_184_rule(Parser *p) return _res; } -// _loop0_185: lambda_param_maybe_default +// _loop0_190: lambda_param_maybe_default static asdl_seq * -_loop0_185_rule(Parser *p) +_loop0_190_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35422,7 +36677,7 @@ _loop0_185_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35445,7 +36700,7 @@ _loop0_185_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_190[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35462,9 +36717,9 @@ _loop0_185_rule(Parser *p) return _seq; } -// _tmp_186: ',' | lambda_param_no_default +// _tmp_191: ',' | lambda_param_no_default static void * -_tmp_186_rule(Parser *p) +_tmp_191_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35481,18 +36736,18 @@ _tmp_186_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } { // lambda_param_no_default @@ -35500,18 +36755,18 @@ _tmp_186_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } _res = NULL; @@ -35520,9 +36775,9 @@ _tmp_186_rule(Parser *p) return _res; } -// _loop0_187: lambda_param_maybe_default +// _loop0_192: lambda_param_maybe_default static asdl_seq * -_loop0_187_rule(Parser *p) +_loop0_192_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35548,7 +36803,7 @@ _loop0_187_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35571,7 +36826,7 @@ _loop0_187_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_187[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35588,9 +36843,9 @@ _loop0_187_rule(Parser *p) return _seq; } -// _loop1_188: lambda_param_maybe_default +// _loop1_193: lambda_param_maybe_default static asdl_seq * -_loop1_188_rule(Parser *p) +_loop1_193_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35616,7 +36871,7 @@ _loop1_188_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35639,7 +36894,7 @@ _loop1_188_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_188[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_193[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -35661,9 +36916,9 @@ _loop1_188_rule(Parser *p) return _seq; } -// _loop1_189: lambda_param_with_default +// _loop1_194: lambda_param_with_default static asdl_seq * -_loop1_189_rule(Parser *p) +_loop1_194_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35689,7 +36944,7 @@ _loop1_189_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -35712,7 +36967,7 @@ _loop1_189_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_189[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -35734,9 +36989,9 @@ _loop1_189_rule(Parser *p) return _seq; } -// _tmp_190: ':' | ',' (':' | '**') +// _tmp_195: ':' | ',' (':' | '**') static void * -_tmp_190_rule(Parser *p) +_tmp_195_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35753,18 +37008,18 @@ _tmp_190_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -35772,21 +37027,21 @@ _tmp_190_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_241_var; + void *_tmp_262_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_241_var = _tmp_241_rule(p)) // ':' | '**' + (_tmp_262_var = _tmp_262_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_241_var); + D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_262_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -35795,9 +37050,9 @@ _tmp_190_rule(Parser *p) return _res; } -// _tmp_191: lambda_param_no_default | ',' +// _tmp_196: lambda_param_no_default | ',' static void * -_tmp_191_rule(Parser *p) +_tmp_196_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35814,18 +37069,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_196[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -35833,18 +37088,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_196[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -35853,9 +37108,9 @@ _tmp_191_rule(Parser *p) return _res; } -// _loop0_192: lambda_param_maybe_default +// _loop0_197: lambda_param_maybe_default static asdl_seq * -_loop0_192_rule(Parser *p) +_loop0_197_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35881,7 +37136,7 @@ _loop0_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -35904,7 +37159,7 @@ _loop0_192_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35921,9 +37176,9 @@ _loop0_192_rule(Parser *p) return _seq; } -// _tmp_193: lambda_param_no_default | ',' +// _tmp_198: lambda_param_no_default | ',' static void * -_tmp_193_rule(Parser *p) +_tmp_198_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35940,18 +37195,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -35959,18 +37214,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -35979,9 +37234,9 @@ _tmp_193_rule(Parser *p) return _res; } -// _tmp_194: '*' | '**' | '/' +// _tmp_199: '*' | '**' | '/' static void * -_tmp_194_rule(Parser *p) +_tmp_199_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -35998,18 +37253,18 @@ _tmp_194_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*'")); } { // '**' @@ -36017,18 +37272,18 @@ _tmp_194_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -36036,18 +37291,18 @@ _tmp_194_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -36056,9 +37311,9 @@ _tmp_194_rule(Parser *p) return _res; } -// _tmp_195: ',' | ')' | ':' +// _tmp_200: ',' | ')' | ':' static void * -_tmp_195_rule(Parser *p) +_tmp_200_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36075,18 +37330,18 @@ _tmp_195_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // ')' @@ -36094,18 +37349,18 @@ _tmp_195_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); } { // ':' @@ -36113,18 +37368,18 @@ _tmp_195_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -36133,9 +37388,9 @@ _tmp_195_rule(Parser *p) return _res; } -// _loop0_197: ',' (expression ['as' star_target]) +// _loop0_202: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_197_rule(Parser *p) +_loop0_202_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36161,13 +37416,13 @@ _loop0_197_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_242_rule(p)) // expression ['as' star_target] + (elem = _tmp_263_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -36193,7 +37448,7 @@ _loop0_197_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_202[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36210,9 +37465,9 @@ _loop0_197_rule(Parser *p) return _seq; } -// _gather_196: (expression ['as' star_target]) _loop0_197 +// _gather_201: (expression ['as' star_target]) _loop0_202 static asdl_seq * -_gather_196_rule(Parser *p) +_gather_201_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36224,27 +37479,27 @@ _gather_196_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_197 + { // (expression ['as' star_target]) _loop0_202 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c> _gather_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_202")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_242_rule(p)) // expression ['as' star_target] + (elem = _tmp_263_rule(p)) // expression ['as' star_target] && - (seq = _loop0_197_rule(p)) // _loop0_197 + (seq = _loop0_202_rule(p)) // _loop0_202 ) { - D(fprintf(stderr, "%*c+ _gather_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c+ _gather_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_202")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_196[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_197")); + D(fprintf(stderr, "%*c%s _gather_201[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_202")); } _res = NULL; done: @@ -36252,9 +37507,9 @@ _gather_196_rule(Parser *p) return _res; } -// _loop0_199: ',' (expressions ['as' star_target]) +// _loop0_204: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_199_rule(Parser *p) +_loop0_204_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36280,13 +37535,13 @@ _loop0_199_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_243_rule(p)) // expressions ['as' star_target] + (elem = _tmp_264_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -36312,7 +37567,7 @@ _loop0_199_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_204[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36329,9 +37584,9 @@ _loop0_199_rule(Parser *p) return _seq; } -// _gather_198: (expressions ['as' star_target]) _loop0_199 +// _gather_203: (expressions ['as' star_target]) _loop0_204 static asdl_seq * -_gather_198_rule(Parser *p) +_gather_203_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36343,27 +37598,27 @@ _gather_198_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_199 + { // (expressions ['as' star_target]) _loop0_204 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c> _gather_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_204")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_243_rule(p)) // expressions ['as' star_target] + (elem = _tmp_264_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_199_rule(p)) // _loop0_199 + (seq = _loop0_204_rule(p)) // _loop0_204 ) { - D(fprintf(stderr, "%*c+ _gather_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c+ _gather_203[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_204")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_198[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_199")); + D(fprintf(stderr, "%*c%s _gather_203[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_204")); } _res = NULL; done: @@ -36371,9 +37626,9 @@ _gather_198_rule(Parser *p) return _res; } -// _loop0_201: ',' (expression ['as' star_target]) +// _loop0_206: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_201_rule(Parser *p) +_loop0_206_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36399,13 +37654,13 @@ _loop0_201_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_244_rule(p)) // expression ['as' star_target] + (elem = _tmp_265_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -36431,7 +37686,7 @@ _loop0_201_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_201[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_206[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36448,9 +37703,9 @@ _loop0_201_rule(Parser *p) return _seq; } -// _gather_200: (expression ['as' star_target]) _loop0_201 +// _gather_205: (expression ['as' star_target]) _loop0_206 static asdl_seq * -_gather_200_rule(Parser *p) +_gather_205_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36462,27 +37717,27 @@ _gather_200_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_201 + { // (expression ['as' star_target]) _loop0_206 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c> _gather_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_244_rule(p)) // expression ['as' star_target] + (elem = _tmp_265_rule(p)) // expression ['as' star_target] && - (seq = _loop0_201_rule(p)) // _loop0_201 + (seq = _loop0_206_rule(p)) // _loop0_206 ) { - D(fprintf(stderr, "%*c+ _gather_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c+ _gather_205[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_200[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_201")); + D(fprintf(stderr, "%*c%s _gather_205[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_206")); } _res = NULL; done: @@ -36490,9 +37745,9 @@ _gather_200_rule(Parser *p) return _res; } -// _loop0_203: ',' (expressions ['as' star_target]) +// _loop0_208: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_203_rule(Parser *p) +_loop0_208_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36518,13 +37773,13 @@ _loop0_203_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_245_rule(p)) // expressions ['as' star_target] + (elem = _tmp_266_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -36550,7 +37805,7 @@ _loop0_203_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_203[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36567,9 +37822,9 @@ _loop0_203_rule(Parser *p) return _seq; } -// _gather_202: (expressions ['as' star_target]) _loop0_203 +// _gather_207: (expressions ['as' star_target]) _loop0_208 static asdl_seq * -_gather_202_rule(Parser *p) +_gather_207_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36581,27 +37836,27 @@ _gather_202_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_203 + { // (expressions ['as' star_target]) _loop0_208 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); + D(fprintf(stderr, "%*c> _gather_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_245_rule(p)) // expressions ['as' star_target] + (elem = _tmp_266_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_203_rule(p)) // _loop0_203 + (seq = _loop0_208_rule(p)) // _loop0_208 ) { - D(fprintf(stderr, "%*c+ _gather_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); + D(fprintf(stderr, "%*c+ _gather_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_202[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_203")); + D(fprintf(stderr, "%*c%s _gather_207[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_208")); } _res = NULL; done: @@ -36609,9 +37864,9 @@ _gather_202_rule(Parser *p) return _res; } -// _tmp_204: 'except' | 'finally' +// _tmp_209: 'except' | 'finally' static void * -_tmp_204_rule(Parser *p) +_tmp_209_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36628,18 +37883,18 @@ _tmp_204_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 636)) // token='except' + (_keyword = _PyPegen_expect_token(p, 637)) // token='except' ) { - D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'except'")); } { // 'finally' @@ -36647,18 +37902,18 @@ _tmp_204_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 632)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 633)) // token='finally' ) { - D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally'")); } _res = NULL; @@ -36667,9 +37922,9 @@ _tmp_204_rule(Parser *p) return _res; } -// _loop0_205: block +// _loop0_210: block static asdl_seq * -_loop0_205_rule(Parser *p) +_loop0_210_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36695,7 +37950,7 @@ _loop0_205_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -36718,7 +37973,7 @@ _loop0_205_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_210[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36735,9 +37990,9 @@ _loop0_205_rule(Parser *p) return _seq; } -// _loop1_206: except_block +// _loop1_211: except_block static asdl_seq * -_loop1_206_rule(Parser *p) +_loop1_211_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36763,7 +38018,7 @@ _loop1_206_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); + D(fprintf(stderr, "%*c> _loop1_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block @@ -36786,7 +38041,7 @@ _loop1_206_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_206[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_211[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); } if (_n == 0 || p->error_indicator) { @@ -36808,9 +38063,9 @@ _loop1_206_rule(Parser *p) return _seq; } -// _tmp_207: 'as' NAME +// _tmp_212: 'as' NAME static void * -_tmp_207_rule(Parser *p) +_tmp_212_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36827,21 +38082,21 @@ _tmp_207_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_207[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -36850,9 +38105,9 @@ _tmp_207_rule(Parser *p) return _res; } -// _loop0_208: block +// _loop0_213: block static asdl_seq * -_loop0_208_rule(Parser *p) +_loop0_213_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36878,7 +38133,7 @@ _loop0_208_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -36901,7 +38156,7 @@ _loop0_208_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_213[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36918,9 +38173,9 @@ _loop0_208_rule(Parser *p) return _seq; } -// _loop1_209: except_star_block +// _loop1_214: except_star_block static asdl_seq * -_loop1_209_rule(Parser *p) +_loop1_214_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -36946,7 +38201,7 @@ _loop1_209_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); + D(fprintf(stderr, "%*c> _loop1_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); excepthandler_ty except_star_block_var; while ( (except_star_block_var = except_star_block_rule(p)) // except_star_block @@ -36969,7 +38224,7 @@ _loop1_209_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_209[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_214[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_star_block")); } if (_n == 0 || p->error_indicator) { @@ -36991,9 +38246,9 @@ _loop1_209_rule(Parser *p) return _seq; } -// _tmp_210: expression ['as' NAME] +// _tmp_215: expression ['as' NAME] static void * -_tmp_210_rule(Parser *p) +_tmp_215_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37010,22 +38265,22 @@ _tmp_210_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_246_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_267_rule(p), !p->error_indicator) // ['as' NAME] ) { - D(fprintf(stderr, "%*c+ _tmp_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_210[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' NAME]")); } _res = NULL; @@ -37034,9 +38289,9 @@ _tmp_210_rule(Parser *p) return _res; } -// _tmp_211: 'as' NAME +// _tmp_216: 'as' NAME static void * -_tmp_211_rule(Parser *p) +_tmp_216_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37053,21 +38308,21 @@ _tmp_211_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_211[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -37076,9 +38331,9 @@ _tmp_211_rule(Parser *p) return _res; } -// _tmp_212: 'as' NAME +// _tmp_217: 'as' NAME static void * -_tmp_212_rule(Parser *p) +_tmp_217_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37095,21 +38350,21 @@ _tmp_212_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -37118,9 +38373,9 @@ _tmp_212_rule(Parser *p) return _res; } -// _tmp_213: NEWLINE | ':' +// _tmp_218: NEWLINE | ':' static void * -_tmp_213_rule(Parser *p) +_tmp_218_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37137,18 +38392,18 @@ _tmp_213_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); _res = newline_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); } { // ':' @@ -37156,18 +38411,18 @@ _tmp_213_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } _res = NULL; @@ -37176,9 +38431,9 @@ _tmp_213_rule(Parser *p) return _res; } -// _tmp_214: 'as' NAME +// _tmp_219: 'as' NAME static void * -_tmp_214_rule(Parser *p) +_tmp_219_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37195,21 +38450,21 @@ _tmp_214_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -37218,9 +38473,9 @@ _tmp_214_rule(Parser *p) return _res; } -// _tmp_215: 'as' NAME +// _tmp_220: 'as' NAME static void * -_tmp_215_rule(Parser *p) +_tmp_220_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37237,21 +38492,21 @@ _tmp_215_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -37260,9 +38515,9 @@ _tmp_215_rule(Parser *p) return _res; } -// _tmp_216: positional_patterns ',' +// _tmp_221: positional_patterns ',' static void * -_tmp_216_rule(Parser *p) +_tmp_221_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37279,7 +38534,7 @@ _tmp_216_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); Token * _literal; asdl_pattern_seq* positional_patterns_var; if ( @@ -37288,12 +38543,12 @@ _tmp_216_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); _res = _PyPegen_dummy_name(p, positional_patterns_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "positional_patterns ','")); } _res = NULL; @@ -37302,9 +38557,9 @@ _tmp_216_rule(Parser *p) return _res; } -// _tmp_217: '->' expression +// _tmp_222: '->' expression static void * -_tmp_217_rule(Parser *p) +_tmp_222_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37321,7 +38576,7 @@ _tmp_217_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty expression_var; if ( @@ -37330,12 +38585,12 @@ _tmp_217_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = _PyPegen_dummy_name(p, _literal, expression_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -37344,9 +38599,9 @@ _tmp_217_rule(Parser *p) return _res; } -// _tmp_218: '(' arguments? ')' +// _tmp_223: '(' arguments? ')' static void * -_tmp_218_rule(Parser *p) +_tmp_223_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37363,7 +38618,7 @@ _tmp_218_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -37376,12 +38631,12 @@ _tmp_218_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? 
')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -37390,9 +38645,9 @@ _tmp_218_rule(Parser *p) return _res; } -// _tmp_219: '(' arguments? ')' +// _tmp_224: '(' arguments? ')' static void * -_tmp_219_rule(Parser *p) +_tmp_224_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37409,7 +38664,7 @@ _tmp_219_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -37422,12 +38677,12 @@ _tmp_219_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -37436,9 +38691,9 @@ _tmp_219_rule(Parser *p) return _res; } -// _loop0_221: ',' double_starred_kvpair +// _loop0_226: ',' double_starred_kvpair static asdl_seq * -_loop0_221_rule(Parser *p) +_loop0_226_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37464,7 +38719,7 @@ _loop0_221_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); + D(fprintf(stderr, "%*c> _loop0_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); Token * _literal; KeyValuePair* elem; while ( @@ -37496,7 +38751,7 @@ _loop0_221_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_221[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_226[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' double_starred_kvpair")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37513,9 +38768,9 @@ _loop0_221_rule(Parser *p) return _seq; } -// _gather_220: double_starred_kvpair _loop0_221 +// _gather_225: double_starred_kvpair _loop0_226 static asdl_seq * -_gather_220_rule(Parser *p) +_gather_225_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37527,27 +38782,27 @@ _gather_220_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // double_starred_kvpair _loop0_221 + { // double_starred_kvpair _loop0_226 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221")); + D(fprintf(stderr, "%*c> _gather_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_226")); KeyValuePair* elem; asdl_seq * seq; if ( (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair && - (seq = _loop0_221_rule(p)) // _loop0_221 + (seq = _loop0_226_rule(p)) // _loop0_226 ) { - D(fprintf(stderr, "%*c+ _gather_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221")); + D(fprintf(stderr, "%*c+ _gather_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_226")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_220[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_221")); + D(fprintf(stderr, "%*c%s _gather_225[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_226")); } _res = NULL; done: @@ -37555,9 +38810,9 @@ _gather_220_rule(Parser *p) return _res; } -// _tmp_222: '}' | ',' +// _tmp_227: '}' | ',' static void * -_tmp_222_rule(Parser *p) +_tmp_227_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37574,18 +38829,18 @@ _tmp_222_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'}'")); } { // ',' @@ -37593,18 +38848,18 @@ _tmp_222_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -37613,9 +38868,9 @@ _tmp_222_rule(Parser *p) return _res; } -// _tmp_223: '}' | ',' +// _tmp_228: '}' | ',' static void * -_tmp_223_rule(Parser *p) +_tmp_228_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37632,18 +38887,18 @@ _tmp_223_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } { // ',' @@ -37651,18 +38906,18 @@ _tmp_223_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -37671,9 +38926,898 @@ _tmp_223_rule(Parser *p) return _res; } -// _tmp_224: star_targets '=' +// _tmp_229: yield_expr | star_expressions static void * -_tmp_224_rule(Parser *p) +_tmp_229_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_230: yield_expr | star_expressions +static void * +_tmp_230_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_231: '=' | '!' 
| ':' | '}' +static void * +_tmp_231_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '=' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); + } + { // '!' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 54)) // token='!' + ) + { + D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'")); + } + { // ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); + } + { // '}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_232: yield_expr | star_expressions +static void * +_tmp_232_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_233: '!' | ':' | '}' +static void * +_tmp_233_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '!' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 54)) // token='!' + ) + { + D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'")); + } + { // ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); + } + { // '}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_234: yield_expr | star_expressions +static void * +_tmp_234_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_235: yield_expr | star_expressions +static void * +_tmp_235_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_236: '!' NAME +static void * +_tmp_236_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '!' NAME + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + Token * _literal; + expr_ty name_var; + if ( + (_literal = _PyPegen_expect_token(p, 54)) // token='!' + && + (name_var = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + _res = _PyPegen_dummy_name(p, _literal, name_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' 
NAME")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_237: ':' | '}' +static void * +_tmp_237_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); + } + { // '}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_238: yield_expr | star_expressions +static void * +_tmp_238_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_239: '!' NAME +static void * +_tmp_239_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '!' NAME + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' 
NAME")); + Token * _literal; + expr_ty name_var; + if ( + (_literal = _PyPegen_expect_token(p, 54)) // token='!' + && + (name_var = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + _res = _PyPegen_dummy_name(p, _literal, name_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _loop0_240: fstring_format_spec +static asdl_seq * +_loop0_240_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // fstring_format_spec + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec")); + expr_ty fstring_format_spec_var; + while ( + (fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec + ) + { + _res = fstring_format_spec_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_240[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_format_spec")); + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + p->level--; + return _seq; +} + +// _tmp_241: yield_expr | star_expressions +static void * +_tmp_241_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_242: '!' NAME +static void * +_tmp_242_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '!' NAME + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + Token * _literal; + expr_ty name_var; + if ( + (_literal = _PyPegen_expect_token(p, 54)) // token='!' + && + (name_var = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + _res = _PyPegen_dummy_name(p, _literal, name_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_243: ':' | '}' +static void * +_tmp_243_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); + } + { // '}' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'}'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_244: star_targets '=' +static void * +_tmp_244_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37690,7 +39834,7 @@ _tmp_224_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -37699,7 +39843,7 @@ _tmp_224_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37709,7 +39853,7 @@ _tmp_224_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -37718,9 +39862,9 @@ _tmp_224_rule(Parser *p) return _res; } -// _tmp_225: '.' | '...' +// _tmp_245: '.' | '...' static void * -_tmp_225_rule(Parser *p) +_tmp_245_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37737,18 +39881,18 @@ _tmp_225_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -37756,18 +39900,18 @@ _tmp_225_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -37776,9 +39920,9 @@ _tmp_225_rule(Parser *p) return _res; } -// _tmp_226: '.' | '...' +// _tmp_246: '.' | '...' 
static void * -_tmp_226_rule(Parser *p) +_tmp_246_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37795,18 +39939,18 @@ _tmp_226_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -37814,18 +39958,18 @@ _tmp_226_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -37834,9 +39978,9 @@ _tmp_226_rule(Parser *p) return _res; } -// _tmp_227: '@' named_expression NEWLINE +// _tmp_247: '@' named_expression NEWLINE static void * -_tmp_227_rule(Parser *p) +_tmp_247_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37853,7 +39997,7 @@ _tmp_227_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -37865,7 +40009,7 @@ _tmp_227_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37875,7 +40019,7 @@ _tmp_227_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -37884,9 +40028,9 @@ _tmp_227_rule(Parser *p) return _res; } -// _tmp_228: ',' expression +// _tmp_248: ',' expression static void * -_tmp_228_rule(Parser *p) +_tmp_248_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37903,7 +40047,7 @@ _tmp_228_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -37912,7 +40056,7 @@ _tmp_228_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37922,7 +40066,7 @@ _tmp_228_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -37931,9 +40075,9 @@ _tmp_228_rule(Parser *p) return _res; } -// _tmp_229: ',' star_expression +// _tmp_249: ',' star_expression static void * -_tmp_229_rule(Parser *p) +_tmp_249_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37950,7 +40094,7 @@ _tmp_229_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -37959,7 +40103,7 @@ _tmp_229_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -37969,7 +40113,7 @@ _tmp_229_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -37978,9 +40122,9 @@ _tmp_229_rule(Parser *p) return _res; } -// _tmp_230: 'or' conjunction +// _tmp_250: 'or' conjunction static void * -_tmp_230_rule(Parser *p) +_tmp_250_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -37997,7 +40141,7 @@ _tmp_230_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -38006,7 +40150,7 @@ _tmp_230_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -38016,7 +40160,7 @@ _tmp_230_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -38025,9 +40169,9 @@ _tmp_230_rule(Parser *p) return _res; } -// _tmp_231: 'and' inversion +// _tmp_251: 'and' inversion static void * -_tmp_231_rule(Parser *p) +_tmp_251_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38044,7 +40188,7 @@ _tmp_231_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -38053,7 +40197,7 @@ _tmp_231_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -38063,7 +40207,7 @@ _tmp_231_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -38072,9 +40216,9 @@ _tmp_231_rule(Parser *p) return _res; } -// _tmp_232: slice | starred_expression +// _tmp_252: slice | starred_expression static void * -_tmp_232_rule(Parser *p) +_tmp_252_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38091,18 +40235,18 @@ _tmp_232_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); expr_ty slice_var; if ( (slice_var = slice_rule(p)) // slice ) { - D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); _res = slice_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice")); } { // starred_expression @@ -38110,18 +40254,18 @@ _tmp_232_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } _res = NULL; @@ -38130,9 +40274,67 @@ _tmp_232_rule(Parser *p) return _res; } -// _tmp_233: 'if' disjunction +// _tmp_253: fstring | string static void * -_tmp_233_rule(Parser *p) +_tmp_253_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + p->error_indicator = 1; + PyErr_NoMemory(); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // fstring + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring")); + expr_ty fstring_var; + if ( + (fstring_var = fstring_rule(p)) // fstring + ) + { + D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring")); + _res = fstring_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring")); + } + { // string + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string")); + expr_ty string_var; + if ( + (string_var = string_rule(p)) // string + ) + { + D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string")); + _res = string_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "string")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_254: 'if' disjunction +static void * +_tmp_254_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38149,16 +40351,16 @@ _tmp_233_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_254[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_254[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -38168,7 +40370,7 @@ _tmp_233_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_254[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -38177,9 +40379,9 @@ _tmp_233_rule(Parser *p) return _res; } -// _tmp_234: 'if' disjunction +// _tmp_255: 'if' disjunction static void * -_tmp_234_rule(Parser *p) +_tmp_255_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38196,16 +40398,16 @@ _tmp_234_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_255[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 641)) // token='if' + (_keyword = _PyPegen_expect_token(p, 642)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_255[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -38215,7 +40417,7 @@ _tmp_234_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_255[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -38224,9 +40426,9 @@ _tmp_234_rule(Parser *p) return _res; } -// _tmp_235: starred_expression | (assignment_expression | expression !':=') !'=' +// _tmp_256: starred_expression | (assignment_expression | expression !':=') !'=' static void * -_tmp_235_rule(Parser *p) +_tmp_256_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38243,18 +40445,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } { // (assignment_expression | expression !':=') !'=' @@ -38262,20 +40464,20 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - void *_tmp_247_var; + D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + void *_tmp_268_var; if ( - (_tmp_247_var = _tmp_247_rule(p)) // assignment_expression | expression !':=' + (_tmp_268_var = _tmp_268_rule(p)) // assignment_expression | expression !':=' && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - _res = _tmp_247_var; + D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + _res = _tmp_268_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='")); } _res = NULL; @@ -38284,9 +40486,9 @@ _tmp_235_rule(Parser *p) return _res; } -// _tmp_236: ',' star_target +// _tmp_257: ',' star_target static void * -_tmp_236_rule(Parser *p) +_tmp_257_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38303,7 +40505,7 @@ _tmp_236_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -38312,7 +40514,7 @@ _tmp_236_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -38322,7 +40524,7 @@ _tmp_236_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -38331,9 +40533,9 @@ _tmp_236_rule(Parser *p) return _res; } -// _tmp_237: ',' star_target +// _tmp_258: ',' star_target static void * -_tmp_237_rule(Parser *p) +_tmp_258_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38350,7 +40552,7 @@ _tmp_237_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -38359,7 +40561,7 @@ _tmp_237_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -38369,7 +40571,7 @@ _tmp_237_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -38378,9 +40580,9 @@ _tmp_237_rule(Parser *p) return _res; } -// _tmp_238: star_targets '=' +// _tmp_259: star_targets '=' static void * -_tmp_238_rule(Parser *p) +_tmp_259_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38397,7 +40599,7 @@ _tmp_238_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -38406,12 +40608,12 @@ _tmp_238_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -38420,9 +40622,9 @@ _tmp_238_rule(Parser *p) return _res; } -// _tmp_239: star_targets '=' +// _tmp_260: star_targets '=' static void * -_tmp_239_rule(Parser *p) +_tmp_260_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38439,7 +40641,7 @@ _tmp_239_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -38448,12 +40650,12 @@ _tmp_239_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -38462,9 +40664,9 @@ _tmp_239_rule(Parser *p) return _res; } -// _tmp_240: ')' | '**' +// _tmp_261: ')' | '**' static void * -_tmp_240_rule(Parser *p) +_tmp_261_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38481,18 +40683,18 @@ _tmp_240_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); } { // '**' @@ -38500,18 +40702,18 @@ _tmp_240_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -38520,9 +40722,9 @@ _tmp_240_rule(Parser *p) return _res; } -// _tmp_241: ':' | '**' +// _tmp_262: ':' | '**' static void * -_tmp_241_rule(Parser *p) +_tmp_262_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38539,18 +40741,18 @@ _tmp_241_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -38558,18 +40760,18 @@ _tmp_241_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -38578,9 +40780,9 @@ _tmp_241_rule(Parser *p) return _res; } -// _tmp_242: expression ['as' star_target] +// _tmp_263: expression ['as' star_target] static void * -_tmp_242_rule(Parser *p) +_tmp_263_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38597,22 +40799,22 @@ _tmp_242_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_263[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_248_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_269_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_263[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_263[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -38621,9 +40823,9 @@ _tmp_242_rule(Parser *p) return _res; } -// _tmp_243: expressions ['as' star_target] +// _tmp_264: expressions ['as' star_target] static void * -_tmp_243_rule(Parser *p) +_tmp_264_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38640,22 +40842,22 @@ _tmp_243_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_249_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_270_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -38664,9 +40866,9 @@ _tmp_243_rule(Parser *p) return _res; } -// _tmp_244: expression ['as' star_target] +// _tmp_265: expression ['as' star_target] static void * -_tmp_244_rule(Parser *p) +_tmp_265_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38683,22 +40885,22 @@ _tmp_244_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_265[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_250_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_271_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_265[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_265[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -38707,9 +40909,9 @@ _tmp_244_rule(Parser *p) return _res; } -// _tmp_245: expressions ['as' star_target] +// _tmp_266: expressions ['as' star_target] static void * -_tmp_245_rule(Parser *p) +_tmp_266_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38726,22 +40928,22 @@ _tmp_245_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_251_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_272_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -38750,9 +40952,9 @@ _tmp_245_rule(Parser *p) return _res; } -// _tmp_246: 'as' NAME +// _tmp_267: 'as' NAME static void * -_tmp_246_rule(Parser *p) +_tmp_267_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38769,21 +40971,21 @@ _tmp_246_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -38792,9 +40994,9 @@ _tmp_246_rule(Parser *p) return _res; } -// _tmp_247: assignment_expression | expression !':=' +// _tmp_268: assignment_expression | expression !':=' static void * -_tmp_247_rule(Parser *p) +_tmp_268_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38811,18 +41013,18 @@ _tmp_247_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -38830,7 +41032,7 @@ _tmp_247_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -38838,12 +41040,12 @@ _tmp_247_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression !':='")); } _res = NULL; @@ -38852,9 +41054,9 @@ _tmp_247_rule(Parser *p) return _res; } -// _tmp_248: 'as' star_target +// _tmp_269: 'as' star_target static void * -_tmp_248_rule(Parser *p) +_tmp_269_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38871,21 +41073,21 @@ _tmp_248_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38894,9 +41096,9 @@ _tmp_248_rule(Parser *p) return _res; } -// _tmp_249: 'as' star_target +// _tmp_270: 'as' star_target static void * -_tmp_249_rule(Parser *p) +_tmp_270_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38913,21 +41115,21 @@ _tmp_249_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38936,9 +41138,9 @@ _tmp_249_rule(Parser *p) return _res; } -// _tmp_250: 'as' star_target +// _tmp_271: 'as' star_target static void * -_tmp_250_rule(Parser *p) +_tmp_271_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38955,21 +41157,21 @@ _tmp_250_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -38978,9 +41180,9 @@ _tmp_250_rule(Parser *p) return _res; } -// _tmp_251: 'as' star_target +// _tmp_272: 'as' star_target static void * -_tmp_251_rule(Parser *p) +_tmp_272_rule(Parser *p) { if (p->level++ == MAXSTACK) { p->error_indicator = 1; @@ -38997,21 +41199,21 @@ _tmp_251_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_272[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 639)) // token='as' + (_keyword = _PyPegen_expect_token(p, 640)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_272[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_272[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; diff --git a/Parser/pegen.c b/Parser/pegen.c index 94dd9de8a431c1..da410ea84ecb8e 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -155,6 +155,16 @@ initialize_token(Parser *p, Token *parser_token, struct token *new_token, int to return -1; } + parser_token->metadata = NULL; + if (new_token->metadata != NULL) { + if (_PyArena_AddPyObject(p->arena, new_token->metadata) < 0) { + Py_DECREF(parser_token->metadata); + return -1; + } + parser_token->metadata = new_token->metadata; + new_token->metadata = NULL; + } + parser_token->level = new_token->level; parser_token->lineno = new_token->lineno; parser_token->col_offset = p->tok->lineno == p->starting_lineno ? 
p->starting_col_offset + new_token->col_offset @@ -198,6 +208,7 @@ int _PyPegen_fill_token(Parser *p) { struct token new_token; + new_token.metadata = NULL; int type = _PyTokenizer_Get(p->tok, &new_token); // Record and skip '# type: ignore' comments @@ -206,14 +217,14 @@ _PyPegen_fill_token(Parser *p) char *tag = PyMem_Malloc(len + 1); if (tag == NULL) { PyErr_NoMemory(); - return -1; + goto error; } strncpy(tag, new_token.start, len); tag[len] = '\0'; // Ownership of tag passes to the growable array if (!growable_comment_array_add(&p->type_ignore_comments, p->tok->lineno, tag)) { PyErr_NoMemory(); - return -1; + goto error; } type = _PyTokenizer_Get(p->tok, &new_token); } @@ -234,11 +245,14 @@ _PyPegen_fill_token(Parser *p) // Check if we are at the limit of the token array capacity and resize if needed if ((p->fill == p->size) && (_resize_tokens_array(p) != 0)) { - return -1; + goto error; } Token *t = p->tokens[p->fill]; return initialize_token(p, t, &new_token, type); +error: + Py_XDECREF(new_token.metadata); + return -1; } #if defined(Py_DEBUG) @@ -250,7 +264,7 @@ _PyPegen_fill_token(Parser *p) #define memo_statistics _PyRuntime.parser.memo_statistics void -_PyPegen_clear_memo_statistics() +_PyPegen_clear_memo_statistics(void) { for (int i = 0; i < NSTATISTICS; i++) { memo_statistics[i] = 0; @@ -258,7 +272,7 @@ _PyPegen_clear_memo_statistics() } PyObject * -_PyPegen_get_memo_statistics() +_PyPegen_get_memo_statistics(void) { PyObject *ret = PyList_New(NSTATISTICS); if (ret == NULL) { @@ -359,7 +373,7 @@ _PyPegen_expect_token(Parser *p, int type) } Token *t = p->tokens[p->mark]; if (t->type != type) { - return NULL; + return NULL; } p->mark += 1; return t; diff --git a/Parser/pegen.h b/Parser/pegen.h index ad5c97f5f7e5d1..8800e9f97f5e04 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -39,6 +39,7 @@ typedef struct { int level; int lineno, col_offset, end_lineno, end_col_offset; Memo *memo; + PyObject *metadata; } Token; typedef struct { @@ -118,6 +119,11 @@ typedef struct { int is_keyword; } KeywordOrStarred; +typedef struct { + void *result; + PyObject *metadata; +} ResultTokenWithMetadata; + // Internal parser functions #if defined(Py_DEBUG) void _PyPegen_clear_memo_statistics(void); @@ -138,6 +144,7 @@ void* _PyPegen_expect_forced_result(Parser *p, void* result, const char* expecte Token *_PyPegen_expect_forced_token(Parser *p, int type, const char* expected); expr_ty _PyPegen_expect_soft_keyword(Parser *p, const char *keyword); expr_ty _PyPegen_soft_keyword_token(Parser *p); +expr_ty _PyPegen_fstring_middle_token(Parser* p); Token *_PyPegen_get_last_nonnwhitespace_token(Parser *); int _PyPegen_fill_token(Parser *p); expr_ty _PyPegen_name_token(Parser *p); @@ -155,7 +162,7 @@ typedef enum { int _Pypegen_raise_decode_error(Parser *p); void _PyPegen_raise_tokenizer_init_error(PyObject *filename); int _Pypegen_tokenizer_error(Parser *p); -void *_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...); +void *_PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...); void *_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, Py_ssize_t lineno, Py_ssize_t col_offset, Py_ssize_t end_lineno, Py_ssize_t end_col_offset, @@ -175,8 +182,9 @@ RAISE_ERROR_KNOWN_LOCATION(Parser *p, PyObject *errtype, va_end(va); return NULL; } -#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__) -#define RAISE_INDENTATION_ERROR(msg, ...) 
_PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__) +#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 0, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__) #define RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, msg, ...) \ RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, (a)->lineno, (a)->col_offset, (b)->end_lineno, (b)->end_col_offset, msg, ##__VA_ARGS__) #define RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, msg, ...) \ @@ -308,6 +316,8 @@ StarEtc *_PyPegen_star_etc(Parser *, arg_ty, asdl_seq *, arg_ty); arguments_ty _PyPegen_make_arguments(Parser *, asdl_arg_seq *, SlashWithDefault *, asdl_arg_seq *, asdl_seq *, StarEtc *); arguments_ty _PyPegen_empty_arguments(Parser *); +expr_ty _PyPegen_formatted_value(Parser *, expr_ty, Token *, ResultTokenWithMetadata *, ResultTokenWithMetadata *, Token *, + int, int, int, int, PyArena *); AugOperator *_PyPegen_augoperator(Parser*, operator_ty type); stmt_ty _PyPegen_function_def_decorators(Parser *, asdl_expr_seq *, stmt_ty); stmt_ty _PyPegen_class_def_decorators(Parser *, asdl_expr_seq *, stmt_ty); @@ -317,12 +327,18 @@ asdl_keyword_seq *_PyPegen_seq_delete_starred_exprs(Parser *, asdl_seq *); expr_ty _PyPegen_collect_call_seqs(Parser *, asdl_expr_seq *, asdl_seq *, int lineno, int col_offset, int end_lineno, int end_col_offset, PyArena *arena); -expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_seq *); +expr_ty _PyPegen_constant_from_token(Parser* p, Token* tok); +expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok); +expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *, int, int, int, int, PyArena *); +expr_ty _PyPegen_FetchRawForm(Parser *p, int, int, int, int); expr_ty _PyPegen_ensure_imaginary(Parser *p, expr_ty); expr_ty _PyPegen_ensure_real(Parser *p, expr_ty); asdl_seq *_PyPegen_join_sequences(Parser *, asdl_seq *, asdl_seq *); int _PyPegen_check_barry_as_flufl(Parser *, Token *); int _PyPegen_check_legacy_stmt(Parser *p, expr_ty t); +ResultTokenWithMetadata *_PyPegen_check_fstring_conversion(Parser *p, Token *, expr_ty t); +ResultTokenWithMetadata *_PyPegen_setup_full_format_spec(Parser *, Token *, asdl_expr_seq *, int, int, + int, int, PyArena *); mod_ty _PyPegen_make_module(Parser *, asdl_stmt_seq *); void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); expr_ty _PyPegen_get_last_comprehension_item(comprehension_ty comprehension); @@ -338,6 +354,9 @@ void *_PyPegen_run_parser(Parser *); mod_ty _PyPegen_run_parser_from_string(const char *, int, PyObject *, PyCompilerFlags *, PyArena *); asdl_stmt_seq *_PyPegen_interactive_exit(Parser *); +// TODO: move to the correct place in this file +expr_ty _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* expr, Token*b); + // Generated function in parse.c - function definition in python.gram void *_PyPegen_parse(Parser *); diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c index 6ea7600119b643..1f227da0194e3c 100644 --- a/Parser/pegen_errors.c +++ b/Parser/pegen_errors.c @@ -165,6 +165,7 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) { int ret = 0; struct token new_token; + new_token.metadata = NULL; for (;;) { switch (_PyTokenizer_Get(p->tok, &new_token)) { @@ -192,7 +193,11 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) { exit: - if (PyErr_Occurred()) { + Py_XDECREF(new_token.metadata); + // If 
we're in an f-string, we want the syntax error in the expression part + // to propagate, so that tokenizer errors (like expecting '}') that happen afterwards + // do not swallow it. + if (PyErr_Occurred() && p->tok->tok_mode_stack_index <= 0) { Py_XDECREF(value); Py_XDECREF(type); Py_XDECREF(traceback); @@ -205,7 +210,7 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) { // PARSER ERRORS void * -_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) +_PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...) { if (p->fill == 0) { va_list va; @@ -214,8 +219,13 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) va_end(va); return NULL; } - - Token *t = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1]; + if (use_mark && p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + return NULL; + } + Token *t = p->known_err_token != NULL + ? p->known_err_token + : p->tokens[use_mark ? p->mark : p->fill - 1]; Py_ssize_t col_offset; Py_ssize_t end_col_offset = -1; if (t->col_offset == -1) { diff --git a/Parser/string_parser.c b/Parser/string_parser.c index c096bea7426e5c..d4ce33850f7c58 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -135,7 +135,9 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) const char *first_invalid_escape; v = _PyUnicode_DecodeUnicodeEscapeInternal(s, len, NULL, NULL, &first_invalid_escape); - if (v != NULL && first_invalid_escape != NULL) { + // HACK: later we can simply pass the line no, since we don't preserve the tokens + // when we are decoding the string but we preserve the line numbers. + if (v != NULL && first_invalid_escape != NULL && t != NULL) { if (warn_invalid_escape_sequence(parser, first_invalid_escape, t) < 0) { /* We have not decref u before because first_invalid_escape points inside u. */ @@ -166,43 +168,43 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len, Token *t) return result; } -/* s must include the bracketing quote characters, and r, b, u, - &/or f prefixes (if any), and embedded escape sequences (if any). - _PyPegen_parsestr parses it, and sets *result to decoded Python string object. - If the string is an f-string, set *fstr and *fstrlen to the unparsed - string object. Return 0 if no errors occurred. */ -int -_PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, - const char **fstr, Py_ssize_t *fstrlen, Token *t) +PyObject * +_PyPegen_decode_string(Parser *p, int raw, const char *s, size_t len, Token *t) +{ + if (raw) { + return PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL); + } + return decode_unicode_with_escapes(p, s, len, t); +} + +/* s must include the bracketing quote characters, and r, b &/or f prefixes + (if any), and embedded escape sequences (if any). (f-strings are handled by the parser) + _PyPegen_parse_string parses it, and returns the decoded Python string object. 
*/ +PyObject * +_PyPegen_parse_string(Parser *p, Token *t) { const char *s = PyBytes_AsString(t->bytes); if (s == NULL) { - return -1; + return NULL; } size_t len; int quote = Py_CHARMASK(*s); - int fmode = 0; - *bytesmode = 0; - *rawmode = 0; - *result = NULL; - *fstr = NULL; + int bytesmode = 0; + int rawmode = 0; + if (Py_ISALPHA(quote)) { - while (!*bytesmode || !*rawmode) { + while (!bytesmode || !rawmode) { if (quote == 'b' || quote == 'B') { quote =(unsigned char)*++s; - *bytesmode = 1; + bytesmode = 1; } else if (quote == 'u' || quote == 'U') { quote = (unsigned char)*++s; } else if (quote == 'r' || quote == 'R') { quote = (unsigned char)*++s; - *rawmode = 1; - } - else if (quote == 'f' || quote == 'F') { - quote = (unsigned char)*++s; - fmode = 1; + rawmode = 1; } else { break; @@ -210,32 +212,21 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, } } - /* fstrings are only allowed in Python 3.6 and greater */ - if (fmode && p->feature_version < 6) { - p->error_indicator = 1; - RAISE_SYNTAX_ERROR("Format strings are only supported in Python 3.6 and greater"); - return -1; - } - - if (fmode && *bytesmode) { - PyErr_BadInternalCall(); - return -1; - } if (quote != '\'' && quote != '\"') { PyErr_BadInternalCall(); - return -1; + return NULL; } /* Skip the leading quote char. */ s++; len = strlen(s); if (len > INT_MAX) { PyErr_SetString(PyExc_OverflowError, "string to parse is too long"); - return -1; + return NULL; } if (s[--len] != quote) { /* Last quote char must match the first. */ PyErr_BadInternalCall(); - return -1; + return NULL; } if (len >= 4 && s[0] == quote && s[1] == quote) { /* A triple quoted string. We've already skipped one quote at @@ -246,1037 +237,28 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, /* And check that the last two match. */ if (s[--len] != quote || s[--len] != quote) { PyErr_BadInternalCall(); - return -1; + return NULL; } } - if (fmode) { - /* Just return the bytes. The caller will parse the resulting - string. */ - *fstr = s; - *fstrlen = len; - return 0; - } - - /* Not an f-string. */ /* Avoid invoking escape decoding routines if possible. */ - *rawmode = *rawmode || strchr(s, '\\') == NULL; - if (*bytesmode) { + rawmode = rawmode || strchr(s, '\\') == NULL; + if (bytesmode) { /* Disallow non-ASCII characters. */ const char *ch; for (ch = s; *ch; ch++) { if (Py_CHARMASK(*ch) >= 0x80) { - RAISE_SYNTAX_ERROR( + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + t, "bytes can only contain ASCII " "literal characters"); - return -1; - } - } - if (*rawmode) { - *result = PyBytes_FromStringAndSize(s, len); - } - else { - *result = decode_bytes_with_escapes(p, s, len, t); - } - } - else { - if (*rawmode) { - *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL); - } - else { - *result = decode_unicode_with_escapes(p, s, len, t); - } - } - return *result == NULL ? -1 : 0; -} - - - -// FSTRING STUFF - -/* Fix locations for the given node and its children. - - `parent` is the enclosing node. - `expr_start` is the starting position of the expression (pointing to the open brace). - `n` is the node which locations are going to be fixed relative to parent. - `expr_str` is the child node's string representation, including braces. 
-*/ -static bool -fstring_find_expr_location(Token *parent, const char* expr_start, char *expr_str, int *p_lines, int *p_cols) -{ - *p_lines = 0; - *p_cols = 0; - assert(expr_start != NULL && *expr_start == '{'); - if (parent && parent->bytes) { - const char *parent_str = PyBytes_AsString(parent->bytes); - if (!parent_str) { - return false; - } - // The following is needed, in order to correctly shift the column - // offset, in the case that (disregarding any whitespace) a newline - // immediately follows the opening curly brace of the fstring expression. - bool newline_after_brace = 1; - const char *start = expr_start + 1; - while (start && *start != '}' && *start != '\n') { - if (*start != ' ' && *start != '\t' && *start != '\f') { - newline_after_brace = 0; - break; - } - start++; - } - - // Account for the characters from the last newline character to our - // left until the beginning of expr_start. - if (!newline_after_brace) { - start = expr_start; - while (start > parent_str && *start != '\n') { - start--; - } - *p_cols += (int)(expr_start - start); - if (*start == '\n') { - *p_cols -= 1; - } - } - /* adjust the start based on the number of newlines encountered - before the f-string expression */ - for (const char *p = parent_str; p < expr_start; p++) { - if (*p == '\n') { - (*p_lines)++; - } - } - } - return true; -} - - -/* Compile this expression in to an expr_ty. Add parens around the - expression, in order to allow leading spaces in the expression. */ -static expr_ty -fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, - Token *t) -{ - expr_ty expr = NULL; - char *str; - Py_ssize_t len; - const char *s; - expr_ty result = NULL; - - assert(expr_end >= expr_start); - assert(*(expr_start-1) == '{'); - assert(*expr_end == '}' || *expr_end == '!' || *expr_end == ':' || - *expr_end == '='); - - /* If the substring is all whitespace, it's an error. We need to catch this - here, and not when we call PyParser_SimpleParseStringFlagsFilename, - because turning the expression '' in to '()' would go from being invalid - to valid. */ - for (s = expr_start; s != expr_end; s++) { - char c = *s; - /* The Python parser ignores only the following whitespace - characters (\r already is converted to \n). */ - if (!(c == ' ' || c == '\t' || c == '\n' || c == '\f')) { - break; - } - } - - if (s == expr_end) { - if (*expr_end == '!' || *expr_end == ':' || *expr_end == '=') { - RAISE_SYNTAX_ERROR("f-string: expression required before '%c'", *expr_end); - return NULL; - } - RAISE_SYNTAX_ERROR("f-string: empty expression not allowed"); - return NULL; - } - - len = expr_end - expr_start; - /* Allocate 3 extra bytes: open paren, close paren, null byte. */ - str = PyMem_Calloc(len + 3, sizeof(char)); - if (str == NULL) { - PyErr_NoMemory(); - return NULL; - } - - // The call to fstring_find_expr_location is responsible for finding the column offset - // the generated AST nodes need to be shifted to the right, which is equal to the number - // of the f-string characters before the expression starts. - memcpy(str+1, expr_start, len); - int lines, cols; - if (!fstring_find_expr_location(t, expr_start-1, str+1, &lines, &cols)) { - PyMem_Free(str); - return NULL; - } - - // The parentheses are needed in order to allow for leading whitespace within - // the f-string expression. This consequently gets parsed as a group (see the - // group rule in python.gram). 
- str[0] = '('; - str[len+1] = ')'; - - struct tok_state* tok = _PyTokenizer_FromString(str, 1); - if (tok == NULL) { - PyMem_Free(str); - return NULL; - } - tok->filename = Py_NewRef(p->tok->filename); - tok->lineno = t->lineno + lines - 1; - - Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version, - NULL, p->arena); - - p2->starting_lineno = t->lineno + lines; - p2->starting_col_offset = lines != 0 ? cols : t->col_offset + cols; - - expr = _PyPegen_run_parser(p2); - - if (expr == NULL) { - goto exit; - } - result = expr; - -exit: - PyMem_Free(str); - _PyPegen_Parser_Free(p2); - _PyTokenizer_Free(tok); - return result; -} - -/* Return -1 on error. - - Return 0 if we reached the end of the literal. - - Return 1 if we haven't reached the end of the literal, but we want - the caller to process the literal up to this point. Used for - doubled braces. -*/ -static int -fstring_find_literal(Parser *p, const char **str, const char *end, int raw, - PyObject **literal, int recurse_lvl, Token *t) -{ - /* Get any literal string. It ends when we hit an un-doubled left - brace (which isn't part of a unicode name escape such as - "\N{EULER CONSTANT}"), or the end of the string. */ - - const char *s = *str; - const char *literal_start = s; - int result = 0; - - assert(*literal == NULL); - while (s < end) { - char ch = *s++; - if (!raw && ch == '\\' && s < end) { - ch = *s++; - if (ch == 'N') { - /* We need to look at and skip matching braces for "\N{name}" - sequences because otherwise we'll think the opening '{' - starts an expression, which is not the case with "\N". - Keep looking for either a matched '{' '}' pair, or the end - of the string. */ - - if (s < end && *s++ == '{') { - while (s < end && *s++ != '}') { - } - continue; - } - - /* This is an invalid "\N" sequence, since it's a "\N" not - followed by a "{". Just keep parsing this literal. This - error will be caught later by - decode_unicode_with_escapes(). */ - continue; - } - if (ch == '{' && warn_invalid_escape_sequence(p, s-1, t) < 0) { - return -1; - } - } - if (ch == '{' || ch == '}') { - /* Check for doubled braces, but only at the top level. If - we checked at every level, then f'{0:{3}}' would fail - with the two closing braces. */ - if (recurse_lvl == 0) { - if (s < end && *s == ch) { - /* We're going to tell the caller that the literal ends - here, but that they should continue scanning. But also - skip over the second brace when we resume scanning. */ - *str = s + 1; - result = 1; - goto done; - } - - /* Where a single '{' is the start of a new expression, a - single '}' is not allowed. */ - if (ch == '}') { - *str = s - 1; - RAISE_SYNTAX_ERROR("f-string: single '}' is not allowed"); - return -1; - } - } - /* We're either at a '{', which means we're starting another - expression; or a '}', which means we're at the end of this - f-string (for a nested format_spec). */ - s--; - break; - } - } - *str = s; - assert(s <= end); - assert(s == end || *s == '{' || *s == '}'); -done: - if (literal_start != s) { - if (raw) { - *literal = PyUnicode_DecodeUTF8Stateful(literal_start, - s - literal_start, - NULL, NULL); - } - else { - *literal = decode_unicode_with_escapes(p, literal_start, - s - literal_start, t); - } - if (!*literal) { - return -1; - } - } - return result; -} - -/* Forward declaration because parsing is recursive. 
*/ -static expr_ty -fstring_parse(Parser *p, const char **str, const char *end, int raw, int recurse_lvl, - Token *first_token, Token* t, Token *last_token); - -/* Parse the f-string at *str, ending at end. We know *str starts an - expression (so it must be a '{'). Returns the FormattedValue node, which - includes the expression, conversion character, format_spec expression, and - optionally the text of the expression (if = is used). - - Note that I don't do a perfect job here: I don't make sure that a - closing brace doesn't match an opening paren, for example. It - doesn't need to error on all invalid expressions, just correctly - find the end of all valid ones. Any errors inside the expression - will be caught when we parse it later. - - *expression is set to the expression. For an '=' "debug" expression, - *expr_text is set to the debug text (the original text of the expression, - including the '=' and any whitespace around it, as a string object). If - not a debug expression, *expr_text set to NULL. */ -static int -fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int recurse_lvl, - PyObject **expr_text, expr_ty *expression, Token *first_token, - Token *t, Token *last_token) -{ - /* Return -1 on error, else 0. */ - - const char *expr_start; - const char *expr_end; - expr_ty simple_expression; - expr_ty format_spec = NULL; /* Optional format specifier. */ - int conversion = -1; /* The conversion char. Use default if not - specified, or !r if using = and no format - spec. */ - - /* 0 if we're not in a string, else the quote char we're trying to - match (single or double quote). */ - char quote_char = 0; - - /* If we're inside a string, 1=normal, 3=triple-quoted. */ - int string_type = 0; - - /* Keep track of nesting level for braces/parens/brackets in - expressions. */ - Py_ssize_t nested_depth = 0; - char parenstack[MAXLEVEL]; - - *expr_text = NULL; - - /* Can only nest one level deep. */ - if (recurse_lvl >= 2) { - RAISE_SYNTAX_ERROR("f-string: expressions nested too deeply"); - goto error; - } - - /* The first char must be a left brace, or we wouldn't have gotten - here. Skip over it. */ - assert(**str == '{'); - *str += 1; - - expr_start = *str; - for (; *str < end; (*str)++) { - char ch; - - /* Loop invariants. */ - assert(nested_depth >= 0); - assert(*str >= expr_start && *str < end); - if (quote_char) { - assert(string_type == 1 || string_type == 3); - } else { - assert(string_type == 0); - } - - ch = **str; - /* Nowhere inside an expression is a backslash allowed. */ - if (ch == '\\') { - /* Error: can't include a backslash character, inside - parens or strings or not. */ - RAISE_SYNTAX_ERROR( - "f-string expression part " - "cannot include a backslash"); - goto error; - } - if (quote_char) { - /* We're inside a string. See if we're at the end. */ - /* This code needs to implement the same non-error logic - as tok_get from tokenizer.c, at the letter_quote - label. To actually share that code would be a - nightmare. But, it's unlikely to change and is small, - so duplicate it here. Note we don't need to catch all - of the errors, since they'll be caught when parsing the - expression. We just need to match the non-error - cases. Thus we can ignore \n in single-quoted strings, - for example. Or non-terminated strings. */ - if (ch == quote_char) { - /* Does this match the string_type (single or triple - quoted)? */ - if (string_type == 3) { - if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) { - /* We're at the end of a triple quoted string. 
*/ - *str += 2; - string_type = 0; - quote_char = 0; - continue; - } - } else { - /* We're at the end of a normal string. */ - quote_char = 0; - string_type = 0; - continue; - } - } - } else if (ch == '\'' || ch == '"') { - /* Is this a triple quoted string? */ - if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) { - string_type = 3; - *str += 2; - } else { - /* Start of a normal string. */ - string_type = 1; - } - /* Start looking for the end of the string. */ - quote_char = ch; - } else if (ch == '[' || ch == '{' || ch == '(') { - if (nested_depth >= MAXLEVEL) { - RAISE_SYNTAX_ERROR("f-string: too many nested parenthesis"); - goto error; - } - parenstack[nested_depth] = ch; - nested_depth++; - } else if (ch == '#') { - /* Error: can't include a comment character, inside parens - or not. */ - RAISE_SYNTAX_ERROR("f-string expression part cannot include '#'"); - goto error; - } else if (nested_depth == 0 && - (ch == '!' || ch == ':' || ch == '}' || - ch == '=' || ch == '>' || ch == '<')) { - /* See if there's a next character. */ - if (*str+1 < end) { - char next = *(*str+1); - - /* For "!=". since '=' is not an allowed conversion character, - nothing is lost in this test. */ - if ((ch == '!' && next == '=') || /* != */ - (ch == '=' && next == '=') || /* == */ - (ch == '<' && next == '=') || /* <= */ - (ch == '>' && next == '=') /* >= */ - ) { - *str += 1; - continue; - } - } - /* Don't get out of the loop for these, if they're single - chars (not part of 2-char tokens). If by themselves, they - don't end an expression (unlike say '!'). */ - if (ch == '>' || ch == '<') { - continue; - } - - /* Normal way out of this loop. */ - break; - } else if (ch == ']' || ch == '}' || ch == ')') { - if (!nested_depth) { - RAISE_SYNTAX_ERROR("f-string: unmatched '%c'", ch); - goto error; - } - nested_depth--; - int opening = (unsigned char)parenstack[nested_depth]; - if (!((opening == '(' && ch == ')') || - (opening == '[' && ch == ']') || - (opening == '{' && ch == '}'))) - { - RAISE_SYNTAX_ERROR( - "f-string: closing parenthesis '%c' " - "does not match opening parenthesis '%c'", - ch, opening); - goto error; - } - } else { - /* Just consume this char and loop around. */ - } - } - expr_end = *str; - /* If we leave the above loop in a string or with mismatched parens, we - don't really care. We'll get a syntax error when compiling the - expression. But, we can produce a better error message, so let's just - do that.*/ - if (quote_char) { - RAISE_SYNTAX_ERROR("f-string: unterminated string"); - goto error; - } - if (nested_depth) { - int opening = (unsigned char)parenstack[nested_depth - 1]; - RAISE_SYNTAX_ERROR("f-string: unmatched '%c'", opening); - goto error; - } - - if (*str >= end) { - goto unexpected_end_of_string; - } - - /* Compile the expression as soon as possible, so we show errors - related to the expression before errors related to the - conversion or format_spec. */ - simple_expression = fstring_compile_expr(p, expr_start, expr_end, t); - if (!simple_expression) { - goto error; - } - - /* Check for =, which puts the text value of the expression in - expr_text. */ - if (**str == '=') { - if (p->feature_version < 8) { - RAISE_SYNTAX_ERROR("f-string: self documenting expressions are " - "only supported in Python 3.8 and greater"); - goto error; - } - *str += 1; - - /* Skip over ASCII whitespace. No need to test for end of string - here, since we know there's at least a trailing quote somewhere - ahead. 
*/ - while (Py_ISSPACE(**str)) { - *str += 1; - } - if (*str >= end) { - goto unexpected_end_of_string; - } - /* Set *expr_text to the text of the expression. */ - *expr_text = PyUnicode_FromStringAndSize(expr_start, *str-expr_start); - if (!*expr_text) { - goto error; - } - } - - /* Check for a conversion char, if present. */ - if (**str == '!') { - *str += 1; - const char *conv_start = *str; - while (1) { - if (*str >= end) { - goto unexpected_end_of_string; - } - if (**str == '}' || **str == ':') { - break; - } - *str += 1; - } - if (*str == conv_start) { - RAISE_SYNTAX_ERROR( - "f-string: missed conversion character"); - goto error; - } - - conversion = (unsigned char)*conv_start; - /* Validate the conversion. */ - if ((*str != conv_start + 1) || - !(conversion == 's' || conversion == 'r' || conversion == 'a')) - { - PyObject *conv_obj = PyUnicode_FromStringAndSize(conv_start, - *str-conv_start); - if (conv_obj) { - RAISE_SYNTAX_ERROR( - "f-string: invalid conversion character %R: " - "expected 's', 'r', or 'a'", - conv_obj); - Py_DECREF(conv_obj); - } - goto error; - } - - } - - /* Check for the format spec, if present. */ - assert(*str < end); - if (**str == ':') { - *str += 1; - if (*str >= end) { - goto unexpected_end_of_string; - } - - /* Parse the format spec. */ - format_spec = fstring_parse(p, str, end, raw, recurse_lvl+1, - first_token, t, last_token); - if (!format_spec) { - goto error; - } - } - - if (*str >= end || **str != '}') { - goto unexpected_end_of_string; - } - - /* We're at a right brace. Consume it. */ - assert(*str < end); - assert(**str == '}'); - *str += 1; - - /* If we're in = mode (detected by non-NULL expr_text), and have no format - spec and no explicit conversion, set the conversion to 'r'. */ - if (*expr_text && format_spec == NULL && conversion == -1) { - conversion = 'r'; - } - - /* And now create the FormattedValue node that represents this - entire expression with the conversion and format spec. */ - //TODO: Fix this - *expression = _PyAST_FormattedValue(simple_expression, conversion, - format_spec, first_token->lineno, - first_token->col_offset, - last_token->end_lineno, - last_token->end_col_offset, p->arena); - if (!*expression) { - goto error; - } - - return 0; - -unexpected_end_of_string: - RAISE_SYNTAX_ERROR("f-string: expecting '}'"); - /* Falls through to error. */ - -error: - Py_XDECREF(*expr_text); - return -1; - -} - -/* Return -1 on error. - - Return 0 if we have a literal (possible zero length) and an - expression (zero length if at the end of the string. - - Return 1 if we have a literal, but no expression, and we want the - caller to call us again. This is used to deal with doubled - braces. - - When called multiple times on the string 'a{{b{0}c', this function - will return: - - 1. the literal 'a{' with no expression, and a return value - of 1. Despite the fact that there's no expression, the return - value of 1 means we're not finished yet. - - 2. the literal 'b' and the expression '0', with a return value of - 0. The fact that there's an expression means we're not finished. - - 3. literal 'c' with no expression and a return value of 0. The - combination of the return value of 0 with no expression means - we're finished. 
-*/ -static int -fstring_find_literal_and_expr(Parser *p, const char **str, const char *end, int raw, - int recurse_lvl, PyObject **literal, - PyObject **expr_text, expr_ty *expression, - Token *first_token, Token *t, Token *last_token) -{ - int result; - - assert(*literal == NULL && *expression == NULL); - - /* Get any literal string. */ - result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl, t); - if (result < 0) { - goto error; - } - - assert(result == 0 || result == 1); - - if (result == 1) { - /* We have a literal, but don't look at the expression. */ - return 1; - } - - if (*str >= end || **str == '}') { - /* We're at the end of the string or the end of a nested - f-string: no expression. The top-level error case where we - expect to be at the end of the string but we're at a '}' is - handled later. */ - return 0; - } - - /* We must now be the start of an expression, on a '{'. */ - assert(**str == '{'); - - if (fstring_find_expr(p, str, end, raw, recurse_lvl, expr_text, - expression, first_token, t, last_token) < 0) { - goto error; - } - - return 0; - -error: - Py_CLEAR(*literal); - return -1; -} - -#ifdef NDEBUG -#define ExprList_check_invariants(l) -#else -static void -ExprList_check_invariants(ExprList *l) -{ - /* Check our invariants. Make sure this object is "live", and - hasn't been deallocated. */ - assert(l->size >= 0); - assert(l->p != NULL); - if (l->size <= EXPRLIST_N_CACHED) { - assert(l->data == l->p); - } -} -#endif - -static void -ExprList_Init(ExprList *l) -{ - l->allocated = EXPRLIST_N_CACHED; - l->size = 0; - - /* Until we start allocating dynamically, p points to data. */ - l->p = l->data; - - ExprList_check_invariants(l); -} - -static int -ExprList_Append(ExprList *l, expr_ty exp) -{ - ExprList_check_invariants(l); - if (l->size >= l->allocated) { - /* We need to alloc (or realloc) the memory. */ - Py_ssize_t new_size = l->allocated * 2; - - /* See if we've ever allocated anything dynamically. */ - if (l->p == l->data) { - Py_ssize_t i; - /* We're still using the cached data. Switch to - alloc-ing. */ - l->p = PyMem_Malloc(sizeof(expr_ty) * new_size); - if (!l->p) { - return -1; - } - /* Copy the cached data into the new buffer. */ - for (i = 0; i < l->size; i++) { - l->p[i] = l->data[i]; - } - } else { - /* Just realloc. */ - expr_ty *tmp = PyMem_Realloc(l->p, sizeof(expr_ty) * new_size); - if (!tmp) { - PyMem_Free(l->p); - l->p = NULL; - return -1; - } - l->p = tmp; - } - - l->allocated = new_size; - assert(l->allocated == 2 * l->size); - } - - l->p[l->size++] = exp; - - ExprList_check_invariants(l); - return 0; -} - -static void -ExprList_Dealloc(ExprList *l) -{ - ExprList_check_invariants(l); - - /* If there's been an error, or we've never dynamically allocated, - do nothing. */ - if (!l->p || l->p == l->data) { - /* Do nothing. */ - } else { - /* We have dynamically allocated. Free the memory. */ - PyMem_Free(l->p); - } - l->p = NULL; - l->size = -1; -} - -static asdl_expr_seq * -ExprList_Finish(ExprList *l, PyArena *arena) -{ - asdl_expr_seq *seq; - - ExprList_check_invariants(l); - - /* Allocate the asdl_seq and copy the expressions in to it. 
*/ - seq = _Py_asdl_expr_seq_new(l->size, arena); - if (seq) { - Py_ssize_t i; - for (i = 0; i < l->size; i++) { - asdl_seq_SET(seq, i, l->p[i]); - } - } - ExprList_Dealloc(l); - return seq; -} - -#ifdef NDEBUG -#define FstringParser_check_invariants(state) -#else -static void -FstringParser_check_invariants(FstringParser *state) -{ - if (state->last_str) { - assert(PyUnicode_CheckExact(state->last_str)); - } - ExprList_check_invariants(&state->expr_list); -} -#endif - -void -_PyPegen_FstringParser_Init(FstringParser *state) -{ - state->last_str = NULL; - state->fmode = 0; - ExprList_Init(&state->expr_list); - FstringParser_check_invariants(state); -} - -void -_PyPegen_FstringParser_Dealloc(FstringParser *state) -{ - FstringParser_check_invariants(state); - - Py_XDECREF(state->last_str); - ExprList_Dealloc(&state->expr_list); -} - -/* Make a Constant node, but decref the PyUnicode object being added. */ -static expr_ty -make_str_node_and_del(Parser *p, PyObject **str, Token* first_token, Token *last_token) -{ - PyObject *s = *str; - PyObject *kind = NULL; - *str = NULL; - assert(PyUnicode_CheckExact(s)); - if (_PyArena_AddPyObject(p->arena, s) < 0) { - Py_DECREF(s); - return NULL; - } - const char* the_str = PyBytes_AsString(first_token->bytes); - if (the_str && the_str[0] == 'u') { - kind = _PyPegen_new_identifier(p, "u"); - } - - if (kind == NULL && PyErr_Occurred()) { - return NULL; - } - - return _PyAST_Constant(s, kind, first_token->lineno, first_token->col_offset, - last_token->end_lineno, last_token->end_col_offset, - p->arena); - -} - - -/* Add a non-f-string (that is, a regular literal string). str is - decref'd. */ -int -_PyPegen_FstringParser_ConcatAndDel(FstringParser *state, PyObject *str) -{ - FstringParser_check_invariants(state); - - assert(PyUnicode_CheckExact(str)); - - if (PyUnicode_GET_LENGTH(str) == 0) { - Py_DECREF(str); - return 0; - } - - if (!state->last_str) { - /* We didn't have a string before, so just remember this one. */ - state->last_str = str; - } else { - /* Concatenate this with the previous string. */ - PyUnicode_AppendAndDel(&state->last_str, str); - if (!state->last_str) { - return -1; - } - } - FstringParser_check_invariants(state); - return 0; -} - -/* Parse an f-string. The f-string is in *str to end, with no - 'f' or quotes. */ -int -_PyPegen_FstringParser_ConcatFstring(Parser *p, FstringParser *state, const char **str, - const char *end, int raw, int recurse_lvl, - Token *first_token, Token* t, Token *last_token) -{ - FstringParser_check_invariants(state); - state->fmode = 1; - - /* Parse the f-string. */ - while (1) { - PyObject *literal = NULL; - PyObject *expr_text = NULL; - expr_ty expression = NULL; - - /* If there's a zero length literal in front of the - expression, literal will be NULL. If we're at the end of - the f-string, expression will be NULL (unless result == 1, - see below). */ - int result = fstring_find_literal_and_expr(p, str, end, raw, recurse_lvl, - &literal, &expr_text, - &expression, first_token, t, last_token); - if (result < 0) { - return -1; - } - - /* Add the literal, if any. */ - if (literal && _PyPegen_FstringParser_ConcatAndDel(state, literal) < 0) { - Py_XDECREF(expr_text); - return -1; - } - /* Add the expr_text, if any. */ - if (expr_text && _PyPegen_FstringParser_ConcatAndDel(state, expr_text) < 0) { - return -1; - } - - /* We've dealt with the literal and expr_text, their ownership has - been transferred to the state object. Don't look at them again. 
*/ - - /* See if we should just loop around to get the next literal - and expression, while ignoring the expression this - time. This is used for un-doubling braces, as an - optimization. */ - if (result == 1) { - continue; - } - - if (!expression) { - /* We're done with this f-string. */ - break; - } - - /* We know we have an expression. Convert any existing string - to a Constant node. */ - if (state->last_str) { - /* Convert the existing last_str literal to a Constant node. */ - expr_ty last_str = make_str_node_and_del(p, &state->last_str, first_token, last_token); - if (!last_str || ExprList_Append(&state->expr_list, last_str) < 0) { - return -1; - } - } - - if (ExprList_Append(&state->expr_list, expression) < 0) { - return -1; - } - } - - /* If recurse_lvl is zero, then we must be at the end of the - string. Otherwise, we must be at a right brace. */ - - if (recurse_lvl == 0 && *str < end-1) { - RAISE_SYNTAX_ERROR("f-string: unexpected end of string"); - return -1; - } - if (recurse_lvl != 0 && **str != '}') { - RAISE_SYNTAX_ERROR("f-string: expecting '}'"); - return -1; - } - - FstringParser_check_invariants(state); - return 0; -} - -/* Convert the partial state reflected in last_str and expr_list to an - expr_ty. The expr_ty can be a Constant, or a JoinedStr. */ -expr_ty -_PyPegen_FstringParser_Finish(Parser *p, FstringParser *state, Token* first_token, - Token *last_token) -{ - asdl_expr_seq *seq; - - FstringParser_check_invariants(state); - - /* If we're just a constant string with no expressions, return - that. */ - if (!state->fmode) { - assert(!state->expr_list.size); - if (!state->last_str) { - /* Create a zero length string. */ - state->last_str = PyUnicode_FromStringAndSize(NULL, 0); - if (!state->last_str) { - goto error; + return NULL; } } - return make_str_node_and_del(p, &state->last_str, first_token, last_token); - } - - /* Create a Constant node out of last_str, if needed. It will be the - last node in our expression list. */ - if (state->last_str) { - expr_ty str = make_str_node_and_del(p, &state->last_str, first_token, last_token); - if (!str || ExprList_Append(&state->expr_list, str) < 0) { - goto error; + if (rawmode) { + return PyBytes_FromStringAndSize(s, len); } + return decode_bytes_with_escapes(p, s, len, t); } - /* This has already been freed. */ - assert(state->last_str == NULL); - - seq = ExprList_Finish(&state->expr_list, p->arena); - if (!seq) { - goto error; - } - - return _PyAST_JoinedStr(seq, first_token->lineno, first_token->col_offset, - last_token->end_lineno, last_token->end_col_offset, - p->arena); - -error: - _PyPegen_FstringParser_Dealloc(state); - return NULL; -} - -/* Given an f-string (with no 'f' or quotes) that's in *str and ends - at end, parse it into an expr_ty. Return NULL on error. Adjust - str to point past the parsed portion. 
*/ -static expr_ty -fstring_parse(Parser *p, const char **str, const char *end, int raw, - int recurse_lvl, Token *first_token, Token* t, Token *last_token) -{ - FstringParser state; - - _PyPegen_FstringParser_Init(&state); - if (_PyPegen_FstringParser_ConcatFstring(p, &state, str, end, raw, recurse_lvl, - first_token, t, last_token) < 0) { - _PyPegen_FstringParser_Dealloc(&state); - return NULL; - } - - return _PyPegen_FstringParser_Finish(p, &state, t, t); + return _PyPegen_decode_string(p, rawmode, s, len, t); } diff --git a/Parser/string_parser.h b/Parser/string_parser.h index 4a22f3d3086f47..0b34de1b4e41e9 100644 --- a/Parser/string_parser.h +++ b/Parser/string_parser.h @@ -5,42 +5,7 @@ #include <pycore_ast.h> #include "pegen.h" -#define EXPRLIST_N_CACHED 64 - -typedef struct { - /* Incrementally build an array of expr_ty, so be used in an - asdl_seq. Cache some small but reasonably sized number of - expr_ty's, and then after that start dynamically allocating, - doubling the number allocated each time. Note that the f-string - f'{0}a{1}' contains 3 expr_ty's: 2 FormattedValue's, and one - Constant for the literal 'a'. So you add expr_ty's about twice as - fast as you add expressions in an f-string. */ - - Py_ssize_t allocated; /* Number we've allocated. */ - Py_ssize_t size; /* Number we've used. */ - expr_ty *p; /* Pointer to the memory we're actually - using. Will point to 'data' until we - start dynamically allocating. */ - expr_ty data[EXPRLIST_N_CACHED]; -} ExprList; - -/* The FstringParser is designed to add a mix of strings and - f-strings, and concat them together as needed. Ultimately, it - generates an expr_ty. */ -typedef struct { - PyObject *last_str; - ExprList expr_list; - int fmode; -} FstringParser; - -void _PyPegen_FstringParser_Init(FstringParser *); -int _PyPegen_parsestr(Parser *, int *, int *, PyObject **, - const char **, Py_ssize_t *, Token *); -int _PyPegen_FstringParser_ConcatFstring(Parser *, FstringParser *, const char **, - const char *, int, int, Token *, Token *, - Token *); -int _PyPegen_FstringParser_ConcatAndDel(FstringParser *, PyObject *); -expr_ty _PyPegen_FstringParser_Finish(Parser *, FstringParser *, Token *, Token *); -void _PyPegen_FstringParser_Dealloc(FstringParser *); +PyObject *_PyPegen_parse_string(Parser *, Token *); +PyObject *_PyPegen_decode_string(Parser *, int, const char *, size_t, Token *); #endif diff --git a/Parser/token.c b/Parser/token.c index 6299ad2f563144..82267fbfcd0c54 100644 --- a/Parser/token.c +++ b/Parser/token.c @@ -60,12 +60,16 @@ const char * const _PyParser_TokenNames[] = { "RARROW", "ELLIPSIS", "COLONEQUAL", + "EXCLAMATION", "OP", "AWAIT", "ASYNC", "TYPE_IGNORE", "TYPE_COMMENT", "SOFT_KEYWORD", + "FSTRING_START", + "FSTRING_MIDDLE", + "FSTRING_END", "<ERRORTOKEN>", "<COMMENT>", "<NL>", @@ -79,6 +83,7 @@ int _PyToken_OneChar(int c1) { switch (c1) { + case '!': return EXCLAMATION; case '%': return PERCENT; case '&': return AMPER; case '(': return LPAR; diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 463c0e00ca1411..d2f9fee110ebf5 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -11,11 +11,6 @@ #include "tokenizer.h" #include "errcode.h" -#include "unicodeobject.h" -#include "bytesobject.h" -#include "fileobject.h" -#include "abstract.h" - /* Alternate tab spacing */ #define ALTTABSIZE 1 @@ -43,6 +38,24 @@ tok->lineno++; \ tok->col_offset = 0; +#define INSIDE_FSTRING(tok) (tok->tok_mode_stack_index > 0) +#define INSIDE_FSTRING_EXPR(tok) (tok->curly_bracket_expr_start_depth >= 0) +#ifdef Py_DEBUG 
+static inline tokenizer_mode* TOK_GET_MODE(struct tok_state* tok) { + assert(tok->tok_mode_stack_index >= 0); + assert(tok->tok_mode_stack_index < MAXFSTRINGLEVEL); + return &(tok->tok_mode_stack[tok->tok_mode_stack_index]); +} +static inline tokenizer_mode* TOK_NEXT_MODE(struct tok_state* tok) { + assert(tok->tok_mode_stack_index >= 0); + assert(tok->tok_mode_stack_index + 1 < MAXFSTRINGLEVEL); + return &(tok->tok_mode_stack[++tok->tok_mode_stack_index]); +} +#else +#define TOK_GET_MODE(tok) (&(tok->tok_mode_stack[tok->tok_mode_stack_index])) +#define TOK_NEXT_MODE(tok) (&(tok->tok_mode_stack[++tok->tok_mode_stack_index])) +#endif + /* Forward */ static struct tok_state *tok_new(void); static int tok_nextc(struct tok_state *tok); @@ -98,6 +111,9 @@ tok_new(void) tok->interactive_underflow = IUNDERFLOW_NORMAL; tok->str = NULL; tok->report_warnings = 1; + tok->tok_mode_stack[0] = (tokenizer_mode){.kind =TOK_REGULAR_MODE, .f_string_quote='\0', .f_string_quote_size = 0, .f_string_debug=0}; + tok->tok_mode_stack_index = 0; + tok->tok_report_warnings = 1; #ifdef Py_DEBUG tok->debug = _Py_GetConfig()->parser_debug; #endif @@ -345,6 +361,126 @@ tok_concatenate_interactive_new_line(struct tok_state *tok, const char *line) { return 0; } +/* Traverse and remember all f-string buffers, in order to be able to restore + them after reallocating tok->buf */ +static void +remember_fstring_buffers(struct tok_state *tok) +{ + int index; + tokenizer_mode *mode; + + for (index = tok->tok_mode_stack_index; index >= 0; --index) { + mode = &(tok->tok_mode_stack[index]); + mode->f_string_start_offset = mode->f_string_start - tok->buf; + mode->f_string_multi_line_start_offset = mode->f_string_multi_line_start - tok->buf; + } +} + +/* Traverse and restore all f-string buffers after reallocating tok->buf */ +static void +restore_fstring_buffers(struct tok_state *tok) +{ + int index; + tokenizer_mode *mode; + + for (index = tok->tok_mode_stack_index; index >= 0; --index) { + mode = &(tok->tok_mode_stack[index]); + mode->f_string_start = tok->buf + mode->f_string_start_offset; + mode->f_string_multi_line_start = tok->buf + mode->f_string_multi_line_start_offset; + } +} + +static int +set_fstring_expr(struct tok_state* tok, struct token *token, char c) { + assert(token != NULL); + assert(c == '}' || c == ':' || c == '!'); + tokenizer_mode *tok_mode = TOK_GET_MODE(tok); + + if (!tok_mode->f_string_debug || token->metadata) { + return 0; + } + + PyObject *res = PyUnicode_DecodeUTF8( + tok_mode->last_expr_buffer, + tok_mode->last_expr_size - tok_mode->last_expr_end, + NULL + ); + if (!res) { + return -1; + } + token->metadata = res; + return 0; +} + +static int +update_fstring_expr(struct tok_state *tok, char cur) +{ + assert(tok->cur != NULL); + + Py_ssize_t size = strlen(tok->cur); + tokenizer_mode *tok_mode = TOK_GET_MODE(tok); + + switch (cur) { + case 0: + if (!tok_mode->last_expr_buffer || tok_mode->last_expr_end >= 0) { + return 1; + } + char *new_buffer = PyMem_Realloc( + tok_mode->last_expr_buffer, + tok_mode->last_expr_size + size + ); + if (new_buffer == NULL) { + PyMem_Free(tok_mode->last_expr_buffer); + goto error; + } + tok_mode->last_expr_buffer = new_buffer; + strncpy(tok_mode->last_expr_buffer + tok_mode->last_expr_size, tok->cur, size); + tok_mode->last_expr_size += size; + break; + case '{': + if (tok_mode->last_expr_buffer != NULL) { + PyMem_Free(tok_mode->last_expr_buffer); + } + tok_mode->last_expr_buffer = PyMem_Malloc(size); + if (tok_mode->last_expr_buffer == NULL) { + goto error; + } + 
tok_mode->last_expr_size = size; + tok_mode->last_expr_end = -1; + strncpy(tok_mode->last_expr_buffer, tok->cur, size); + break; + case '}': + case '!': + case ':': + if (tok_mode->last_expr_end == -1) { + tok_mode->last_expr_end = strlen(tok->start); + } + break; + default: + Py_UNREACHABLE(); + } + return 1; +error: + tok->done = E_NOMEM; + return 0; +} + +static void +free_fstring_expressions(struct tok_state *tok) +{ + int index; + tokenizer_mode *mode; + + for (index = tok->tok_mode_stack_index; index >= 0; --index) { + mode = &(tok->tok_mode_stack[index]); + if (mode->last_expr_buffer != NULL) { + PyMem_Free(mode->last_expr_buffer); + mode->last_expr_buffer = NULL; + mode->last_expr_size = 0; + mode->last_expr_end = -1; + } + } +} /* Read a line of text from TOK into S, using the stream in TOK. Return NULL on failure, else S. @@ -372,6 +508,7 @@ tok_reserve_buf(struct tok_state *tok, Py_ssize_t size) Py_ssize_t start = tok->start == NULL ? -1 : tok->start - tok->buf; Py_ssize_t line_start = tok->start == NULL ? -1 : tok->line_start - tok->buf; Py_ssize_t multi_line_start = tok->multi_line_start - tok->buf; + remember_fstring_buffers(tok); newbuf = (char *)PyMem_Realloc(newbuf, newsize); if (newbuf == NULL) { tok->done = E_NOMEM; @@ -384,6 +521,7 @@ tok_reserve_buf(struct tok_state *tok, Py_ssize_t size) tok->start = start < 0 ? NULL : tok->buf + start; tok->line_start = line_start < 0 ? NULL : tok->buf + line_start; tok->multi_line_start = multi_line_start < 0 ? NULL : tok->buf + multi_line_start; + restore_fstring_buffers(tok); } return 1; } @@ -838,6 +976,7 @@ _PyTokenizer_Free(struct tok_state *tok) if (tok->interactive_src_start != NULL) { PyMem_Free(tok->interactive_src_start); } + free_fstring_expressions(tok); PyMem_Free(tok); } @@ -854,6 +993,9 @@ tok_readline_raw(struct tok_state *tok) if (line == NULL) { return 1; } + if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { + return 0; + } if (tok->fp_interactive && tok_concatenate_interactive_new_line(tok, line) == -1) { return 0; @@ -941,6 +1083,7 @@ tok_underflow_interactive(struct tok_state *tok) { } else if (tok->start != NULL) { Py_ssize_t cur_multi_line_start = tok->multi_line_start - tok->buf; + remember_fstring_buffers(tok); size_t size = strlen(newtok); ADVANCE_LINENO(); if (!tok_reserve_buf(tok, size + 1)) { @@ -953,8 +1096,10 @@ tok_underflow_interactive(struct tok_state *tok) { PyMem_Free(newtok); tok->inp += size; tok->multi_line_start = tok->buf + cur_multi_line_start; + restore_fstring_buffers(tok); } else { + remember_fstring_buffers(tok); ADVANCE_LINENO(); PyMem_Free(tok->buf); tok->buf = newtok; @@ -962,6 +1107,7 @@ tok_underflow_interactive(struct tok_state *tok) { tok->line_start = tok->buf; tok->inp = strchr(tok->buf, '\0'); tok->end = tok->inp + 1; + restore_fstring_buffers(tok); } if (tok->done != E_OK) { if (tok->prompt != NULL) { @@ -969,6 +1115,10 @@ tok_underflow_interactive(struct tok_state *tok) { } return 0; } + + if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) { + return 0; + } return 1; } @@ -1073,7 +1223,7 @@ tok_nextc(struct tok_state *tok) return Py_CHARMASK(*tok->cur++); /* Fast path */ } if (tok->done != E_OK) { - return EOF; + return EOF; } if (tok->fp == NULL) { rc = tok_underflow_string(tok); @@ -1115,7 +1265,7 @@ tok_backup(struct tok_state *tok, int c) if (--tok->cur < tok->buf) { Py_FatalError("tokenizer beginning of buffer"); } - if ((int)(unsigned char)*tok->cur != c) { + if ((int)(unsigned char)*tok->cur != Py_CHARMASK(c)) { Py_FatalError("tok_backup: 
wrong character"); } tok->col_offset--; @@ -1172,6 +1322,7 @@ _syntaxerror_range(struct tok_state *tok, const char *format, static int syntaxerror(struct tok_state *tok, const char *format, ...) { + // This errors are cleaned on startup. Todo: Fix it. va_list vargs; va_start(vargs, format); int ret = _syntaxerror_range(tok, format, -1, -1, vargs); @@ -1234,6 +1385,41 @@ parser_warn(struct tok_state *tok, PyObject *category, const char *format, ...) return -1; } +static int +warn_invalid_escape_sequence(struct tok_state *tok, int first_invalid_escape_char) +{ + + if (!tok->tok_report_warnings) { + return 0; + } + + PyObject *msg = PyUnicode_FromFormat( + "invalid escape sequence '\\%c'", + (char) first_invalid_escape_char + ); + + if (msg == NULL) { + return -1; + } + + if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, tok->filename, + tok->lineno, NULL, NULL) < 0) { + Py_DECREF(msg); + + if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { + /* Replace the DeprecationWarning exception with a SyntaxError + to get a more accurate error report */ + PyErr_Clear(); + return syntaxerror(tok, "invalid escape sequence '\\%c'", (char) first_invalid_escape_char); + } + + return -1; + } + + Py_DECREF(msg); + return 0; +} + static int lookahead(struct tok_state *tok, const char *test) { @@ -1389,7 +1575,6 @@ tok_decimal_tail(struct tok_state *tok) return c; } -/* Get next token, after space stripping etc. */ static inline int tok_continuation_line(struct tok_state *tok) { @@ -1427,7 +1612,12 @@ token_setup(struct tok_state *tok, struct token *token, int type, const char *st { assert((start == NULL && end == NULL) || (start != NULL && end != NULL)); token->level = tok->level; - token->lineno = type == STRING ? tok->first_lineno : tok->lineno; + if (ISSTRINGLIT(type)) { + token->lineno = tok->first_lineno; + } + else { + token->lineno = tok->lineno; + } token->end_lineno = tok->lineno; token->col_offset = token->end_col_offset = -1; token->start = start; @@ -1441,7 +1631,7 @@ token_setup(struct tok_state *tok, struct token *token, int type, const char *st } static int -tok_get(struct tok_state *tok, struct token *token) +tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token) { int c; int blankline, nonascii; @@ -1602,6 +1792,11 @@ tok_get(struct tok_state *tok, struct token *token) /* Skip comment, unless it's a type comment */ if (c == '#') { + + if (INSIDE_FSTRING(tok)) { + return MAKE_TOKEN(syntaxerror(tok, "f-string expression part cannot include '#'")); + } + const char *prefix, *p, *type_start; int current_starting_col_offset; @@ -1703,6 +1898,9 @@ tok_get(struct tok_state *tok, struct token *token) } c = tok_nextc(tok); if (c == '"' || c == '\'') { + if (saw_f) { + goto f_string_quote; + } goto letter_quote; } } @@ -1748,7 +1946,9 @@ tok_get(struct tok_state *tok, struct token *token) int ahead_tok_kind; memcpy(&ahead_tok, tok, sizeof(ahead_tok)); - ahead_tok_kind = tok_get(&ahead_tok, &ahead_token); + ahead_tok_kind = tok_get_normal_mode(&ahead_tok, + current_tok, + &ahead_token); if (ahead_tok_kind == NAME && ahead_tok.cur - ahead_tok.start == 3 @@ -2003,6 +2203,72 @@ tok_get(struct tok_state *tok, struct token *token) return MAKE_TOKEN(NUMBER); } + f_string_quote: + if (((tolower(*tok->start) == 'f' || tolower(*tok->start) == 'r') && (c == '\'' || c == '"'))) { + int quote = c; + int quote_size = 1; /* 1 or 3 */ + + /* Nodes of type STRING, especially multi line strings + must be handled differently in order to get both + the starting line 
number and the column offset right. + (cf. issue 16806) */ + tok->first_lineno = tok->lineno; + tok->multi_line_start = tok->line_start; + + /* Find the quote size and start of string */ + int after_quote = tok_nextc(tok); + if (after_quote == quote) { + int after_after_quote = tok_nextc(tok); + if (after_after_quote == quote) { + quote_size = 3; + } + else { + // TODO: Check this + tok_backup(tok, after_after_quote); + tok_backup(tok, after_quote); + } + } + if (after_quote != quote) { + tok_backup(tok, after_quote); + } + + + p_start = tok->start; + p_end = tok->cur; + if (tok->tok_mode_stack_index + 1 >= MAXFSTRINGLEVEL) { + return MAKE_TOKEN(syntaxerror(tok, "too many nested f-strings")); + } + tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok); + the_current_tok->kind = TOK_FSTRING_MODE; + the_current_tok->f_string_quote = quote; + the_current_tok->f_string_quote_size = quote_size; + the_current_tok->f_string_start = tok->start; + the_current_tok->f_string_multi_line_start = tok->line_start; + the_current_tok->f_string_start_offset = -1; + the_current_tok->f_string_multi_line_start_offset = -1; + the_current_tok->last_expr_buffer = NULL; + the_current_tok->last_expr_size = 0; + the_current_tok->last_expr_end = -1; + the_current_tok->f_string_debug = 0; + + switch (*tok->start) { + case 'F': + case 'f': + the_current_tok->f_string_raw = tolower(*(tok->start + 1)) == 'r'; + break; + case 'R': + case 'r': + the_current_tok->f_string_raw = 1; + break; + default: + Py_UNREACHABLE(); + } + + the_current_tok->curly_bracket_depth = 0; + the_current_tok->curly_bracket_expr_start_depth = -1; + return MAKE_TOKEN(FSTRING_START); + } + letter_quote: /* String */ if (c == '\'' || c == '"') { @@ -2047,6 +2313,20 @@ tok_get(struct tok_state *tok, struct token *token) tok->line_start = tok->multi_line_start; int start = tok->lineno; tok->lineno = tok->first_lineno; + + if (INSIDE_FSTRING(tok)) { + /* When we are in an f-string, before raising the + * unterminated string literal error, check whether + * does the initial quote matches with f-strings quotes + * and if it is, then this must be a missing '}' token + * so raise the proper error */ + tokenizer_mode *the_current_tok = TOK_GET_MODE(tok); + if (the_current_tok->f_string_quote == quote && + the_current_tok->f_string_quote_size == quote_size) { + return MAKE_TOKEN(syntaxerror(tok, "f-string: expecting '}'", start)); + } + } + if (quote_size == 3) { syntaxerror(tok, "unterminated triple-quoted string literal" " (detected at line %d)", start); @@ -2089,6 +2369,28 @@ tok_get(struct tok_state *tok, struct token *token) goto again; /* Read next line */ } + /* Punctuation character */ + int is_punctuation = (c == ':' || c == '}' || c == '!' 
|| c == '{'); + if (is_punctuation && INSIDE_FSTRING(tok) && INSIDE_FSTRING_EXPR(current_tok)) { + /* This code block gets executed before the curly_bracket_depth is incremented + * by the `{` case, so for ensuring that we are on the 0th level, we need + * to adjust it manually */ + int cursor = current_tok->curly_bracket_depth - (c != '{'); + if (cursor == 0 && !update_fstring_expr(tok, c)) { + return MAKE_TOKEN(ENDMARKER); + } + if (cursor == 0 && c != '{' && set_fstring_expr(tok, token, c)) { + return MAKE_TOKEN(ERRORTOKEN); + } + + if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) { + current_tok->kind = TOK_FSTRING_MODE; + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(_PyToken_OneChar(c)); + } + } + /* Check for two-character token */ { int c2 = tok_nextc(tok); @@ -2121,11 +2423,17 @@ tok_get(struct tok_state *tok, struct token *token) tok->parenlinenostack[tok->level] = tok->lineno; tok->parencolstack[tok->level] = (int)(tok->start - tok->line_start); tok->level++; + if (INSIDE_FSTRING(tok)) { + current_tok->curly_bracket_depth++; + } break; case ')': case ']': case '}': if (!tok->level) { + if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') { + return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed")); + } return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c)); } tok->level--; @@ -2134,6 +2442,18 @@ tok_get(struct tok_state *tok, struct token *token) (opening == '[' && c == ']') || (opening == '{' && c == '}'))) { + /* If the opening bracket belongs to an f-string's expression + part (e.g. f"{)}") and the closing bracket is an arbitrary + nested expression, then instead of matching a different + syntactical construct with it; we'll throw an unmatched + parentheses error. */ + if (INSIDE_FSTRING(tok) && opening == '{') { + assert(current_tok->curly_bracket_depth >= 0); + int previous_bracket = current_tok->curly_bracket_depth - 1; + if (previous_bracket == current_tok->curly_bracket_expr_start_depth) { + return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c)); + } + } if (tok->parenlinenostack[tok->level] != tok->lineno) { return MAKE_TOKEN(syntaxerror(tok, "closing parenthesis '%c' does not match " @@ -2147,6 +2467,17 @@ tok_get(struct tok_state *tok, struct token *token) c, opening)); } } + + if (INSIDE_FSTRING(tok)) { + current_tok->curly_bracket_depth--; + if (c == '}' && current_tok->curly_bracket_depth == current_tok->curly_bracket_expr_start_depth) { + current_tok->curly_bracket_expr_start_depth--; + current_tok->kind = TOK_FSTRING_MODE; + current_tok->f_string_debug = 0; + } + } + break; + default: break; } @@ -2156,12 +2487,205 @@ tok_get(struct tok_state *tok, struct token *token) return MAKE_TOKEN(syntaxerror(tok, "invalid non-printable character U+%s", hex)); } + if( c == '=' && INSIDE_FSTRING_EXPR(current_tok)) { + current_tok->f_string_debug = 1; + } + /* Punctuation character */ p_start = tok->start; p_end = tok->cur; return MAKE_TOKEN(_PyToken_OneChar(c)); } +static int +tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token) +{ + const char *p_start = NULL; + const char *p_end = NULL; + int end_quote_size = 0; + int unicode_escape = 0; + + tok->start = tok->cur; + tok->first_lineno = tok->lineno; + tok->starting_col_offset = tok->col_offset; + + // If we start with a bracket, we defer to the normal mode as there is nothing for us to tokenize + // before it. 
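As a rough illustration of the token stream the two modes above are meant to produce, the stdlib tokenize module can be used to inspect the FSTRING_START / FSTRING_MIDDLE / FSTRING_END pieces once a build with this change is available. The snippet below is a sketch: the sample string and the names a and b are arbitrary, and on interpreters without this change the same input is simply reported as a single STRING token.

import io
import tokenize

# Arbitrary sample: literal text, an expression part, and a format spec.
src = 'f"value: {a + b:.3f} units"\n'
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    # Roughly expected: FSTRING_START 'f"', FSTRING_MIDDLE 'value: ',
    # OP '{', the expression's NAME/OP tokens, OP ':',
    # FSTRING_MIDDLE '.3f', OP '}', FSTRING_MIDDLE ' units',
    # FSTRING_END '"', plus the usual NEWLINE/ENDMARKER.
    print(tokenize.tok_name[tok.type], repr(tok.string))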
+ int start_char = tok_nextc(tok); + if (start_char == '{') { + int peek1 = tok_nextc(tok); + tok_backup(tok, peek1); + tok_backup(tok, start_char); + if (peek1 != '{') { + current_tok->curly_bracket_expr_start_depth++; + if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) { + return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply")); + } + TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; + return tok_get_normal_mode(tok, current_tok, token); + } + } + else { + tok_backup(tok, start_char); + } + + // Check if we are at the end of the string + for (int i = 0; i < current_tok->f_string_quote_size; i++) { + int quote = tok_nextc(tok); + if (quote != current_tok->f_string_quote) { + tok_backup(tok, quote); + goto f_string_middle; + } + } + + if (current_tok->last_expr_buffer != NULL) { + PyMem_Free(current_tok->last_expr_buffer); + current_tok->last_expr_buffer = NULL; + current_tok->last_expr_size = 0; + current_tok->last_expr_end = -1; + } + + p_start = tok->start; + p_end = tok->cur; + tok->tok_mode_stack_index--; + return MAKE_TOKEN(FSTRING_END); + +f_string_middle: + + while (end_quote_size != current_tok->f_string_quote_size) { + int c = tok_nextc(tok); + if (c == EOF || (current_tok->f_string_quote_size == 1 && c == '\n')) { + if (tok->decoding_erred) { + return MAKE_TOKEN(ERRORTOKEN); + } + + assert(tok->multi_line_start != NULL); + // shift the tok_state's location into + // the start of string, and report the error + // from the initial quote character + tok->cur = (char *)current_tok->f_string_start; + tok->cur++; + tok->line_start = current_tok->f_string_multi_line_start; + int start = tok->lineno; + tok->lineno = tok->first_lineno; + + if (current_tok->f_string_quote_size == 3) { + return MAKE_TOKEN(syntaxerror(tok, + "unterminated triple-quoted f-string literal" + " (detected at line %d)", start)); + } + else { + return MAKE_TOKEN(syntaxerror(tok, + "unterminated f-string literal (detected at" + " line %d)", start)); + } + } + + if (c == current_tok->f_string_quote) { + end_quote_size += 1; + continue; + } else { + end_quote_size = 0; + } + + int in_format_spec = ( + current_tok->last_expr_end != -1 + && + INSIDE_FSTRING_EXPR(current_tok) + ); + if (c == '{') { + int peek = tok_nextc(tok); + if (peek != '{' || in_format_spec) { + tok_backup(tok, peek); + tok_backup(tok, c); + current_tok->curly_bracket_expr_start_depth++; + if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) { + return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply")); + } + TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; + p_start = tok->start; + p_end = tok->cur; + } else { + p_start = tok->start; + p_end = tok->cur - 1; + } + return MAKE_TOKEN(FSTRING_MIDDLE); + } else if (c == '}') { + if (unicode_escape) { + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(FSTRING_MIDDLE); + } + int peek = tok_nextc(tok); + + // The tokenizer can only be in the format spec if we have already completed the expression + // scanning (indicated by the end of the expression being set) and we are not at the top level + // of the bracket stack (-1 is the top level). Since format specifiers can't legally use double + // brackets, we can bypass it here. 
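The bypass above leans on two properties of replacement fields that are easy to check from Python itself: doubled braces outside an expression are literal text rather than an expression, while a single '{' inside a format spec opens a nested replacement field. The demo below is illustrative only; value, width and precision are names made up for the example.

# Doubled braces stay literal; a brace inside the format spec nests an expression.
value, width, precision = 2.34567, 9, 3
print(f"{{not an expression}} {value:{width}.{precision}f}")
# Prints: {not an expression}     2.346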
+ if (peek == '}' && !in_format_spec) { + p_start = tok->start; + p_end = tok->cur - 1; + } else { + tok_backup(tok, peek); + tok_backup(tok, c); + TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE; + p_start = tok->start; + p_end = tok->cur; + } + return MAKE_TOKEN(FSTRING_MIDDLE); + } else if (c == '\\') { + int peek = tok_nextc(tok); + // Special case when the backslash is right before a curly + // brace. We have to restore and return the control back + // to the loop for the next iteration. + if (peek == '{' || peek == '}') { + if (!current_tok->f_string_raw) { + if (warn_invalid_escape_sequence(tok, peek)) { + return MAKE_TOKEN(ERRORTOKEN); + } + } + tok_backup(tok, peek); + continue; + } + + if (!current_tok->f_string_raw) { + if (peek == 'N') { + /* Handle named unicode escapes (\N{BULLET}) */ + peek = tok_nextc(tok); + if (peek == '{') { + unicode_escape = 1; + } else { + tok_backup(tok, peek); + } + } + } /* else { + skip the escaped character + }*/ + } + } + + // Backup the f-string quotes to emit a final FSTRING_MIDDLE and + // add the quotes to the FSTRING_END in the next tokenizer iteration. + for (int i = 0; i < current_tok->f_string_quote_size; i++) { + tok_backup(tok, current_tok->f_string_quote); + } + p_start = tok->start; + p_end = tok->cur; + return MAKE_TOKEN(FSTRING_MIDDLE); +} + + +static int +tok_get(struct tok_state *tok, struct token *token) +{ + tokenizer_mode *current_tok = TOK_GET_MODE(tok); + if (current_tok->kind == TOK_REGULAR_MODE) { + return tok_get_normal_mode(tok, current_tok, token); + } else { + return tok_get_fstring_mode(tok, current_tok, token); + } +} + int _PyTokenizer_Get(struct tok_state *tok, struct token *token) { diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h index 16a94d5f51d664..5e2171885ac75b 100644 --- a/Parser/tokenizer.h +++ b/Parser/tokenizer.h @@ -10,8 +10,9 @@ extern "C" { #include "pycore_token.h" /* For token types */ -#define MAXINDENT 100 /* Max indentation level */ -#define MAXLEVEL 200 /* Max parentheses level */ +#define MAXINDENT 100 /* Max indentation level */ +#define MAXLEVEL 200 /* Max parentheses level */ +#define MAXFSTRINGLEVEL 150 /* Max f-string nesting level */ enum decoding_state { STATE_INIT, @@ -31,8 +32,37 @@ struct token { int level; int lineno, col_offset, end_lineno, end_col_offset; const char *start, *end; + PyObject *metadata; }; +enum tokenizer_mode_kind_t { + TOK_REGULAR_MODE, + TOK_FSTRING_MODE, +}; + +#define MAX_EXPR_NESTING 3 + +typedef struct _tokenizer_mode { + enum tokenizer_mode_kind_t kind; + + int curly_bracket_depth; + int curly_bracket_expr_start_depth; + + char f_string_quote; + int f_string_quote_size; + int f_string_raw; + const char* f_string_start; + const char* f_string_multi_line_start; + + Py_ssize_t f_string_start_offset; + Py_ssize_t f_string_multi_line_start_offset; + + Py_ssize_t last_expr_size; + Py_ssize_t last_expr_end; + char* last_expr_buffer; + int f_string_debug; +} tokenizer_mode; + /* Tokenizer state */ struct tok_state { /* Input state; buf <= cur <= inp <= end */ @@ -93,6 +123,10 @@ struct tok_state { /* How to proceed when asked for a new token in interactive mode */ enum interactive_underflow_t interactive_underflow; int report_warnings; + // TODO: Factor this into its own thing + tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL]; + int tok_mode_stack_index; + int tok_report_warnings; #ifdef Py_DEBUG int debug; #endif diff --git a/Programs/_freeze_module.c b/Programs/_freeze_module.c index 90fc2dc6e87da8..e55f1d56745c4d 100644 --- a/Programs/_freeze_module.c +++ 
b/Programs/_freeze_module.c @@ -1,6 +1,5 @@ /* This is built as a stand-alone executable by the Makefile, and helps turn - modules into frozen modules (like Lib/importlib/_bootstrap.py - into Python/importlib.h). + modules into frozen modules. This is used directly by Tools/build/freeze_modules.py, and indirectly by "make regen-frozen". diff --git a/Programs/_testembed.c b/Programs/_testembed.c index 00717114b40286..f78ba41fe7b4eb 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -1911,14 +1911,13 @@ static int test_unicode_id_init(void) str1 = _PyUnicode_FromId(&PyId_test_unicode_id_init); assert(str1 != NULL); - assert(Py_REFCNT(str1) == 1); + assert(_Py_IsImmortal(str1)); str2 = PyUnicode_FromString("test_unicode_id_init"); assert(str2 != NULL); assert(PyUnicode_Compare(str1, str2) == 0); - // str1 is a borrowed reference Py_DECREF(str2); Py_Finalize(); diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h index 8e5055bd7bceb1..cd9d1032629f49 100644 --- a/Programs/test_frozenmain.h +++ b/Programs/test_frozenmain.h @@ -1,39 +1,38 @@ // Auto-generated by Programs/freeze_test_frozenmain.py unsigned char M_test_frozenmain[] = { 227,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0, - 0,0,0,0,0,243,182,0,0,0,151,0,100,0,100,1, + 0,0,0,0,0,243,162,0,0,0,151,0,100,0,100,1, 108,0,90,0,100,0,100,1,108,1,90,1,2,0,101,2, - 100,2,171,1,0,0,0,0,0,0,0,0,1,0,2,0, - 101,2,100,3,101,0,106,6,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,171,2,0,0,0,0, - 0,0,0,0,1,0,2,0,101,1,106,8,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,171,0, - 0,0,0,0,0,0,0,0,100,4,25,0,0,0,0,0, - 0,0,0,0,90,5,100,5,68,0,93,23,0,0,90,6, - 2,0,101,2,100,6,101,6,155,0,100,7,101,5,101,6, - 25,0,0,0,0,0,0,0,0,0,155,0,157,4,171,1, - 0,0,0,0,0,0,0,0,1,0,140,25,4,0,121,1, - 41,8,233,0,0,0,0,78,122,18,70,114,111,122,101,110, - 32,72,101,108,108,111,32,87,111,114,108,100,122,8,115,121, - 115,46,97,114,103,118,218,6,99,111,110,102,105,103,41,5, - 218,12,112,114,111,103,114,97,109,95,110,97,109,101,218,10, - 101,120,101,99,117,116,97,98,108,101,218,15,117,115,101,95, - 101,110,118,105,114,111,110,109,101,110,116,218,17,99,111,110, - 102,105,103,117,114,101,95,99,95,115,116,100,105,111,218,14, - 98,117,102,102,101,114,101,100,95,115,116,100,105,111,122,7, - 99,111,110,102,105,103,32,122,2,58,32,41,7,218,3,115, - 121,115,218,17,95,116,101,115,116,105,110,116,101,114,110,97, - 108,99,97,112,105,218,5,112,114,105,110,116,218,4,97,114, - 103,118,218,11,103,101,116,95,99,111,110,102,105,103,115,114, - 3,0,0,0,218,3,107,101,121,169,0,243,0,0,0,0, - 250,18,116,101,115,116,95,102,114,111,122,101,110,109,97,105, - 110,46,112,121,250,8,60,109,111,100,117,108,101,62,114,18, - 0,0,0,1,0,0,0,115,100,0,0,0,240,3,1,1, - 1,243,8,0,1,11,219,0,24,225,0,5,208,6,26,213, - 0,27,217,0,5,128,106,144,35,151,40,145,40,213,0,27, - 216,9,38,208,9,26,215,9,38,209,9,38,212,9,40,168, - 24,212,9,50,128,6,240,2,6,12,2,242,0,7,1,42, - 128,67,241,14,0,5,10,208,10,40,144,67,209,10,40,152, - 54,160,35,156,59,209,10,40,214,4,41,241,15,7,1,42, - 114,16,0,0,0, + 100,2,171,1,0,0,0,0,0,0,1,0,2,0,101,2, + 100,3,101,0,106,6,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,171,2,0,0,0,0,0,0, + 1,0,2,0,101,1,106,8,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,171,0,0,0,0,0, + 0,0,100,4,25,0,0,0,90,5,100,5,68,0,93,19, + 0,0,90,6,2,0,101,2,100,6,101,6,155,0,100,7, + 101,5,101,6,25,0,0,0,155,0,157,4,171,1,0,0, + 0,0,0,0,1,0,140,21,4,0,121,1,41,8,233,0, + 0,0,0,78,122,18,70,114,111,122,101,110,32,72,101,108, + 108,111,32,87,111,114,108,100,122,8,115,121,115,46,97,114, + 
103,118,218,6,99,111,110,102,105,103,41,5,218,12,112,114, + 111,103,114,97,109,95,110,97,109,101,218,10,101,120,101,99, + 117,116,97,98,108,101,218,15,117,115,101,95,101,110,118,105, + 114,111,110,109,101,110,116,218,17,99,111,110,102,105,103,117, + 114,101,95,99,95,115,116,100,105,111,218,14,98,117,102,102, + 101,114,101,100,95,115,116,100,105,111,122,7,99,111,110,102, + 105,103,32,122,2,58,32,41,7,218,3,115,121,115,218,17, + 95,116,101,115,116,105,110,116,101,114,110,97,108,99,97,112, + 105,218,5,112,114,105,110,116,218,4,97,114,103,118,218,11, + 103,101,116,95,99,111,110,102,105,103,115,114,3,0,0,0, + 218,3,107,101,121,169,0,243,0,0,0,0,250,18,116,101, + 115,116,95,102,114,111,122,101,110,109,97,105,110,46,112,121, + 250,8,60,109,111,100,117,108,101,62,114,18,0,0,0,1, + 0,0,0,115,102,0,0,0,240,3,1,1,1,243,8,0, + 1,11,219,0,24,225,0,5,208,6,26,212,0,27,217,0, + 5,128,106,144,35,151,40,145,40,212,0,27,216,9,38,208, + 9,26,215,9,38,209,9,38,211,9,40,168,24,209,9,50, + 128,6,240,2,6,12,2,242,0,7,1,42,128,67,241,14, + 0,5,10,136,71,144,67,144,53,152,2,152,54,160,35,153, + 59,152,45,208,10,40,213,4,41,241,15,7,1,42,114,16, + 0,0,0, }; diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c index 8daa9877254e2e..416dc5971bca3d 100644 --- a/Python/Python-tokenize.c +++ b/Python/Python-tokenize.c @@ -86,8 +86,8 @@ tokenizeriter_next(tokenizeriterobject *it) Py_DECREF(str); return NULL; } - const char *line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start; - int lineno = type == STRING ? it->tok->first_lineno : it->tok->lineno; + const char *line_start = ISSTRINGLIT(type) ? it->tok->multi_line_start : it->tok->line_start; + int lineno = ISSTRINGLIT(type) ? it->tok->first_lineno : it->tok->lineno; int end_lineno = it->tok->lineno; int col_offset = -1; int end_col_offset = -1; diff --git a/Python/assemble.c b/Python/assemble.c new file mode 100644 index 00000000000000..6889831ae3fe0c --- /dev/null +++ b/Python/assemble.c @@ -0,0 +1,602 @@ +#include <stdbool.h> + +#include "Python.h" +#include "pycore_code.h" // write_location_entry_start() +#include "pycore_compile.h" +#include "pycore_opcode.h" // _PyOpcode_Caches[] and opcode category macros +#include "pycore_pymem.h" // _PyMem_IsPtrFreed() + + +#define DEFAULT_CODE_SIZE 128 +#define DEFAULT_LNOTAB_SIZE 16 +#define DEFAULT_CNOTAB_SIZE 32 + +#undef SUCCESS +#undef ERROR +#define SUCCESS 0 +#define ERROR -1 + +#define RETURN_IF_ERROR(X) \ + if ((X) == -1) { \ + return ERROR; \ + } + +typedef _PyCompilerSrcLocation location; +typedef _PyCompile_Instruction instruction; +typedef _PyCompile_InstructionSequence instr_sequence; + +static inline bool +same_location(location a, location b) +{ + return a.lineno == b.lineno && + a.end_lineno == b.end_lineno && + a.col_offset == b.col_offset && + a.end_col_offset == b.end_col_offset; +} + +struct assembler { + PyObject *a_bytecode; /* bytes containing bytecode */ + int a_offset; /* offset into bytecode */ + PyObject *a_except_table; /* bytes containing exception table */ + int a_except_table_off; /* offset into exception table */ + /* Location Info */ + int a_lineno; /* lineno of last emitted instruction */ + PyObject* a_linetable; /* bytes containing location info */ + int a_location_off; /* offset of last written location info frame */ +}; + +static int +assemble_init(struct assembler *a, int firstlineno) +{ + memset(a, 0, sizeof(struct assembler)); + a->a_lineno = firstlineno; + a->a_linetable = NULL; + a->a_location_off = 0; + a->a_except_table = NULL; + a->a_bytecode = 
PyBytes_FromStringAndSize(NULL, DEFAULT_CODE_SIZE); + if (a->a_bytecode == NULL) { + goto error; + } + a->a_linetable = PyBytes_FromStringAndSize(NULL, DEFAULT_CNOTAB_SIZE); + if (a->a_linetable == NULL) { + goto error; + } + a->a_except_table = PyBytes_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE); + if (a->a_except_table == NULL) { + goto error; + } + return SUCCESS; +error: + Py_XDECREF(a->a_bytecode); + Py_XDECREF(a->a_linetable); + Py_XDECREF(a->a_except_table); + return ERROR; +} + +static void +assemble_free(struct assembler *a) +{ + Py_XDECREF(a->a_bytecode); + Py_XDECREF(a->a_linetable); + Py_XDECREF(a->a_except_table); +} + +static inline void +write_except_byte(struct assembler *a, int byte) { + unsigned char *p = (unsigned char *) PyBytes_AS_STRING(a->a_except_table); + p[a->a_except_table_off++] = byte; +} + +#define CONTINUATION_BIT 64 + +static void +assemble_emit_exception_table_item(struct assembler *a, int value, int msb) +{ + assert ((msb | 128) == 128); + assert(value >= 0 && value < (1 << 30)); + if (value >= 1 << 24) { + write_except_byte(a, (value >> 24) | CONTINUATION_BIT | msb); + msb = 0; + } + if (value >= 1 << 18) { + write_except_byte(a, ((value >> 18)&0x3f) | CONTINUATION_BIT | msb); + msb = 0; + } + if (value >= 1 << 12) { + write_except_byte(a, ((value >> 12)&0x3f) | CONTINUATION_BIT | msb); + msb = 0; + } + if (value >= 1 << 6) { + write_except_byte(a, ((value >> 6)&0x3f) | CONTINUATION_BIT | msb); + msb = 0; + } + write_except_byte(a, (value&0x3f) | msb); +} + +/* See Objects/exception_handling_notes.txt for details of layout */ +#define MAX_SIZE_OF_ENTRY 20 + +static int +assemble_emit_exception_table_entry(struct assembler *a, int start, int end, + _PyCompile_ExceptHandlerInfo *handler) +{ + Py_ssize_t len = PyBytes_GET_SIZE(a->a_except_table); + if (a->a_except_table_off + MAX_SIZE_OF_ENTRY >= len) { + RETURN_IF_ERROR(_PyBytes_Resize(&a->a_except_table, len * 2)); + } + int size = end-start; + assert(end > start); + int target = handler->h_offset; + int depth = handler->h_startdepth - 1; + if (handler->h_preserve_lasti) { + depth -= 1; + } + assert(depth >= 0); + int depth_lasti = (depth<<1) | handler->h_preserve_lasti; + assemble_emit_exception_table_item(a, start, (1<<7)); + assemble_emit_exception_table_item(a, size, 0); + assemble_emit_exception_table_item(a, target, 0); + assemble_emit_exception_table_item(a, depth_lasti, 0); + return SUCCESS; +} + +static int +assemble_exception_table(struct assembler *a, instr_sequence *instrs) +{ + int ioffset = 0; + _PyCompile_ExceptHandlerInfo handler; + handler.h_offset = -1; + int start = -1; + for (int i = 0; i < instrs->s_used; i++) { + instruction *instr = &instrs->s_instrs[i]; + if (instr->i_except_handler_info.h_offset != handler.h_offset) { + if (handler.h_offset >= 0) { + RETURN_IF_ERROR( + assemble_emit_exception_table_entry(a, start, ioffset, &handler)); + } + start = ioffset; + handler = instr->i_except_handler_info; + } + ioffset += _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg); + } + if (handler.h_offset >= 0) { + RETURN_IF_ERROR(assemble_emit_exception_table_entry(a, start, ioffset, &handler)); + } + return SUCCESS; +} + + +/* Code location emitting code. See locations.md for a description of the format. 
*/ + +#define MSB 0x80 + +static void +write_location_byte(struct assembler* a, int val) +{ + PyBytes_AS_STRING(a->a_linetable)[a->a_location_off] = val&255; + a->a_location_off++; +} + + +static uint8_t * +location_pointer(struct assembler* a) +{ + return (uint8_t *)PyBytes_AS_STRING(a->a_linetable) + + a->a_location_off; +} + +static void +write_location_first_byte(struct assembler* a, int code, int length) +{ + a->a_location_off += write_location_entry_start( + location_pointer(a), code, length); +} + +static void +write_location_varint(struct assembler* a, unsigned int val) +{ + uint8_t *ptr = location_pointer(a); + a->a_location_off += write_varint(ptr, val); +} + + +static void +write_location_signed_varint(struct assembler* a, int val) +{ + uint8_t *ptr = location_pointer(a); + a->a_location_off += write_signed_varint(ptr, val); +} + +static void +write_location_info_short_form(struct assembler* a, int length, int column, int end_column) +{ + assert(length > 0 && length <= 8); + int column_low_bits = column & 7; + int column_group = column >> 3; + assert(column < 80); + assert(end_column >= column); + assert(end_column - column < 16); + write_location_first_byte(a, PY_CODE_LOCATION_INFO_SHORT0 + column_group, length); + write_location_byte(a, (column_low_bits << 4) | (end_column - column)); +} + +static void +write_location_info_oneline_form(struct assembler* a, int length, int line_delta, int column, int end_column) +{ + assert(length > 0 && length <= 8); + assert(line_delta >= 0 && line_delta < 3); + assert(column < 128); + assert(end_column < 128); + write_location_first_byte(a, PY_CODE_LOCATION_INFO_ONE_LINE0 + line_delta, length); + write_location_byte(a, column); + write_location_byte(a, end_column); +} + +static void +write_location_info_long_form(struct assembler* a, location loc, int length) +{ + assert(length > 0 && length <= 8); + write_location_first_byte(a, PY_CODE_LOCATION_INFO_LONG, length); + write_location_signed_varint(a, loc.lineno - a->a_lineno); + assert(loc.end_lineno >= loc.lineno); + write_location_varint(a, loc.end_lineno - loc.lineno); + write_location_varint(a, loc.col_offset + 1); + write_location_varint(a, loc.end_col_offset + 1); +} + +static void +write_location_info_none(struct assembler* a, int length) +{ + write_location_first_byte(a, PY_CODE_LOCATION_INFO_NONE, length); +} + +static void +write_location_info_no_column(struct assembler* a, int length, int line_delta) +{ + write_location_first_byte(a, PY_CODE_LOCATION_INFO_NO_COLUMNS, length); + write_location_signed_varint(a, line_delta); +} + +#define THEORETICAL_MAX_ENTRY_SIZE 25 /* 1 + 6 + 6 + 6 + 6 */ + + +static int +write_location_info_entry(struct assembler* a, location loc, int isize) +{ + Py_ssize_t len = PyBytes_GET_SIZE(a->a_linetable); + if (a->a_location_off + THEORETICAL_MAX_ENTRY_SIZE >= len) { + assert(len > THEORETICAL_MAX_ENTRY_SIZE); + RETURN_IF_ERROR(_PyBytes_Resize(&a->a_linetable, len*2)); + } + if (loc.lineno < 0) { + write_location_info_none(a, isize); + return SUCCESS; + } + int line_delta = loc.lineno - a->a_lineno; + int column = loc.col_offset; + int end_column = loc.end_col_offset; + assert(column >= -1); + assert(end_column >= -1); + if (column < 0 || end_column < 0) { + if (loc.end_lineno == loc.lineno || loc.end_lineno == -1) { + write_location_info_no_column(a, isize, line_delta); + a->a_lineno = loc.lineno; + return SUCCESS; + } + } + else if (loc.end_lineno == loc.lineno) { + if (line_delta == 0 && column < 80 && end_column - column < 16 && end_column >= column) { 
+ write_location_info_short_form(a, isize, column, end_column); + return SUCCESS; + } + if (line_delta >= 0 && line_delta < 3 && column < 128 && end_column < 128) { + write_location_info_oneline_form(a, isize, line_delta, column, end_column); + a->a_lineno = loc.lineno; + return SUCCESS; + } + } + write_location_info_long_form(a, loc, isize); + a->a_lineno = loc.lineno; + return SUCCESS; +} + +static int +assemble_emit_location(struct assembler* a, location loc, int isize) +{ + if (isize == 0) { + return SUCCESS; + } + while (isize > 8) { + RETURN_IF_ERROR(write_location_info_entry(a, loc, 8)); + isize -= 8; + } + return write_location_info_entry(a, loc, isize); +} + +static int +assemble_location_info(struct assembler *a, instr_sequence *instrs, + int firstlineno) +{ + a->a_lineno = firstlineno; + location loc = NO_LOCATION; + int size = 0; + for (int i = 0; i < instrs->s_used; i++) { + instruction *instr = &instrs->s_instrs[i]; + if (!same_location(loc, instr->i_loc)) { + RETURN_IF_ERROR(assemble_emit_location(a, loc, size)); + loc = instr->i_loc; + size = 0; + } + size += _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg); + } + RETURN_IF_ERROR(assemble_emit_location(a, loc, size)); + return SUCCESS; +} + +static void +write_instr(_Py_CODEUNIT *codestr, instruction *instr, int ilen) +{ + int opcode = instr->i_opcode; + assert(!IS_PSEUDO_OPCODE(opcode)); + int oparg = instr->i_oparg; + assert(HAS_ARG(opcode) || oparg == 0); + int caches = _PyOpcode_Caches[opcode]; + switch (ilen - caches) { + case 4: + codestr->op.code = EXTENDED_ARG; + codestr->op.arg = (oparg >> 24) & 0xFF; + codestr++; + /* fall through */ + case 3: + codestr->op.code = EXTENDED_ARG; + codestr->op.arg = (oparg >> 16) & 0xFF; + codestr++; + /* fall through */ + case 2: + codestr->op.code = EXTENDED_ARG; + codestr->op.arg = (oparg >> 8) & 0xFF; + codestr++; + /* fall through */ + case 1: + codestr->op.code = opcode; + codestr->op.arg = oparg & 0xFF; + codestr++; + break; + default: + Py_UNREACHABLE(); + } + while (caches--) { + codestr->op.code = CACHE; + codestr->op.arg = 0; + codestr++; + } +} + +/* assemble_emit_instr() + Extend the bytecode with a new instruction. + Update lnotab if necessary. 
+*/ + +static int +assemble_emit_instr(struct assembler *a, instruction *instr) +{ + Py_ssize_t len = PyBytes_GET_SIZE(a->a_bytecode); + _Py_CODEUNIT *code; + + int size = _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg); + if (a->a_offset + size >= len / (int)sizeof(_Py_CODEUNIT)) { + if (len > PY_SSIZE_T_MAX / 2) { + return ERROR; + } + RETURN_IF_ERROR(_PyBytes_Resize(&a->a_bytecode, len * 2)); + } + code = (_Py_CODEUNIT *)PyBytes_AS_STRING(a->a_bytecode) + a->a_offset; + a->a_offset += size; + write_instr(code, instr, size); + return SUCCESS; +} + +static int +assemble_emit(struct assembler *a, instr_sequence *instrs, + int first_lineno, PyObject *const_cache) +{ + RETURN_IF_ERROR(assemble_init(a, first_lineno)); + + for (int i = 0; i < instrs->s_used; i++) { + instruction *instr = &instrs->s_instrs[i]; + RETURN_IF_ERROR(assemble_emit_instr(a, instr)); + } + + RETURN_IF_ERROR(assemble_location_info(a, instrs, a->a_lineno)); + + RETURN_IF_ERROR(assemble_exception_table(a, instrs)); + + RETURN_IF_ERROR(_PyBytes_Resize(&a->a_except_table, a->a_except_table_off)); + RETURN_IF_ERROR(_PyCompile_ConstCacheMergeOne(const_cache, &a->a_except_table)); + + RETURN_IF_ERROR(_PyBytes_Resize(&a->a_linetable, a->a_location_off)); + RETURN_IF_ERROR(_PyCompile_ConstCacheMergeOne(const_cache, &a->a_linetable)); + + RETURN_IF_ERROR(_PyBytes_Resize(&a->a_bytecode, a->a_offset * sizeof(_Py_CODEUNIT))); + RETURN_IF_ERROR(_PyCompile_ConstCacheMergeOne(const_cache, &a->a_bytecode)); + return SUCCESS; +} + +static PyObject * +dict_keys_inorder(PyObject *dict, Py_ssize_t offset) +{ + PyObject *tuple, *k, *v; + Py_ssize_t i, pos = 0, size = PyDict_GET_SIZE(dict); + + tuple = PyTuple_New(size); + if (tuple == NULL) + return NULL; + while (PyDict_Next(dict, &pos, &k, &v)) { + i = PyLong_AS_LONG(v); + assert((i - offset) < size); + assert((i - offset) >= 0); + PyTuple_SET_ITEM(tuple, i - offset, Py_NewRef(k)); + } + return tuple; +} + +// This is in codeobject.c. +extern void _Py_set_localsplus_info(int, PyObject *, unsigned char, + PyObject *, PyObject *); + +static void +compute_localsplus_info(_PyCompile_CodeUnitMetadata *umd, int nlocalsplus, + PyObject *names, PyObject *kinds) +{ + PyObject *k, *v; + Py_ssize_t pos = 0; + while (PyDict_Next(umd->u_varnames, &pos, &k, &v)) { + int offset = (int)PyLong_AS_LONG(v); + assert(offset >= 0); + assert(offset < nlocalsplus); + // For now we do not distinguish arg kinds. + _PyLocals_Kind kind = CO_FAST_LOCAL; + if (PyDict_Contains(umd->u_fasthidden, k)) { + kind |= CO_FAST_HIDDEN; + } + if (PyDict_GetItem(umd->u_cellvars, k) != NULL) { + kind |= CO_FAST_CELL; + } + _Py_set_localsplus_info(offset, k, kind, names, kinds); + } + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + + // This counter mirrors the fix done in fix_cell_offsets(). + int numdropped = 0; + pos = 0; + while (PyDict_Next(umd->u_cellvars, &pos, &k, &v)) { + if (PyDict_GetItem(umd->u_varnames, k) != NULL) { + // Skip cells that are already covered by locals. 
+ numdropped += 1; + continue; + } + int offset = (int)PyLong_AS_LONG(v); + assert(offset >= 0); + offset += nlocals - numdropped; + assert(offset < nlocalsplus); + _Py_set_localsplus_info(offset, k, CO_FAST_CELL, names, kinds); + } + + pos = 0; + while (PyDict_Next(umd->u_freevars, &pos, &k, &v)) { + int offset = (int)PyLong_AS_LONG(v); + assert(offset >= 0); + offset += nlocals - numdropped; + assert(offset < nlocalsplus); + _Py_set_localsplus_info(offset, k, CO_FAST_FREE, names, kinds); + } +} + +static PyCodeObject * +makecode(_PyCompile_CodeUnitMetadata *umd, struct assembler *a, PyObject *const_cache, + PyObject *constslist, int maxdepth, int nlocalsplus, int code_flags, + PyObject *filename) +{ + PyCodeObject *co = NULL; + PyObject *names = NULL; + PyObject *consts = NULL; + PyObject *localsplusnames = NULL; + PyObject *localspluskinds = NULL; + names = dict_keys_inorder(umd->u_names, 0); + if (!names) { + goto error; + } + if (_PyCompile_ConstCacheMergeOne(const_cache, &names) < 0) { + goto error; + } + + consts = PyList_AsTuple(constslist); /* PyCode_New requires a tuple */ + if (consts == NULL) { + goto error; + } + if (_PyCompile_ConstCacheMergeOne(const_cache, &consts) < 0) { + goto error; + } + + assert(umd->u_posonlyargcount < INT_MAX); + assert(umd->u_argcount < INT_MAX); + assert(umd->u_kwonlyargcount < INT_MAX); + int posonlyargcount = (int)umd->u_posonlyargcount; + int posorkwargcount = (int)umd->u_argcount; + assert(INT_MAX - posonlyargcount - posorkwargcount > 0); + int kwonlyargcount = (int)umd->u_kwonlyargcount; + + localsplusnames = PyTuple_New(nlocalsplus); + if (localsplusnames == NULL) { + goto error; + } + localspluskinds = PyBytes_FromStringAndSize(NULL, nlocalsplus); + if (localspluskinds == NULL) { + goto error; + } + compute_localsplus_info(umd, nlocalsplus, localsplusnames, localspluskinds); + + struct _PyCodeConstructor con = { + .filename = filename, + .name = umd->u_name, + .qualname = umd->u_qualname ? 
umd->u_qualname : umd->u_name, + .flags = code_flags, + + .code = a->a_bytecode, + .firstlineno = umd->u_firstlineno, + .linetable = a->a_linetable, + + .consts = consts, + .names = names, + + .localsplusnames = localsplusnames, + .localspluskinds = localspluskinds, + + .argcount = posonlyargcount + posorkwargcount, + .posonlyargcount = posonlyargcount, + .kwonlyargcount = kwonlyargcount, + + .stacksize = maxdepth, + + .exceptiontable = a->a_except_table, + }; + + if (_PyCode_Validate(&con) < 0) { + goto error; + } + + if (_PyCompile_ConstCacheMergeOne(const_cache, &localsplusnames) < 0) { + goto error; + } + con.localsplusnames = localsplusnames; + + co = _PyCode_New(&con); + if (co == NULL) { + goto error; + } + +error: + Py_XDECREF(names); + Py_XDECREF(consts); + Py_XDECREF(localsplusnames); + Py_XDECREF(localspluskinds); + return co; +} + + +PyCodeObject * +_PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *umd, PyObject *const_cache, + PyObject *consts, int maxdepth, instr_sequence *instrs, + int nlocalsplus, int code_flags, PyObject *filename) +{ + PyCodeObject *co = NULL; + + struct assembler a; + int res = assemble_emit(&a, instrs, umd->u_firstlineno, const_cache); + if (res == SUCCESS) { + co = makecode(umd, &a, const_cache, consts, maxdepth, nlocalsplus, + code_flags, filename); + } + assemble_free(&a); + return co; +} diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 1a0b2a05b1c713..8270fa8e372d93 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -2,6 +2,7 @@ #include "Python.h" #include "pycore_ast.h" // _PyAST_GetDocString() #include "pycore_compile.h" // _PyASTOptimizeState +#include "pycore_long.h" // _PyLong #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_format.h" // F_LJUST @@ -152,7 +153,9 @@ check_complexity(PyObject *obj, Py_ssize_t limit) static PyObject * safe_multiply(PyObject *v, PyObject *w) { - if (PyLong_Check(v) && PyLong_Check(w) && Py_SIZE(v) && Py_SIZE(w)) { + if (PyLong_Check(v) && PyLong_Check(w) && + !_PyLong_IsZero((PyLongObject *)v) && !_PyLong_IsZero((PyLongObject *)w) + ) { size_t vbits = _PyLong_NumBits(v); size_t wbits = _PyLong_NumBits(w); if (vbits == (size_t)-1 || wbits == (size_t)-1) { @@ -198,7 +201,9 @@ safe_multiply(PyObject *v, PyObject *w) static PyObject * safe_power(PyObject *v, PyObject *w) { - if (PyLong_Check(v) && PyLong_Check(w) && Py_SIZE(v) && Py_SIZE(w) > 0) { + if (PyLong_Check(v) && PyLong_Check(w) && + !_PyLong_IsZero((PyLongObject *)v) && _PyLong_IsPositive((PyLongObject *)w) + ) { size_t vbits = _PyLong_NumBits(v); size_t wbits = PyLong_AsSize_t(w); if (vbits == (size_t)-1 || wbits == (size_t)-1) { @@ -215,7 +220,9 @@ safe_power(PyObject *v, PyObject *w) static PyObject * safe_lshift(PyObject *v, PyObject *w) { - if (PyLong_Check(v) && PyLong_Check(w) && Py_SIZE(v) && Py_SIZE(w)) { + if (PyLong_Check(v) && PyLong_Check(w) && + !_PyLong_IsZero((PyLongObject *)v) && !_PyLong_IsZero((PyLongObject *)w) + ) { size_t vbits = _PyLong_NumBits(v); size_t wbits = PyLong_AsSize_t(w); if (vbits == (size_t)-1 || wbits == (size_t)-1) { diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 12ca0ba6c4873c..8840bbabe4b584 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -5,6 +5,7 @@ #include "pycore_ast.h" // _PyAST_Validate() #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_compile.h" // _PyAST_Compile() +#include "pycore_long.h" // _PyLong_CompactValue #include "pycore_object.h" // _Py_AddToAllObjects() #include "pycore_pyerrors.h" // _PyErr_NoMemory() #include 
"pycore_pystate.h" // _PyThreadState_GET() @@ -2315,7 +2316,7 @@ builtin_round_impl(PyObject *module, PyObject *number, PyObject *ndigits) { PyObject *round, *result; - if (Py_TYPE(number)->tp_dict == NULL) { + if (!_PyType_IsReady(Py_TYPE(number))) { if (PyType_Ready(Py_TYPE(number)) < 0) return NULL; } @@ -2491,7 +2492,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) */ if (PyLong_CheckExact(result)) { int overflow; - long i_result = PyLong_AsLongAndOverflow(result, &overflow); + Py_ssize_t i_result = PyLong_AsLongAndOverflow(result, &overflow); /* If this already overflowed, don't even enter the loop. */ if (overflow == 0) { Py_SETREF(result, NULL); @@ -2502,18 +2503,17 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) Py_DECREF(iter); if (PyErr_Occurred()) return NULL; - return PyLong_FromLong(i_result); + return PyLong_FromSsize_t(i_result); } if (PyLong_CheckExact(item) || PyBool_Check(item)) { - long b; + Py_ssize_t b; overflow = 0; /* Single digits are common, fast, and cannot overflow on unpacking. */ - switch (Py_SIZE(item)) { - case -1: b = -(sdigit) ((PyLongObject*)item)->long_value.ob_digit[0]; break; - // Note: the continue goes to the top of the "while" loop that iterates over the elements - case 0: Py_DECREF(item); continue; - case 1: b = ((PyLongObject*)item)->long_value.ob_digit[0]; break; - default: b = PyLong_AsLongAndOverflow(item, &overflow); break; + if (_PyLong_IsCompact((PyLongObject *)item)) { + b = _PyLong_CompactValue((PyLongObject *)item); + } + else { + b = PyLong_AsLongAndOverflow(item, &overflow); } if (overflow == 0 && (i_result >= 0 ? (b <= LONG_MAX - i_result) @@ -2525,7 +2525,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) } } /* Either overflowed or is not an int. Restore real objects and process normally */ - result = PyLong_FromLong(i_result); + result = PyLong_FromSsize_t(i_result); if (result == NULL) { Py_DECREF(item); Py_DECREF(iter); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 15a57e0dcdd704..82c10044418345 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -14,6 +14,7 @@ #include "pycore_function.h" #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() +#include "pycore_instruments.h" #include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_moduleobject.h" // PyModuleObject #include "pycore_opcode.h" // EXTRA_CASES @@ -24,6 +25,7 @@ #include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs #include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_tuple.h" // _PyTuple_ITEMS() +#include "pycore_typeobject.h" // _PySuper_Lookup() #include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS #include "pycore_dict.h" @@ -74,10 +76,56 @@ dummy_func( PyObject **stack_pointer, PyObject *kwnames, int throwflag, - binaryfunc binary_ops[] + binaryfunc binary_ops[], + PyObject *args[] ) { + // Dummy labels. + pop_1_error: + // Dummy locals. 
+ PyObject *annotations; + PyObject *attrs; + PyObject *bottom; + PyObject *callable; + PyObject *callargs; + PyObject *closure; + PyObject *codeobj; + PyObject *cond; + PyObject *defaults; + PyObject *descr; _PyInterpreterFrame entry_frame; + PyObject *exc; + PyObject *exit; + PyObject *fget; + PyObject *fmt_spec; + PyObject *func; + uint32_t func_version; + PyObject *getattribute; + PyObject *kwargs; + PyObject *kwdefaults; + PyObject *len_o; + PyObject *match; + PyObject *match_type; + PyObject *method; + PyObject *mgr; + Py_ssize_t min_args; + PyObject *names; + PyObject *new_exc; + PyObject *next; + PyObject *none; + PyObject *null; + PyObject *prev_exc; + PyObject *receiver; + PyObject *rest; + int result; + PyObject *self; + PyObject *seq; + PyObject *slice; + PyObject *step; + PyObject *subject; + PyObject *top; + PyObject *type; + int values_or_none; switch (opcode) { @@ -88,11 +136,45 @@ dummy_func( inst(RESUME, (--)) { assert(tstate->cframe == &cframe); assert(frame == cframe.current_frame); - if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + /* Possibly combine this with eval breaker */ + if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) { + int err = _Py_Instrument(frame->f_code, tstate->interp); + ERROR_IF(err, error); + next_instr--; + } + else if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { goto handle_eval_breaker; } } + inst(INSTRUMENTED_RESUME, (--)) { + /* Possible performance enhancement: + * We need to check the eval breaker anyway, can we + * combine the instrument version check and the eval breaker test? + */ + if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) { + if (_Py_Instrument(frame->f_code, tstate->interp)) { + goto error; + } + next_instr--; + } + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation( + tstate, oparg > 0, frame, next_instr-1); + stack_pointer = _PyFrame_GetStackPointer(frame); + ERROR_IF(err, error); + if (frame->prev_instr != next_instr-1) { + /* Instrumentation has jumped */ + next_instr = frame->prev_instr; + DISPATCH(); + } + if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + goto handle_eval_breaker; + } + } + } + inst(LOAD_CLOSURE, (-- value)) { /* We keep LOAD_CLOSURE so that the bytecode stays more readable.
*/ value = GETLOCAL(oparg); @@ -143,6 +225,34 @@ dummy_func( macro(END_FOR) = POP_TOP + POP_TOP; + inst(INSTRUMENTED_END_FOR, (receiver, value --)) { + /* Need to create a fake StopIteration error here, + * to conform to PEP 380 */ + if (PyGen_Check(receiver)) { + PyErr_SetObject(PyExc_StopIteration, value); + if (monitor_stop_iteration(tstate, frame, next_instr-1)) { + goto error; + } + PyErr_SetRaisedException(NULL); + } + DECREF_INPUTS(); + } + + inst(END_SEND, (receiver, value -- value)) { + Py_DECREF(receiver); + } + + inst(INSTRUMENTED_END_SEND, (receiver, value -- value)) { + if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { + PyErr_SetObject(PyExc_StopIteration, value); + if (monitor_stop_iteration(tstate, frame, next_instr-1)) { + goto error; + } + PyErr_SetRaisedException(NULL); + } + Py_DECREF(receiver); + } + inst(UNARY_NEGATIVE, (value -- res)) { res = PyNumber_Negative(value); DECREF_INPUTS(); @@ -182,7 +292,6 @@ dummy_func( inst(BINARY_OP_MULTIPLY_INT, (unused/1, left, right -- prod)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -193,7 +302,6 @@ dummy_func( } inst(BINARY_OP_MULTIPLY_FLOAT, (unused/1, left, right -- prod)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -203,7 +311,6 @@ dummy_func( } inst(BINARY_OP_SUBTRACT_INT, (unused/1, left, right -- sub)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -214,7 +321,6 @@ dummy_func( } inst(BINARY_OP_SUBTRACT_FLOAT, (unused/1, left, right -- sub)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -223,7 +329,6 @@ dummy_func( } inst(BINARY_OP_ADD_UNICODE, (unused/1, left, right -- res)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -240,7 +345,6 @@ dummy_func( // specializations, but there is no output. // At the end we just skip over the STORE_FAST. 
inst(BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; @@ -270,7 +374,6 @@ dummy_func( } inst(BINARY_OP_ADD_FLOAT, (unused/1, left, right -- sum)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -280,7 +383,6 @@ dummy_func( } inst(BINARY_OP_ADD_INT, (unused/1, left, right -- sum)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -298,11 +400,10 @@ dummy_func( BINARY_SUBSCR_TUPLE_INT, }; - inst(BINARY_SUBSCR, (unused/4, container, sub -- res)) { + inst(BINARY_SUBSCR, (unused/1, container, sub -- res)) { #if ENABLE_SPECIALIZATION _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_BinarySubscr(container, sub, next_instr); DISPATCH_SAME_OPARG(); @@ -345,14 +446,12 @@ dummy_func( ERROR_IF(err, error); } - inst(BINARY_SUBSCR_LIST_INT, (unused/4, list, sub -- res)) { - assert(cframe.use_tracing == 0); + inst(BINARY_SUBSCR_LIST_INT, (unused/1, list, sub -- res)) { DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); // Deopt unless 0 <= sub < PyList_Size(list) - DEOPT_IF(!_PyLong_IsPositiveSingleDigit(sub), BINARY_SUBSCR); - assert(((PyLongObject *)_PyLong_GetZero())->long_value.ob_digit[0] == 0); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); @@ -363,14 +462,12 @@ dummy_func( Py_DECREF(list); } - inst(BINARY_SUBSCR_TUPLE_INT, (unused/4, tuple, sub -- res)) { - assert(cframe.use_tracing == 0); + inst(BINARY_SUBSCR_TUPLE_INT, (unused/1, tuple, sub -- res)) { DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); // Deopt unless 0 <= sub < PyTuple_Size(list) - DEOPT_IF(!_PyLong_IsPositiveSingleDigit(sub), BINARY_SUBSCR); - assert(((PyLongObject *)_PyLong_GetZero())->long_value.ob_digit[0] == 0); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); @@ -381,8 +478,7 @@ dummy_func( Py_DECREF(tuple); } - inst(BINARY_SUBSCR_DICT, (unused/4, dict, sub -- res)) { - assert(cframe.use_tracing == 0); + inst(BINARY_SUBSCR_DICT, (unused/1, dict, sub -- res)) { DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); res = PyDict_GetItemWithError(dict, sub); @@ -397,14 +493,16 @@ dummy_func( DECREF_INPUTS(); } - inst(BINARY_SUBSCR_GETITEM, (unused/1, type_version/2, func_version/1, container, sub -- unused)) { + inst(BINARY_SUBSCR_GETITEM, (unused/1, container, sub -- unused)) { PyTypeObject *tp = Py_TYPE(container); - DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); - assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); - PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; + DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR); + 
PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; + PyObject *cached = ht->_spec_cache.getitem; + DEOPT_IF(cached == NULL, BINARY_SUBSCR); assert(PyFunction_Check(cached)); PyFunctionObject *getitem = (PyFunctionObject *)cached; - DEOPT_IF(getitem->func_version != func_version, BINARY_SUBSCR); + uint32_t cached_version = ht->_spec_cache.getitem_version; + DEOPT_IF(getitem->func_version != cached_version, BINARY_SUBSCR); PyCodeObject *code = (PyCodeObject *)getitem->func_code; assert(code->co_argcount == 2); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); @@ -415,6 +513,7 @@ dummy_func( new_frame->localsplus[0] = container; new_frame->localsplus[1] = sub; JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); } @@ -439,7 +538,6 @@ dummy_func( inst(STORE_SUBSCR, (counter/1, v, container, sub -- )) { #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_StoreSubscr(container, sub, next_instr); DISPATCH_SAME_OPARG(); @@ -457,12 +555,11 @@ dummy_func( } inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); // Ensure nonnegative, zero-or-one-digit ints. - DEOPT_IF(!_PyLong_IsPositiveSingleDigit(sub), STORE_SUBSCR); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), STORE_SUBSCR); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; // Ensure index < len(list) DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); @@ -477,7 +574,6 @@ dummy_func( } inst(STORE_SUBSCR_DICT, (unused/1, value, dict, sub -- )) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); STAT_INC(STORE_SUBSCR, hit); int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); @@ -533,7 +629,6 @@ dummy_func( assert(EMPTY()); /* Restore previous cframe and return. 
*/ tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; assert(tstate->cframe->current_frame == frame->previous); assert(!_PyErr_Occurred(tstate)); _Py_LeaveRecursiveCallTstate(tstate); @@ -544,14 +639,32 @@ dummy_func( STACK_SHRINK(1); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); _Py_LeaveRecursiveCallPy(tstate); assert(frame != &entry_frame); // GH-99729: We need to unlink the frame *before* clearing it: _PyInterpreterFrame *dying = frame; frame = cframe.current_frame = dying->previous; _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + inst(INSTRUMENTED_RETURN_VALUE, (retval --)) { + int err = _Py_call_instrumentation_arg( + tstate, PY_MONITORING_EVENT_PY_RETURN, + frame, next_instr-1, retval); + if (err) goto error; + STACK_SHRINK(1); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = cframe.current_frame = dying->previous; + _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; } @@ -561,14 +674,33 @@ dummy_func( Py_INCREF(retval); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); _Py_LeaveRecursiveCallPy(tstate); assert(frame != &entry_frame); // GH-99729: We need to unlink the frame *before* clearing it: _PyInterpreterFrame *dying = frame; frame = cframe.current_frame = dying->previous; _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + + inst(INSTRUMENTED_RETURN_CONST, (--)) { + PyObject *retval = GETITEM(frame->f_code->co_consts, oparg); + int err = _Py_call_instrumentation_arg( + tstate, PY_MONITORING_EVENT_PY_RETURN, + frame, next_instr-1, retval); + if (err) goto error; + Py_INCREF(retval); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = cframe.current_frame = dying->previous; + _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; } @@ -690,7 +822,6 @@ dummy_func( #if ENABLE_SPECIALIZATION _PySendCache *cache = (_PySendCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_Send(receiver, next_instr); DISPATCH_SAME_OPARG(); @@ -699,6 +830,20 @@ dummy_func( DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ assert(frame != &entry_frame); + if ((Py_TYPE(receiver) == &PyGen_Type || + Py_TYPE(receiver) == &PyCoro_Type) && ((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING) + { + PyGenObject *gen = (PyGenObject *)receiver; + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + frame->return_offset = oparg; + STACK_SHRINK(1); + _PyFrame_StackPush(gen_frame, v); + gen->gi_frame_state = FRAME_EXECUTING; + gen->gi_exc_state.previous_item = tstate->exc_info; + tstate->exc_info = &gen->gi_exc_state; + 
JUMPBY(INLINE_CACHE_ENTRIES_SEND); + DISPATCH_INLINED(gen_frame); + } if (Py_IsNone(v) && PyIter_Check(receiver)) { retval = Py_TYPE(receiver)->tp_iternext(receiver); } @@ -706,42 +851,57 @@ dummy_func( retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); } if (retval == NULL) { - if (tstate->c_tracefunc != NULL - && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); + if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration) + ) { + monitor_raise(tstate, frame, next_instr-1); + } if (_PyGen_FetchStopIterationValue(&retval) == 0) { assert(retval != NULL); JUMPBY(oparg); } else { - assert(retval == NULL); goto error; } } - else { - assert(retval != NULL); - } Py_DECREF(v); } inst(SEND_GEN, (unused/1, receiver, v -- receiver)) { - assert(cframe.use_tracing == 0); PyGenObject *gen = (PyGenObject *)receiver; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type && Py_TYPE(gen) != &PyCoro_Type, SEND); DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, SEND); STAT_INC(SEND, hit); _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; - frame->yield_offset = oparg; + frame->return_offset = oparg; STACK_SHRINK(1); _PyFrame_StackPush(gen_frame, v); gen->gi_frame_state = FRAME_EXECUTING; gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; - JUMPBY(INLINE_CACHE_ENTRIES_SEND + oparg); + JUMPBY(INLINE_CACHE_ENTRIES_SEND); DISPATCH_INLINED(gen_frame); } + inst(INSTRUMENTED_YIELD_VALUE, (retval -- unused)) { + assert(frame != &entry_frame); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_SUSPENDED; + _PyFrame_SetStackPointer(frame, stack_pointer - 1); + int err = _Py_call_instrumentation_arg( + tstate, PY_MONITORING_EVENT_PY_YIELD, + frame, next_instr-1, retval); + if (err) goto error; + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + _Py_LeaveRecursiveCallPy(tstate); + _PyInterpreterFrame *gen_frame = frame; + frame = cframe.current_frame = frame->previous; + gen_frame->previous = NULL; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + } + inst(YIELD_VALUE, (retval -- unused)) { // NOTE: It's important that YIELD_VALUE never raises an exception! 
// The compiler treats any exception raised here as a failed close() @@ -750,15 +910,12 @@ dummy_func( PyGenObject *gen = _PyFrame_GetGenerator(frame); gen->gi_frame_state = FRAME_SUSPENDED; _PyFrame_SetStackPointer(frame, stack_pointer - 1); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); tstate->exc_info = gen->gi_exc_state.previous_item; gen->gi_exc_state.previous_item = NULL; _Py_LeaveRecursiveCallPy(tstate); _PyInterpreterFrame *gen_frame = frame; frame = cframe.current_frame = frame->previous; gen_frame->previous = NULL; - frame->prev_instr -= frame->yield_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; } @@ -784,9 +941,7 @@ dummy_func( } assert(exc && PyExceptionInstance_Check(exc)); Py_INCREF(exc); - PyObject *typ = Py_NewRef(PyExceptionInstance_Class(exc)); - PyObject *tb = PyException_GetTraceback(exc); - _PyErr_Restore(tstate, typ, exc, tb); + _PyErr_SetRaisedException(tstate, exc); goto exception_unwind; } @@ -797,9 +952,7 @@ dummy_func( } else { Py_INCREF(exc); - PyObject *typ = Py_NewRef(PyExceptionInstance_Class(exc)); - PyObject *tb = PyException_GetTraceback(exc); - _PyErr_Restore(tstate, typ, exc, tb); + _PyErr_SetRaisedException(tstate, exc); goto exception_unwind; } } @@ -894,7 +1047,6 @@ dummy_func( #if ENABLE_SPECIALIZATION _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_UnpackSequence(seq, next_instr, oparg); DISPATCH_SAME_OPARG(); @@ -958,7 +1110,6 @@ dummy_func( inst(STORE_ATTR, (counter/1, unused/3, v, owner --)) { #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { - assert(cframe.use_tracing == 0); PyObject *name = GETITEM(frame->f_code->co_names, oparg); next_instr--; _Py_Specialize_StoreAttr(owner, next_instr, name); @@ -1075,7 +1226,6 @@ dummy_func( #if ENABLE_SPECIALIZATION _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1); next_instr--; _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); @@ -1127,7 +1277,6 @@ dummy_func( } inst(LOAD_GLOBAL_MODULE, (unused/1, index/1, version/1, unused/1 -- null if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); @@ -1141,11 +1290,11 @@ dummy_func( } inst(LOAD_GLOBAL_BUILTIN, (unused/1, index/1, mod_version/1, bltn_version/1 -- null if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); PyDictObject *mdict = (PyDictObject *)GLOBALS(); PyDictObject *bdict = (PyDictObject *)BUILTINS(); + assert(opcode == LOAD_GLOBAL_BUILTIN); DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); assert(DK_IS_UNICODE(bdict->ma_keys)); @@ -1411,6 +1560,49 @@ dummy_func( PREDICT(JUMP_BACKWARD); } + family(load_super_attr, INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR) = { + LOAD_SUPER_ATTR, + LOAD_SUPER_ATTR_METHOD, + }; + + inst(LOAD_SUPER_ATTR, (unused/9, global_super, class, self -- res2 if (oparg & 1), res)) { + PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2); + int load_method = oparg & 1; + #if ENABLE_SPECIALIZATION + _PySuperAttrCache *cache = 
(_PySuperAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + next_instr--; + _Py_Specialize_LoadSuperAttr(global_super, class, self, next_instr, name, load_method); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_SUPER_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + #endif /* ENABLE_SPECIALIZATION */ + + // we make no attempt to optimize here; specializations should + // handle any case whose performance we care about + PyObject *stack[] = {class, self}; + PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); + DECREF_INPUTS(); + ERROR_IF(super == NULL, error); + res = PyObject_GetAttr(super, name); + Py_DECREF(super); + ERROR_IF(res == NULL, error); + } + + inst(LOAD_SUPER_ATTR_METHOD, (unused/1, class_version/2, self_type_version/2, method/4, global_super, class, self -- res2, res)) { + DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); + DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); + DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR); + PyTypeObject *self_type = Py_TYPE(self); + DEOPT_IF(self_type->tp_version_tag != self_type_version, LOAD_SUPER_ATTR); + res2 = method; + res = self; // transfer ownership + Py_INCREF(res2); + Py_DECREF(global_super); + Py_DECREF(class); + } + family(load_attr, INLINE_CACHE_ENTRIES_LOAD_ATTR) = { LOAD_ATTR, LOAD_ATTR_INSTANCE_VALUE, @@ -1429,7 +1621,6 @@ dummy_func( #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1); next_instr--; _Py_Specialize_LoadAttr(owner, next_instr, name); @@ -1475,7 +1666,6 @@ dummy_func( } inst(LOAD_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -1492,7 +1682,6 @@ dummy_func( } inst(LOAD_ATTR_MODULE, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; assert(dict != NULL); @@ -1509,7 +1698,6 @@ dummy_func( } inst(LOAD_ATTR_WITH_HINT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -1540,7 +1728,6 @@ dummy_func( } inst(LOAD_ATTR_SLOT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -1554,7 +1741,6 @@ dummy_func( } inst(LOAD_ATTR_CLASS, (unused/1, type_version/2, unused/2, descr/4, cls -- res2 if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, @@ -1570,7 +1756,6 @@ dummy_func( } inst(LOAD_ATTR_PROPERTY, (unused/1, type_version/2, func_version/2, fget/4, owner -- unused if (oparg & 1), unused)) { - assert(cframe.use_tracing == 0); DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); @@ -1592,11 +1777,11 @@ dummy_func( 
STACK_SHRINK(shrink_stack); new_frame->localsplus[0] = owner; JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); } inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused if (oparg & 1), unused)) { - assert(cframe.use_tracing == 0); DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); @@ -1620,11 +1805,11 @@ dummy_func( new_frame->localsplus[0] = owner; new_frame->localsplus[1] = Py_NewRef(name); JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); } inst(STORE_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, value, owner --)) { - assert(cframe.use_tracing == 0); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -1645,7 +1830,6 @@ dummy_func( } inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) { - assert(cframe.use_tracing == 0); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -1687,7 +1871,6 @@ dummy_func( } inst(STORE_ATTR_SLOT, (unused/1, type_version/2, index/1, value, owner --)) { - assert(cframe.use_tracing == 0); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -1699,105 +1882,77 @@ dummy_func( Py_DECREF(owner); } - inst(COMPARE_OP, (unused/1, left, right -- res)) { - STAT_INC(COMPARE_OP, deferred); - assert((oparg >> 4) <= Py_GE); - res = PyObject_RichCompare(left, right, oparg>>4); - DECREF_INPUTS(); - ERROR_IF(res == NULL, error); - } - - // No cache size here, since this is a family of super-instructions. 
- family(compare_and_branch) = { - COMPARE_AND_BRANCH, - COMPARE_AND_BRANCH_FLOAT, - COMPARE_AND_BRANCH_INT, - COMPARE_AND_BRANCH_STR, + family(compare_op, INLINE_CACHE_ENTRIES_COMPARE_OP) = { + COMPARE_OP, + COMPARE_OP_FLOAT, + COMPARE_OP_INT, + COMPARE_OP_STR, }; - inst(COMPARE_AND_BRANCH, (unused/2, left, right -- )) { + inst(COMPARE_OP, (unused/1, left, right -- res)) { #if ENABLE_SPECIALIZATION _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; - _Py_Specialize_CompareAndBranch(left, right, next_instr, oparg); + _Py_Specialize_CompareOp(left, right, next_instr, oparg); DISPATCH_SAME_OPARG(); } - STAT_INC(COMPARE_AND_BRANCH, deferred); + STAT_INC(COMPARE_OP, deferred); DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ assert((oparg >> 4) <= Py_GE); - PyObject *cond = PyObject_RichCompare(left, right, oparg>>4); + res = PyObject_RichCompare(left, right, oparg>>4); DECREF_INPUTS(); - ERROR_IF(cond == NULL, error); - assert(next_instr[1].op.code == POP_JUMP_IF_FALSE || - next_instr[1].op.code == POP_JUMP_IF_TRUE); - bool jump_on_true = next_instr[1].op.code == POP_JUMP_IF_TRUE; - int offset = next_instr[1].op.arg; - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); - ERROR_IF(err < 0, error); - if (jump_on_true == (err != 0)) { - JUMPBY(offset); - } + ERROR_IF(res == NULL, error); } - inst(COMPARE_AND_BRANCH_FLOAT, (unused/2, left, right -- )) { - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_AND_BRANCH); - DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_AND_BRANCH); - STAT_INC(COMPARE_AND_BRANCH, hit); + inst(COMPARE_OP_FLOAT, (unused/1, left, right -- res)) { + DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); double dleft = PyFloat_AS_DOUBLE(left); double dright = PyFloat_AS_DOUBLE(right); // 1 if NaN, 2 if <, 4 if >, 8 if ==; this matches low four bits of the oparg int sign_ish = COMPARISON_BIT(dleft, dright); _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - if (sign_ish & oparg) { - int offset = next_instr[1].op.arg; - JUMPBY(offset); - } - } - - // Similar to COMPARE_AND_BRANCH_FLOAT - inst(COMPARE_AND_BRANCH_INT, (unused/2, left, right -- )) { - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyLong_CheckExact(left), COMPARE_AND_BRANCH); - DEOPT_IF(!PyLong_CheckExact(right), COMPARE_AND_BRANCH); - DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_AND_BRANCH); - DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_AND_BRANCH); - STAT_INC(COMPARE_AND_BRANCH, hit); - assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); - Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->long_value.ob_digit[0]; - Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->long_value.ob_digit[0]; + res = (sign_ish & oparg) ? 
Py_True : Py_False; + Py_INCREF(res); + } + + // Similar to COMPARE_OP_FLOAT + inst(COMPARE_OP_INT, (unused/1, left, right -- res)) { + DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); + DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP); + DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + assert(_PyLong_DigitCount((PyLongObject *)left) <= 1 && + _PyLong_DigitCount((PyLongObject *)right) <= 1); + Py_ssize_t ileft = _PyLong_CompactValue((PyLongObject *)left); + Py_ssize_t iright = _PyLong_CompactValue((PyLongObject *)right); // 2 if <, 4 if >, 8 if ==; this matches the low 4 bits of the oparg int sign_ish = COMPARISON_BIT(ileft, iright); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - if (sign_ish & oparg) { - int offset = next_instr[1].op.arg; - JUMPBY(offset); - } + res = (sign_ish & oparg) ? Py_True : Py_False; + Py_INCREF(res); } - // Similar to COMPARE_AND_BRANCH_FLOAT, but for ==, != only - inst(COMPARE_AND_BRANCH_STR, (unused/2, left, right -- )) { - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_AND_BRANCH); - DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_AND_BRANCH); - STAT_INC(COMPARE_AND_BRANCH, hit); - int res = _PyUnicode_Equal(left, right); + // Similar to COMPARE_OP_FLOAT, but for ==, != only + inst(COMPARE_OP_STR, (unused/1, left, right -- res)) { + DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + int eq = _PyUnicode_Equal(left, right); assert((oparg >>4) == Py_EQ || (oparg >>4) == Py_NE); _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - assert(res == 0 || res == 1); + assert(eq == 0 || eq == 1); assert((oparg & 0xf) == COMPARISON_NOT_EQUALS || (oparg & 0xf) == COMPARISON_EQUALS); assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS); - if ((res + COMPARISON_NOT_EQUALS) & oparg) { - int offset = next_instr[1].op.arg; - JUMPBY(offset); - } + res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? Py_True : Py_False; + Py_INCREF(res); } inst(IS_OP, (left, right -- b)) { @@ -1830,7 +1985,7 @@ dummy_func( ERROR_IF(match == NULL, error); if (!Py_IsNone(match)) { - PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); + PyErr_SetHandledException(match); } } @@ -1929,56 +2084,6 @@ dummy_func( } } - inst(JUMP_IF_FALSE_OR_POP, (cond -- cond if (jump))) { - bool jump = false; - int err; - if (Py_IsTrue(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsFalse(cond)) { - JUMPBY(oparg); - jump = true; - } - else { - err = PyObject_IsTrue(cond); - if (err > 0) { - Py_DECREF(cond); - } - else if (err == 0) { - JUMPBY(oparg); - jump = true; - } - else { - goto error; - } - } - } - - inst(JUMP_IF_TRUE_OR_POP, (cond -- cond if (jump))) { - bool jump = false; - int err; - if (Py_IsFalse(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsTrue(cond)) { - JUMPBY(oparg); - jump = true; - } - else { - err = PyObject_IsTrue(cond); - if (err > 0) { - JUMPBY(oparg); - jump = true; - } - else if (err == 0) { - Py_DECREF(cond); - } - else { - goto error; - } - } - } - inst(JUMP_BACKWARD_NO_INTERRUPT, (--)) { /* This bytecode is used in the `yield from` or `await` loop. 
* If there is an interrupt, we want it handled in the innermost @@ -2082,7 +2187,6 @@ dummy_func( #if ENABLE_SPECIALIZATION _PyForIterCache *cache = (_PyForIterCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_ForIter(iter, next_instr, oparg); DISPATCH_SAME_OPARG(); @@ -2097,13 +2201,12 @@ dummy_func( if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { goto error; } - else if (tstate->c_tracefunc != NULL) { - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); - } + monitor_raise(tstate, frame, next_instr-1); _PyErr_Clear(tstate); } /* iterator ended normally */ - assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR); + assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR || + next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR); Py_DECREF(iter); STACK_SHRINK(1); /* Jump forward oparg, then skip following END_FOR instruction */ @@ -2113,8 +2216,35 @@ dummy_func( // Common case: no jump, leave it to the code generator } + inst(INSTRUMENTED_FOR_ITER, ( -- )) { + _Py_CODEUNIT *here = next_instr-1; + _Py_CODEUNIT *target; + PyObject *iter = TOP(); + PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); + if (next != NULL) { + PUSH(next); + target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER; + } + else { + if (_PyErr_Occurred(tstate)) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { + goto error; + } + monitor_raise(tstate, frame, here); + _PyErr_Clear(tstate); + } + /* iterator ended normally */ + assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR || + next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR); + STACK_SHRINK(1); + Py_DECREF(iter); + /* Skip END_FOR */ + target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1; + } + INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH); + } + inst(FOR_ITER_LIST, (unused/1, iter -- iter, next)) { - assert(cframe.use_tracing == 0); DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); _PyListIterObject *it = (_PyListIterObject *)iter; STAT_INC(FOR_ITER, hit); @@ -2137,7 +2267,6 @@ dummy_func( } inst(FOR_ITER_TUPLE, (unused/1, iter -- iter, next)) { - assert(cframe.use_tracing == 0); _PyTupleIterObject *it = (_PyTupleIterObject *)iter; DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -2160,7 +2289,6 @@ dummy_func( } inst(FOR_ITER_RANGE, (unused/1, iter -- iter, next)) { - assert(cframe.use_tracing == 0); _PyRangeIterObject *r = (_PyRangeIterObject *)iter; DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -2181,19 +2309,19 @@ dummy_func( } inst(FOR_ITER_GEN, (unused/1, iter -- iter, unused)) { - assert(cframe.use_tracing == 0); PyGenObject *gen = (PyGenObject *)iter; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER); STAT_INC(FOR_ITER, hit); _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; - frame->yield_offset = oparg; + frame->return_offset = oparg; _PyFrame_StackPush(gen_frame, Py_NewRef(Py_None)); gen->gi_frame_state = FRAME_EXECUTING; gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); - assert(next_instr->op.code == END_FOR); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + assert(next_instr[oparg].op.code == END_FOR || + next_instr[oparg].op.code == 
INSTRUMENTED_END_FOR); DISPATCH_INLINED(gen_frame); } @@ -2302,7 +2430,6 @@ dummy_func( inst(LOAD_ATTR_METHOD_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, self -- res2 if (oparg & 1), res)) { /* Cached method object */ - assert(cframe.use_tracing == 0); PyTypeObject *self_cls = Py_TYPE(self); assert(type_version != 0); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); @@ -2321,7 +2448,6 @@ dummy_func( } inst(LOAD_ATTR_METHOD_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); assert(self_cls->tp_dictoffset == 0); @@ -2334,7 +2460,6 @@ dummy_func( } inst(LOAD_ATTR_METHOD_LAZY_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (oparg & 1), res)) { - assert(cframe.use_tracing == 0); PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); Py_ssize_t dictoffset = self_cls->tp_dictoffset; @@ -2356,7 +2481,22 @@ dummy_func( kwnames = GETITEM(frame->f_code->co_consts, oparg); } - // Cache layout: counter/1, func_version/2, min_args/1 + inst(INSTRUMENTED_CALL, ( -- )) { + int is_meth = PEEK(oparg+2) != NULL; + int total_args = oparg + is_meth; + PyObject *function = PEEK(total_args + 1); + PyObject *arg = total_args == 0 ? + &_PyInstrumentation_MISSING : PEEK(total_args); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_CALL, + frame, next_instr-1, function, arg); + ERROR_IF(err, error); + _PyCallCache *cache = (_PyCallCache *)next_instr; + INCREMENT_ADAPTIVE_COUNTER(cache->counter); + GO_TO_INSTRUCTION(CALL); + } + + // Cache layout: counter/1, func_version/2 // Neither CALL_INTRINSIC_1/2 nor CALL_FUNCTION_EX are members! family(call, INLINE_CACHE_ENTRIES_CALL) = { CALL, @@ -2386,7 +2526,7 @@ dummy_func( // (Some args may be keywords, see KW_NAMES, which sets 'kwnames'.) // On exit, the stack is [result]. // When calling Python, inline the call using DISPATCH_INLINED(). - inst(CALL, (unused/1, unused/2, unused/1, method, callable, args[oparg] -- res)) { + inst(CALL, (unused/1, unused/2, method, callable, args[oparg] -- res)) { int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -2397,7 +2537,6 @@ dummy_func( #if ENABLE_SPECIALIZATION _PyCallCache *cache = (_PyCallCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_Call(callable, next_instr, total_args, kwnames); DISPATCH_SAME_OPARG(); @@ -2437,19 +2576,30 @@ dummy_func( goto error; } JUMPBY(INLINE_CACHE_ENTRIES_CALL); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); } /* Callable is not a normal Python function */ - if (cframe.use_tracing) { - res = trace_call_function( - tstate, callable, args, - positional_args, kwnames); - } - else { - res = PyObject_Vectorcall( - callable, args, - positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, - kwnames); + res = PyObject_Vectorcall( + callable, args, + positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames); + if (opcode == INSTRUMENTED_CALL) { + PyObject *arg = total_args == 0 ? 
+ &_PyInstrumentation_MISSING : PEEK(total_args); + if (res == NULL) { + _Py_call_instrumentation_exc2( + tstate, PY_MONITORING_EVENT_C_RAISE, + frame, next_instr-1, callable, arg); + } + else { + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_C_RETURN, + frame, next_instr-1, callable, arg); + if (err < 0) { + Py_CLEAR(res); + } + } } kwnames = NULL; assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -2464,7 +2614,7 @@ dummy_func( // Start out with [NULL, bound_method, arg1, arg2, ...] // Transform to [callable, self, arg1, arg2, ...] // Then fall through to CALL_PY_EXACT_ARGS - inst(CALL_BOUND_METHOD_EXACT_ARGS, (unused/1, unused/2, unused/1, method, callable, unused[oparg] -- unused)) { + inst(CALL_BOUND_METHOD_EXACT_ARGS, (unused/1, unused/2, method, callable, unused[oparg] -- unused)) { DEOPT_IF(method != NULL, CALL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); STAT_INC(CALL, hit); @@ -2476,7 +2626,7 @@ dummy_func( GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); } - inst(CALL_PY_EXACT_ARGS, (unused/1, func_version/2, unused/1, method, callable, args[oparg] -- unused)) { + inst(CALL_PY_EXACT_ARGS, (unused/1, func_version/2, method, callable, args[oparg] -- unused)) { assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -2500,10 +2650,11 @@ dummy_func( // Manipulate stack directly since we leave using DISPATCH_INLINED(). STACK_SHRINK(oparg + 2); JUMPBY(INLINE_CACHE_ENTRIES_CALL); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); } - inst(CALL_PY_WITH_DEFAULTS, (unused/1, func_version/2, min_args/1, method, callable, args[oparg] -- unused)) { + inst(CALL_PY_WITH_DEFAULTS, (unused/1, func_version/2, method, callable, args[oparg] -- unused)) { assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -2517,6 +2668,11 @@ dummy_func( PyFunctionObject *func = (PyFunctionObject *)callable; DEOPT_IF(func->func_version != func_version, CALL); PyCodeObject *code = (PyCodeObject *)func->func_code; + assert(func->func_defaults); + assert(PyTuple_CheckExact(func->func_defaults)); + int defcount = (int)PyTuple_GET_SIZE(func->func_defaults); + assert(defcount <= code->co_argcount); + int min_args = code->co_argcount - defcount; DEOPT_IF(argcount > code->co_argcount, CALL); DEOPT_IF(argcount < min_args, CALL); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); @@ -2532,12 +2688,12 @@ dummy_func( // Manipulate stack and cache directly since we leave using DISPATCH_INLINED(). 
STACK_SHRINK(oparg + 2); JUMPBY(INLINE_CACHE_ENTRIES_CALL); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); } - inst(CALL_NO_KW_TYPE_1, (unused/1, unused/2, unused/1, null, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_TYPE_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) { assert(kwnames == NULL); - assert(cframe.use_tracing == 0); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); PyObject *obj = args[0]; @@ -2548,9 +2704,8 @@ dummy_func( Py_DECREF(&PyType_Type); // I.e., callable } - inst(CALL_NO_KW_STR_1, (unused/1, unused/2, unused/1, null, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_STR_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) { assert(kwnames == NULL); - assert(cframe.use_tracing == 0); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); @@ -2563,7 +2718,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_TUPLE_1, (unused/1, unused/2, unused/1, null, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_TUPLE_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) { assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -2577,7 +2732,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_BUILTIN_CLASS, (unused/1, unused/2, unused/1, method, callable, args[oparg] -- res)) { + inst(CALL_BUILTIN_CLASS, (unused/1, unused/2, method, callable, args[oparg] -- res)) { int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -2602,8 +2757,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_BUILTIN_O, (unused/1, unused/2, unused/1, method, callable, args[oparg] -- res)) { - assert(cframe.use_tracing == 0); + inst(CALL_NO_KW_BUILTIN_O, (unused/1, unused/2, method, callable, args[oparg] -- res)) { /* Builtin METH_O functions */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -2634,8 +2788,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_BUILTIN_FAST, (unused/1, unused/2, unused/1, method, callable, args[oparg] -- res)) { - assert(cframe.use_tracing == 0); + inst(CALL_NO_KW_BUILTIN_FAST, (unused/1, unused/2, method, callable, args[oparg] -- res)) { /* Builtin METH_FASTCALL functions, without keywords */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -2670,8 +2823,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_BUILTIN_FAST_WITH_KEYWORDS, (unused/1, unused/2, unused/1, method, callable, args[oparg] -- res)) { - assert(cframe.use_tracing == 0); + inst(CALL_BUILTIN_FAST_WITH_KEYWORDS, (unused/1, unused/2, method, callable, args[oparg] -- res)) { /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ int is_meth = method != NULL; int total_args = oparg; @@ -2706,8 +2858,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_LEN, (unused/1, unused/2, unused/1, method, callable, args[oparg] -- res)) { - assert(cframe.use_tracing == 0); + inst(CALL_NO_KW_LEN, (unused/1, unused/2, method, callable, args[oparg] -- res)) { assert(kwnames == NULL); /* len(o) */ int is_meth = method != NULL; @@ -2734,8 +2885,7 @@ dummy_func( ERROR_IF(res == NULL, error); } - inst(CALL_NO_KW_ISINSTANCE, (unused/1, unused/2, unused/1, method, callable, args[oparg] -- res)) { - assert(cframe.use_tracing == 0); + inst(CALL_NO_KW_ISINSTANCE, (unused/1, unused/2, method, callable, args[oparg] -- res)) { assert(kwnames == NULL); /* isinstance(o, o2) */ int is_meth = method != NULL; @@ -2765,8 +2915,7 @@ dummy_func( } // This is secretly a super-instruction - inst(CALL_NO_KW_LIST_APPEND, (unused/1, unused/2, unused/1, method, self, 
args[oparg] -- unused)) { - assert(cframe.use_tracing == 0); + inst(CALL_NO_KW_LIST_APPEND, (unused/1, unused/2, method, self, args[oparg] -- unused)) { assert(kwnames == NULL); assert(oparg == 1); assert(method != NULL); @@ -2786,7 +2935,7 @@ dummy_func( DISPATCH(); } - inst(CALL_NO_KW_METHOD_DESCRIPTOR_O, (unused/1, unused/2, unused/1, method, unused, args[oparg] -- res)) { + inst(CALL_NO_KW_METHOD_DESCRIPTOR_O, (unused/1, unused/2, method, unused, args[oparg] -- res)) { assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -2820,7 +2969,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, (unused/1, unused/2, unused/1, method, unused, args[oparg] -- res)) { + inst(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, (unused/1, unused/2, method, unused, args[oparg] -- res)) { int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -2852,7 +3001,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS, (unused/1, unused/2, unused/1, method, unused, args[oparg] -- res)) { + inst(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS, (unused/1, unused/2, method, unused, args[oparg] -- res)) { assert(kwnames == NULL); assert(oparg == 0 || oparg == 1); int is_meth = method != NULL; @@ -2884,7 +3033,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_METHOD_DESCRIPTOR_FAST, (unused/1, unused/2, unused/1, method, unused, args[oparg] -- res)) { + inst(CALL_NO_KW_METHOD_DESCRIPTOR_FAST, (unused/1, unused/2, method, unused, args[oparg] -- res)) { assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -2915,12 +3064,14 @@ dummy_func( CHECK_EVAL_BREAKER(); } + inst(INSTRUMENTED_CALL_FUNCTION_EX, ( -- )) { + GO_TO_INSTRUCTION(CALL_FUNCTION_EX); + } + inst(CALL_FUNCTION_EX, (unused, func, callargs, kwargs if (oparg & 1) -- result)) { - if (oparg & 1) { - // DICT_MERGE is called before this opcode if there are kwargs. - // It converts all dict subtypes in kwargs into regular dicts. - assert(PyDict_CheckExact(kwargs)); - } + // DICT_MERGE is called before this opcode if there are kwargs. + // It converts all dict subtypes in kwargs into regular dicts. + assert(kwargs == NULL || PyDict_CheckExact(kwargs)); if (!PyTuple_CheckExact(callargs)) { if (check_args_iterable(tstate, func, callargs) < 0) { goto error; @@ -2932,10 +3083,54 @@ dummy_func( Py_SETREF(callargs, tuple); } assert(PyTuple_CheckExact(callargs)); - - result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); + EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func); + if (opcode == INSTRUMENTED_CALL_FUNCTION_EX && + !PyFunction_Check(func) && !PyMethod_Check(func) + ) { + PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ? 
+ PyTuple_GET_ITEM(callargs, 0) : Py_None; + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_CALL, + frame, next_instr-1, func, arg); + if (err) goto error; + result = PyObject_Call(func, callargs, kwargs); + if (result == NULL) { + _Py_call_instrumentation_exc2( + tstate, PY_MONITORING_EVENT_C_RAISE, + frame, next_instr-1, func, arg); + } + else { + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_C_RETURN, + frame, next_instr-1, func, arg); + if (err < 0) { + Py_CLEAR(result); + } + } + } + else { + if (Py_TYPE(func) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)func)->vectorcall == _PyFunction_Vectorcall) { + assert(PyTuple_CheckExact(callargs)); + Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); + int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); + + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate, + (PyFunctionObject *)func, locals, + nargs, callargs, kwargs); + // Need to manually shrink the stack since we exit with DISPATCH_INLINED. + STACK_SHRINK(oparg + 3); + if (new_frame == NULL) { + goto error; + } + frame->return_offset = 0; + DISPATCH_INLINED(new_frame); + } + result = PyObject_Call(func, callargs, kwargs); + } DECREF_INPUTS(); - assert(PEEK(3 + (oparg & 1)) == NULL); ERROR_IF(result == NULL, error); CHECK_EVAL_BREAKER(); @@ -3036,20 +3231,10 @@ dummy_func( value = result; } - /* If value is a unicode object, and there's no fmt_spec, - then we know the result of format(value) is value - itself. In that case, skip calling format(). I plan to - move this optimization in to PyObject_Format() - itself. */ - if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { - /* Do nothing, just transfer ownership to result. */ - result = value; - } else { - /* Actually call format(). 
*/ - result = PyObject_Format(value, fmt_spec); - DECREF_INPUTS(); - ERROR_IF(result == NULL, error); - } + result = PyObject_Format(value, fmt_spec); + Py_DECREF(value); + Py_XDECREF(fmt_spec); + ERROR_IF(result == NULL, error); } inst(COPY, (bottom, unused[oparg-1] -- bottom, unused[oparg-1], top)) { @@ -3061,7 +3246,6 @@ dummy_func( #if ENABLE_SPECIALIZATION _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); DISPATCH_SAME_OPARG(); @@ -3082,9 +3266,105 @@ dummy_func( assert(oparg >= 2); } - inst(EXTENDED_ARG, (--)) { + inst(INSTRUMENTED_LINE, ( -- )) { + _Py_CODEUNIT *here = next_instr-1; + _PyFrame_SetStackPointer(frame, stack_pointer); + int original_opcode = _Py_call_instrumentation_line( + tstate, frame, here); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (original_opcode < 0) { + next_instr = here+1; + goto error; + } + next_instr = frame->prev_instr; + if (next_instr != here) { + DISPATCH(); + } + if (_PyOpcode_Caches[original_opcode]) { + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1); + INCREMENT_ADAPTIVE_COUNTER(cache->counter); + } + opcode = original_opcode; + DISPATCH_GOTO(); + } + + inst(INSTRUMENTED_INSTRUCTION, ( -- )) { + int next_opcode = _Py_call_instrumentation_instruction( + tstate, frame, next_instr-1); + ERROR_IF(next_opcode < 0, error); + next_instr--; + if (_PyOpcode_Caches[next_opcode]) { + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1); + INCREMENT_ADAPTIVE_COUNTER(cache->counter); + } + assert(next_opcode > 0 && next_opcode < 256); + opcode = next_opcode; + DISPATCH_GOTO(); + } + + inst(INSTRUMENTED_JUMP_FORWARD, ( -- )) { + INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP); + } + + inst(INSTRUMENTED_JUMP_BACKWARD, ( -- )) { + INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP); + CHECK_EVAL_BREAKER(); + } + + inst(INSTRUMENTED_POP_JUMP_IF_TRUE, ( -- )) { + PyObject *cond = POP(); + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + ERROR_IF(err < 0, error); + _Py_CODEUNIT *here = next_instr-1; + assert(err == 0 || err == 1); + int offset = err*oparg; + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + } + + inst(INSTRUMENTED_POP_JUMP_IF_FALSE, ( -- )) { + PyObject *cond = POP(); + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + ERROR_IF(err < 0, error); + _Py_CODEUNIT *here = next_instr-1; + assert(err == 0 || err == 1); + int offset = (1-err)*oparg; + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + } + + inst(INSTRUMENTED_POP_JUMP_IF_NONE, ( -- )) { + PyObject *value = POP(); + _Py_CODEUNIT *here = next_instr-1; + int offset; + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + offset = oparg; + } + else { + Py_DECREF(value); + offset = 0; + } + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + } + + inst(INSTRUMENTED_POP_JUMP_IF_NOT_NONE, ( -- )) { + PyObject *value = POP(); + _Py_CODEUNIT *here = next_instr-1; + int offset; + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + offset = 0; + } + else { + Py_DECREF(value); + offset = oparg; + } + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + } + + inst(EXTENDED_ARG, ( -- )) { assert(oparg); - assert(cframe.use_tracing == 0); opcode = next_instr->op.code; oparg = oparg << 8 | next_instr->op.arg; PRE_DISPATCH_GOTO(); @@ 
-3092,6 +3372,12 @@ dummy_func( } inst(CACHE, (--)) { + assert(0 && "Executing a cache."); + Py_UNREACHABLE(); + } + + inst(RESERVED, (--)) { + assert(0 && "Executing RESERVED instruction."); Py_UNREACHABLE(); } diff --git a/Python/ceval.c b/Python/ceval.c index 7d60cf987e9c47..958689debc87f8 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -10,6 +10,7 @@ #include "pycore_function.h" #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() +#include "pycore_instruments.h" #include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_moduleobject.h" // PyModuleObject #include "pycore_opcode.h" // EXTRA_CASES @@ -20,6 +21,7 @@ #include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs #include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_tuple.h" // _PyTuple_ITEMS() +#include "pycore_typeobject.h" // _PySuper_Lookup() #include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS #include "pycore_dict.h" @@ -52,8 +54,11 @@ #undef Py_DECREF #define Py_DECREF(arg) \ do { \ - _Py_DECREF_STAT_INC(); \ PyObject *op = _PyObject_CAST(arg); \ + if (_Py_IsImmortal(op)) { \ + break; \ + } \ + _Py_DECREF_STAT_INC(); \ if (--op->ob_refcnt == 0) { \ destructor dealloc = Py_TYPE(op)->tp_dealloc; \ (*dealloc)(op); \ @@ -76,8 +81,11 @@ #undef _Py_DECREF_SPECIALIZED #define _Py_DECREF_SPECIALIZED(arg, dealloc) \ do { \ - _Py_DECREF_STAT_INC(); \ PyObject *op = _PyObject_CAST(arg); \ + if (_Py_IsImmortal(op)) { \ + break; \ + } \ + _Py_DECREF_STAT_INC(); \ if (--op->ob_refcnt == 0) { \ destructor d = (destructor)(dealloc); \ d(op); \ @@ -92,13 +100,6 @@ #define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) _Py_atomic_load_relaxed(ATOMIC_VAL) #endif -/* Forward declarations */ -static PyObject *trace_call_function( - PyThreadState *tstate, PyObject *callable, PyObject **stack, - Py_ssize_t oparg, PyObject *kwnames); -static PyObject * do_call_core( - PyThreadState *tstate, PyObject *func, - PyObject *callargs, PyObject *kwdict, int use_tracing); #ifdef LLTRACE static void @@ -179,19 +180,22 @@ lltrace_resume_frame(_PyInterpreterFrame *frame) PyErr_SetRaisedException(exc); } #endif -static int call_trace(Py_tracefunc, PyObject *, - PyThreadState *, _PyInterpreterFrame *, - int, PyObject *); -static int call_trace_protected(Py_tracefunc, PyObject *, - PyThreadState *, _PyInterpreterFrame *, - int, PyObject *); -static void call_exc_trace(Py_tracefunc, PyObject *, - PyThreadState *, _PyInterpreterFrame *); -static int maybe_call_line_trace(Py_tracefunc, PyObject *, - PyThreadState *, _PyInterpreterFrame *, int); -static void maybe_dtrace_line(_PyInterpreterFrame *, PyTraceInfo *, int); -static void dtrace_function_entry(_PyInterpreterFrame *); -static void dtrace_function_return(_PyInterpreterFrame *); + +static void monitor_raise(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr); +static int monitor_stop_iteration(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr); +static void monitor_unwind(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr); +static void monitor_handled(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr, PyObject *exc); +static void monitor_throw(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr); static PyObject * import_name(PyThreadState *, _PyInterpreterFrame *, PyObject *, PyObject *, PyObject *); @@ -208,6 +212,9 @@ static _PyInterpreterFrame * _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, 
PyObject *locals, PyObject* const* args, size_t argcount, PyObject *kwnames); +static _PyInterpreterFrame * +_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, + PyObject *locals, Py_ssize_t nargs, PyObject *callargs, PyObject *kwargs); static void _PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame); @@ -217,21 +224,6 @@ _PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame); "cannot access free variable '%s' where it is not associated with a" \ " value in enclosing scope" -#ifndef NDEBUG -/* Ensure that tstate is valid: sanity check for PyEval_AcquireThread() and - PyEval_RestoreThread(). Detect if tstate memory was freed. It can happen - when a thread continues to run after Python finalization, especially - daemon threads. */ -static int -is_tstate_valid(PyThreadState *tstate) -{ - assert(!_PyMem_IsPtrFreed(tstate)); - assert(!_PyMem_IsPtrFreed(tstate->interp)); - return 1; -} -#endif - - #ifdef HAVE_ERRNO_H #include <errno.h> #endif @@ -434,7 +426,7 @@ match_class(PyThreadState *tstate, PyObject *subject, PyObject *type, Py_ssize_t nargs, PyObject *kwargs) { if (!PyType_Check(type)) { - const char *e = "called match pattern must be a type"; + const char *e = "called match pattern must be a class"; _PyErr_Format(tstate, PyExc_TypeError, e); return NULL; } @@ -596,63 +588,6 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag) #include "ceval_macros.h" -static int -trace_function_entry(PyThreadState *tstate, _PyInterpreterFrame *frame) -{ - if (tstate->c_tracefunc != NULL) { - /* tstate->c_tracefunc, if defined, is a - function that will be called on *every* entry - to a code block. Its return value, if not - None, is a function that will be called at - the start of each executed line of code. - (Actually, the function must return itself - in order to continue tracing.) The trace - functions are called with three arguments: - a pointer to the current frame, a string - indicating why the function is called, and - an argument which depends on the situation. - The global trace function is also called - whenever an exception is detected. */ - if (call_trace_protected(tstate->c_tracefunc, - tstate->c_traceobj, - tstate, frame, - PyTrace_CALL, Py_None)) { - /* Trace function raised an error */ - return -1; - } - } - if (tstate->c_profilefunc != NULL) { - /* Similar for c_profilefunc, except it needn't - return itself and isn't called for "line" events */ - if (call_trace_protected(tstate->c_profilefunc, - tstate->c_profileobj, - tstate, frame, - PyTrace_CALL, Py_None)) { - /* Profile function raised an error */ - return -1; - } - } - return 0; -} - -static int -trace_function_exit(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *retval) -{ - if (tstate->c_tracefunc) { - if (call_trace_protected(tstate->c_tracefunc, tstate->c_traceobj, - tstate, frame, PyTrace_RETURN, retval)) { - return -1; - } - } - if (tstate->c_profilefunc) { - if (call_trace_protected(tstate->c_profilefunc, tstate->c_profileobj, - tstate, frame, PyTrace_RETURN, retval)) { - return -1; - } - } - return 0; -} - int _Py_CheckRecursiveCallPy( PyThreadState *tstate) @@ -730,7 +665,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int * strict stack discipline must be maintained. 
*/ _PyCFrame *prev_cframe = tstate->cframe; - cframe.use_tracing = prev_cframe->use_tracing; cframe.previous = prev_cframe; tstate->cframe = &cframe; @@ -748,7 +682,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _PyCode_CODE(tstate->interp->interpreter_trampoline); entry_frame.stacktop = 0; entry_frame.owner = FRAME_OWNED_BY_CSTACK; - entry_frame.yield_offset = 0; + entry_frame.return_offset = 0; /* Push frame */ entry_frame.previous = prev_cframe->current_frame; frame->previous = &entry_frame; @@ -765,8 +699,11 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int if (_Py_EnterRecursivePy(tstate)) { goto exit_unwind; } - TRACE_FUNCTION_THROW_ENTRY(); - DTRACE_FUNCTION_ENTRY(); + /* Because this avoids the RESUME, + * we need to update instrumentation */ + _Py_Instrument(frame->f_code, tstate->interp); + monitor_throw(tstate, frame, frame->prev_instr); + /* TO DO -- Monitor throw entry. */ goto resume_with_error; } @@ -781,15 +718,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int assert(_PyInterpreterFrame_LASTI(frame) >= -1); \ /* Jump back to the last instruction executed... */ \ next_instr = frame->prev_instr + 1; \ - stack_pointer = _PyFrame_GetStackPointer(frame); \ - /* Set stackdepth to -1. \ - Update when returning or calling trace function. \ - Having stackdepth <= 0 ensures that invalid \ - values are not visible to the cycle GC. \ - We choose -1 rather than 0 to assist debugging. \ - */ \ - frame->stacktop = -1; - + stack_pointer = _PyFrame_GetStackPointer(frame); start_frame: if (_Py_EnterRecursivePy(tstate)) { @@ -845,91 +774,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #include "generated_cases.c.h" -#if USE_COMPUTED_GOTOS - TARGET_DO_TRACING: -#else - case DO_TRACING: -#endif - { - assert(cframe.use_tracing); - assert(tstate->tracing == 0); - if (INSTR_OFFSET() >= frame->f_code->_co_firsttraceable) { - int instr_prev = _PyInterpreterFrame_LASTI(frame); - frame->prev_instr = next_instr; - NEXTOPARG(); - // No _PyOpcode_Deopt here, since RESUME has no optimized forms: - if (opcode == RESUME) { - if (oparg < 2) { - CHECK_EVAL_BREAKER(); - } - /* Call tracing */ - TRACE_FUNCTION_ENTRY(); - DTRACE_FUNCTION_ENTRY(); - } - else { - /* line-by-line tracing support */ - if (PyDTrace_LINE_ENABLED()) { - maybe_dtrace_line(frame, &tstate->trace_info, instr_prev); - } - - if (cframe.use_tracing && - tstate->c_tracefunc != NULL && !tstate->tracing) { - int err; - /* see maybe_call_line_trace() - for expository comments */ - _PyFrame_SetStackPointer(frame, stack_pointer); - - err = maybe_call_line_trace(tstate->c_tracefunc, - tstate->c_traceobj, - tstate, frame, instr_prev); - // Reload possibly changed frame fields: - stack_pointer = _PyFrame_GetStackPointer(frame); - frame->stacktop = -1; - // next_instr is only reloaded if tracing *does not* raise. - // This is consistent with the behavior of older Python - // versions. If a trace function sets a new f_lineno and - // *then* raises, we use the *old* location when searching - // for an exception handler, displaying the traceback, and - // so on: - if (err) { - // next_instr wasn't incremented at the start of this - // instruction. Increment it before handling the error, - // so that it looks the same as a "normal" instruction: - next_instr++; - goto error; - } - // Reload next_instr. 
Don't increment it, though, since - // we're going to re-dispatch to the "true" instruction now: - next_instr = frame->prev_instr; - } - } - } - NEXTOPARG(); - PRE_DISPATCH_GOTO(); - // No _PyOpcode_Deopt here, since EXTENDED_ARG has no optimized forms: - while (opcode == EXTENDED_ARG) { - // CPython hasn't ever traced the instruction after an EXTENDED_ARG. - // Inline the EXTENDED_ARG here, so we can avoid branching there: - INSTRUCTION_START(EXTENDED_ARG); - opcode = next_instr->op.code; - oparg = oparg << 8 | next_instr->op.arg; - // Make sure the next instruction isn't a RESUME, since that needs - // to trace properly (and shouldn't have an EXTENDED_ARG, anyways): - assert(opcode != RESUME); - PRE_DISPATCH_GOTO(); - } - opcode = _PyOpcode_Deopt[opcode]; - if (_PyOpcode_Caches[opcode]) { - uint16_t *counter = &next_instr[1].cache; - // The instruction is going to decrement the counter, so we need to - // increment it here to make sure it doesn't try to specialize: - if (!ADAPTIVE_COUNTER_IS_MAX(*counter)) { - INCREMENT_ADAPTIVE_COUNTER(*counter); - } - } - DISPATCH_GOTO(); - } - #if USE_COMPUTED_GOTOS _unknown_opcode: #else @@ -988,12 +832,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int PyTraceBack_Here(f); } } - - if (tstate->c_tracefunc != NULL) { - /* Make sure state is set to FRAME_UNWINDING for tracing */ - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, - tstate, frame); - } + monitor_raise(tstate, frame, next_instr-1); exception_unwind: { @@ -1012,8 +851,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } assert(STACK_LEVEL() == 0); _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_UNWIND(); - DTRACE_FUNCTION_EXIT(); + monitor_unwind(tstate, frame, next_instr-1); goto exit_unwind; } @@ -1036,8 +874,10 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int available to the handler, so a program can emulate the Python main loop. */ - PUSH(_PyErr_GetRaisedException(tstate)); + PyObject *exc = _PyErr_GetRaisedException(tstate); + PUSH(exc); JUMPTO(handler); + monitor_handled(tstate, frame, next_instr, exc); /* Resume normal execution */ DISPATCH(); } @@ -1051,10 +891,10 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _PyInterpreterFrame *dying = frame; frame = cframe.current_frame = dying->previous; _PyEvalFrameClearAndPop(tstate, dying); + frame->return_offset = 0; if (frame == &entry_frame) { /* Restore previous cframe and exit */ tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; assert(tstate->cframe->current_frame == frame->previous); _Py_LeaveRecursiveCallTstate(tstate); return NULL; @@ -1664,6 +1504,49 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, return NULL; } +/* Same as _PyEvalFramePushAndInit but takes an args tuple and kwargs dict. + Steals references to func, callargs and kwargs. 
+*/ +static _PyInterpreterFrame * +_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, + PyObject *locals, Py_ssize_t nargs, PyObject *callargs, PyObject *kwargs) +{ + bool has_dict = (kwargs != NULL && PyDict_GET_SIZE(kwargs) > 0); + PyObject *kwnames = NULL; + PyObject *const *newargs; + if (has_dict) { + newargs = _PyStack_UnpackDict(tstate, _PyTuple_ITEMS(callargs), nargs, kwargs, &kwnames); + if (newargs == NULL) { + Py_DECREF(func); + goto error; + } + } + else { + newargs = &PyTuple_GET_ITEM(callargs, 0); + /* We need to incref all our args since the new frame steals the references. */ + for (Py_ssize_t i = 0; i < nargs; ++i) { + Py_INCREF(PyTuple_GET_ITEM(callargs, i)); + } + } + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( + tstate, (PyFunctionObject *)func, locals, + newargs, nargs, kwnames + ); + if (has_dict) { + _PyStack_UnpackDict_FreeNoDecRef(newargs, kwnames); + } + /* No need to decref func here because the reference has been stolen by + _PyEvalFramePushAndInit. + */ + Py_DECREF(callargs); + Py_XDECREF(kwargs); + return new_frame; +error: + Py_DECREF(callargs); + Py_XDECREF(kwargs); + return NULL; +} + PyObject * _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, PyObject *locals, @@ -2020,105 +1903,108 @@ unpack_iterable(PyThreadState *tstate, PyObject *v, return 0; } -static void -call_exc_trace(Py_tracefunc func, PyObject *self, - PyThreadState *tstate, - _PyInterpreterFrame *f) +static int +do_monitor_exc(PyThreadState *tstate, _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr, int event) { - PyObject *exc = _PyErr_GetRaisedException(tstate); - assert(exc && PyExceptionInstance_Check(exc)); - PyObject *type = PyExceptionInstance_Class(exc); - PyObject *traceback = PyException_GetTraceback(exc); - if (traceback == NULL) { - traceback = Py_NewRef(Py_None); + assert(event < PY_MONITORING_UNGROUPED_EVENTS); + PyObject *exc = PyErr_GetRaisedException(); + assert(exc != NULL); + int err = _Py_call_instrumentation_arg(tstate, event, frame, instr, exc); + if (err == 0) { + PyErr_SetRaisedException(exc); } - PyObject *arg = PyTuple_Pack(3, type, exc, traceback); - Py_XDECREF(traceback); - - if (arg == NULL) { - _PyErr_SetRaisedException(tstate, exc); - return; + else { + Py_DECREF(exc); } - int err = call_trace(func, self, tstate, f, PyTrace_EXCEPTION, arg); - Py_DECREF(arg); - if (err == 0) { - _PyErr_SetRaisedException(tstate, exc); + return err; +} + +static inline int +no_tools_for_event(PyThreadState *tstate, _PyInterpreterFrame *frame, int event) +{ + _PyCoMonitoringData *data = frame->f_code->_co_monitoring; + if (data) { + if (data->active_monitors.tools[event] == 0) { + return 1; + } } else { - Py_XDECREF(exc); + if (tstate->interp->monitors.tools[event] == 0) { + return 1; + } } + return 0; +} + +static void +monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr) +{ + if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_RAISE)) { + return; + } + do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_RAISE); } static int -call_trace_protected(Py_tracefunc func, PyObject *obj, - PyThreadState *tstate, _PyInterpreterFrame *frame, - int what, PyObject *arg) +monitor_stop_iteration(PyThreadState *tstate, _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr) { - PyObject *exc = _PyErr_GetRaisedException(tstate); - int err = call_trace(func, obj, tstate, frame, what, arg); - if (err == 0) - { - _PyErr_SetRaisedException(tstate, exc); + if (no_tools_for_event(tstate, frame, 
PY_MONITORING_EVENT_STOP_ITERATION)) { return 0; } - else { - Py_XDECREF(exc); - return -1; + return do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_STOP_ITERATION); +} + +static void +monitor_unwind(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr) +{ + if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_PY_UNWIND)) { + return; } + _Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_UNWIND, frame, instr); +} + + +static void +monitor_handled(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr, PyObject *exc) +{ + if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_EXCEPTION_HANDLED)) { + return; + } + _Py_call_instrumentation_arg(tstate, PY_MONITORING_EVENT_EXCEPTION_HANDLED, frame, instr, exc); } static void -initialize_trace_info(PyTraceInfo *trace_info, _PyInterpreterFrame *frame) +monitor_throw(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr) { - PyCodeObject *code = frame->f_code; - if (trace_info->code != code) { - trace_info->code = code; - _PyCode_InitAddressRange(code, &trace_info->bounds); + if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_PY_THROW)) { + return; } + _Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_THROW, frame, instr); } void PyThreadState_EnterTracing(PyThreadState *tstate) { + assert(tstate->tracing >= 0); tstate->tracing++; - tstate->cframe->use_tracing = 0; } void PyThreadState_LeaveTracing(PyThreadState *tstate) { - assert(tstate->tracing > 0 && tstate->cframe->use_tracing == 0); + assert(tstate->tracing > 0); tstate->tracing--; - _PyThreadState_UpdateTracingState(tstate); } -static int -call_trace(Py_tracefunc func, PyObject *obj, - PyThreadState *tstate, _PyInterpreterFrame *frame, - int what, PyObject *arg) -{ - int result; - if (tstate->tracing) { - return 0; - } - PyFrameObject *f = _PyFrame_GetFrameObject(frame); - if (f == NULL) { - return -1; - } - int old_what = tstate->tracing_what; - tstate->tracing_what = what; - PyThreadState_EnterTracing(tstate); - assert(_PyInterpreterFrame_LASTI(frame) >= 0); - if (_PyCode_InitLineArray(frame->f_code)) { - return -1; - } - f->f_lineno = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame)); - result = func(obj, f, what, arg); - f->f_lineno = 0; - PyThreadState_LeaveTracing(tstate); - tstate->tracing_what = old_what; - return result; -} PyObject* _PyEval_CallTracing(PyObject *func, PyObject *args) @@ -2126,7 +2012,6 @@ _PyEval_CallTracing(PyObject *func, PyObject *args) // Save and disable tracing PyThreadState *tstate = _PyThreadState_GET(); int save_tracing = tstate->tracing; - int save_use_tracing = tstate->cframe->use_tracing; tstate->tracing = 0; // Call the tracing function @@ -2134,81 +2019,9 @@ _PyEval_CallTracing(PyObject *func, PyObject *args) // Restore tracing tstate->tracing = save_tracing; - tstate->cframe->use_tracing = save_use_tracing; - return result; -} - -/* See Objects/lnotab_notes.txt for a description of how tracing works. */ -static int -maybe_call_line_trace(Py_tracefunc func, PyObject *obj, - PyThreadState *tstate, _PyInterpreterFrame *frame, int instr_prev) -{ - int result = 0; - - /* If the last instruction falls at the start of a line or if it - represents a jump backwards, update the frame's line number and - then call the trace function if we're tracing source lines. 
- */ - if (_PyCode_InitLineArray(frame->f_code)) { - return -1; - } - int lastline; - if (instr_prev <= frame->f_code->_co_firsttraceable) { - lastline = -1; - } - else { - lastline = _PyCode_LineNumberFromArray(frame->f_code, instr_prev); - } - int line = _PyCode_LineNumberFromArray(frame->f_code, _PyInterpreterFrame_LASTI(frame)); - PyFrameObject *f = _PyFrame_GetFrameObject(frame); - if (f == NULL) { - return -1; - } - if (line != -1 && f->f_trace_lines) { - /* Trace backward edges (except in 'yield from') or if line number has changed */ - int trace = line != lastline || - (_PyInterpreterFrame_LASTI(frame) < instr_prev && - // SEND has no quickened forms, so no need to use _PyOpcode_Deopt - // here: - frame->prev_instr->op.code != SEND); - if (trace) { - result = call_trace(func, obj, tstate, frame, PyTrace_LINE, Py_None); - } - } - /* Always emit an opcode event if we're tracing all opcodes. */ - if (f->f_trace_opcodes && result == 0) { - result = call_trace(func, obj, tstate, frame, PyTrace_OPCODE, Py_None); - } return result; } -int -_PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) -{ - assert(is_tstate_valid(tstate)); - /* The caller must hold the GIL */ - assert(PyGILState_Check()); - - /* Call _PySys_Audit() in the context of the current thread state, - even if tstate is not the current thread state. */ - PyThreadState *current_tstate = _PyThreadState_GET(); - if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) { - return -1; - } - - tstate->c_profilefunc = func; - PyObject *old_profileobj = tstate->c_profileobj; - tstate->c_profileobj = Py_XNewRef(arg); - /* Flag that tracing or profiling is turned on */ - _PyThreadState_UpdateTracingState(tstate); - - // gh-98257: Only call Py_XDECREF() once the new profile function is fully - // set, so it's safe to call sys.setprofile() again (reentrant call). - Py_XDECREF(old_profileobj); - - return 0; -} - void PyEval_SetProfile(Py_tracefunc func, PyObject *arg) { @@ -2240,33 +2053,6 @@ PyEval_SetProfileAllThreads(Py_tracefunc func, PyObject *arg) } } -int -_PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) -{ - assert(is_tstate_valid(tstate)); - /* The caller must hold the GIL */ - assert(PyGILState_Check()); - - /* Call _PySys_Audit() in the context of the current thread state, - even if tstate is not the current thread state. */ - PyThreadState *current_tstate = _PyThreadState_GET(); - if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) { - return -1; - } - - tstate->c_tracefunc = func; - PyObject *old_traceobj = tstate->c_traceobj; - tstate->c_traceobj = Py_XNewRef(arg); - /* Flag that tracing or profiling is turned on */ - _PyThreadState_UpdateTracingState(tstate); - - // gh-98257: Only call Py_XDECREF() once the new trace function is fully - // set, so it's safe to call sys.settrace() again (reentrant call). 
- Py_XDECREF(old_traceobj); - - return 0; -} - void PyEval_SetTrace(Py_tracefunc func, PyObject *arg) { @@ -2492,114 +2278,6 @@ PyEval_GetFuncDesc(PyObject *func) return " object"; } -#define C_TRACE(x, call) \ -if (use_tracing && tstate->c_profilefunc) { \ - if (call_trace(tstate->c_profilefunc, tstate->c_profileobj, \ - tstate, tstate->cframe->current_frame, \ - PyTrace_C_CALL, func)) { \ - x = NULL; \ - } \ - else { \ - x = call; \ - if (tstate->c_profilefunc != NULL) { \ - if (x == NULL) { \ - call_trace_protected(tstate->c_profilefunc, \ - tstate->c_profileobj, \ - tstate, tstate->cframe->current_frame, \ - PyTrace_C_EXCEPTION, func); \ - /* XXX should pass (type, value, tb) */ \ - } else { \ - if (call_trace(tstate->c_profilefunc, \ - tstate->c_profileobj, \ - tstate, tstate->cframe->current_frame, \ - PyTrace_C_RETURN, func)) { \ - Py_DECREF(x); \ - x = NULL; \ - } \ - } \ - } \ - } \ -} else { \ - x = call; \ - } - - -static PyObject * -trace_call_function(PyThreadState *tstate, - PyObject *func, - PyObject **args, Py_ssize_t nargs, - PyObject *kwnames) -{ - int use_tracing = 1; - PyObject *x; - if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { - C_TRACE(x, PyObject_Vectorcall(func, args, nargs, kwnames)); - return x; - } - else if (Py_IS_TYPE(func, &PyMethodDescr_Type) && nargs > 0) { - /* We need to create a temporary bound method as argument - for profiling. - - If nargs == 0, then this cannot work because we have no - "self". In any case, the call itself would raise - TypeError (foo needs an argument), so we just skip - profiling. */ - PyObject *self = args[0]; - func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self)); - if (func == NULL) { - return NULL; - } - C_TRACE(x, PyObject_Vectorcall(func, - args+1, nargs-1, - kwnames)); - Py_DECREF(func); - return x; - } - return PyObject_Vectorcall(func, args, nargs | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames); -} - -static PyObject * -do_call_core(PyThreadState *tstate, - PyObject *func, - PyObject *callargs, - PyObject *kwdict, - int use_tracing - ) -{ - PyObject *result; - if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { - C_TRACE(result, PyObject_Call(func, callargs, kwdict)); - return result; - } - else if (Py_IS_TYPE(func, &PyMethodDescr_Type)) { - Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); - if (nargs > 0 && use_tracing) { - /* We need to create a temporary bound method as argument - for profiling. - - If nargs == 0, then this cannot work because we have no - "self". In any case, the call itself would raise - TypeError (foo needs an argument), so we just skip - profiling. */ - PyObject *self = PyTuple_GET_ITEM(callargs, 0); - func = Py_TYPE(func)->tp_descr_get(func, self, (PyObject*)Py_TYPE(self)); - if (func == NULL) { - return NULL; - } - - C_TRACE(result, _PyObject_FastCallDictTstate( - tstate, func, - &_PyTuple_ITEMS(callargs)[1], - nargs - 1, - kwdict)); - Py_DECREF(func); - return result; - } - } - EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func); - return PyObject_Call(func, callargs, kwdict); -} - /* Extract a slice index from a PyLong or an object with the nb_index slot defined, and store in *pi. 
Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX, @@ -2973,69 +2651,6 @@ PyUnstable_Eval_RequestCodeExtraIndex(freefunc free) return new_index; } -static void -dtrace_function_entry(_PyInterpreterFrame *frame) -{ - const char *filename; - const char *funcname; - int lineno; - - PyCodeObject *code = frame->f_code; - filename = PyUnicode_AsUTF8(code->co_filename); - funcname = PyUnicode_AsUTF8(code->co_name); - lineno = _PyInterpreterFrame_GetLine(frame); - - PyDTrace_FUNCTION_ENTRY(filename, funcname, lineno); -} - -static void -dtrace_function_return(_PyInterpreterFrame *frame) -{ - const char *filename; - const char *funcname; - int lineno; - - PyCodeObject *code = frame->f_code; - filename = PyUnicode_AsUTF8(code->co_filename); - funcname = PyUnicode_AsUTF8(code->co_name); - lineno = _PyInterpreterFrame_GetLine(frame); - - PyDTrace_FUNCTION_RETURN(filename, funcname, lineno); -} - -/* DTrace equivalent of maybe_call_line_trace. */ -static void -maybe_dtrace_line(_PyInterpreterFrame *frame, - PyTraceInfo *trace_info, - int instr_prev) -{ - const char *co_filename, *co_name; - - /* If the last instruction executed isn't in the current - instruction window, reset the window. - */ - initialize_trace_info(trace_info, frame); - int lastline = _PyCode_CheckLineNumber(instr_prev*sizeof(_Py_CODEUNIT), &trace_info->bounds); - int addr = _PyInterpreterFrame_LASTI(frame) * sizeof(_Py_CODEUNIT); - int line = _PyCode_CheckLineNumber(addr, &trace_info->bounds); - if (line != -1) { - /* Trace backward edges or first instruction of a new line */ - if (_PyInterpreterFrame_LASTI(frame) < instr_prev || - (line != lastline && addr == trace_info->bounds.ar_start)) - { - co_filename = PyUnicode_AsUTF8(frame->f_code->co_filename); - if (!co_filename) { - co_filename = "?"; - } - co_name = PyUnicode_AsUTF8(frame->f_code->co_name); - if (!co_name) { - co_name = "?"; - } - PyDTrace_LINE(co_filename, co_name, line); - } - } -} - /* Implement Py_EnterRecursiveCall() and Py_LeaveRecursiveCall() as functions for the limited API. 
*/ diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 749d8144bf7a23..29796be4b80e93 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -467,7 +467,7 @@ void _PyEval_SetSwitchInterval(unsigned long microseconds) gil->interval = microseconds; } -unsigned long _PyEval_GetSwitchInterval() +unsigned long _PyEval_GetSwitchInterval(void) { struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; return gil->interval; diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 98b72ec1b36428..485771ac65a767 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -93,8 +93,6 @@ { \ NEXTOPARG(); \ PRE_DISPATCH_GOTO(); \ - assert(cframe.use_tracing == 0 || cframe.use_tracing == 255); \ - opcode |= cframe.use_tracing OR_DTRACE_LINE; \ DISPATCH_GOTO(); \ } @@ -102,7 +100,6 @@ { \ opcode = next_instr->op.code; \ PRE_DISPATCH_GOTO(); \ - opcode |= cframe.use_tracing OR_DTRACE_LINE; \ DISPATCH_GOTO(); \ } @@ -183,7 +180,7 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define PREDICT(next_op) \ do { \ _Py_CODEUNIT word = *next_instr; \ - opcode = word.op.code | cframe.use_tracing OR_DTRACE_LINE; \ + opcode = word.op.code; \ if (opcode == next_op) { \ oparg = word.op.arg; \ INSTRUCTION_START(next_op); \ @@ -283,46 +280,6 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define BUILTINS() frame->f_builtins #define LOCALS() frame->f_locals -/* Shared opcode macros */ - -#define TRACE_FUNCTION_EXIT() \ - if (cframe.use_tracing) { \ - if (trace_function_exit(tstate, frame, retval)) { \ - Py_DECREF(retval); \ - goto exit_unwind; \ - } \ - } - -#define DTRACE_FUNCTION_EXIT() \ - if (PyDTrace_FUNCTION_RETURN_ENABLED()) { \ - dtrace_function_return(frame); \ - } - -#define TRACE_FUNCTION_UNWIND() \ - if (cframe.use_tracing) { \ - /* Since we are already unwinding, \ - * we don't care if this raises */ \ - trace_function_exit(tstate, frame, NULL); \ - } - -#define TRACE_FUNCTION_ENTRY() \ - if (cframe.use_tracing) { \ - _PyFrame_SetStackPointer(frame, stack_pointer); \ - int err = trace_function_entry(tstate, frame); \ - stack_pointer = _PyFrame_GetStackPointer(frame); \ - if (err) { \ - goto error; \ - } \ - } - -#define TRACE_FUNCTION_THROW_ENTRY() \ - if (cframe.use_tracing) { \ - assert(frame->stacktop >= 0); \ - if (trace_function_entry(tstate, frame)) { \ - goto exit_unwind; \ - } \ - } - #define DTRACE_FUNCTION_ENTRY() \ if (PyDTrace_FUNCTION_ENTRY_ENABLED()) { \ dtrace_function_entry(frame); \ @@ -370,3 +327,18 @@ do { \ _Py_DECREF_NO_DEALLOC(right); \ } \ } while (0) + +// If a trace function sets a new f_lineno and +// *then* raises, we use the destination when searching +// for an exception handler, displaying the traceback, and so on +#define INSTRUMENTED_JUMP(src, dest, event) \ +do { \ + _PyFrame_SetStackPointer(frame, stack_pointer); \ + int err = _Py_call_instrumentation_jump(tstate, event, frame, src, dest); \ + stack_pointer = _PyFrame_GetStackPointer(frame); \ + if (err) { \ + next_instr = (dest)+1; \ + goto error; \ + } \ + next_instr = frame->prev_instr; \ +} while (0); diff --git a/Python/clinic/instrumentation.c.h b/Python/clinic/instrumentation.c.h new file mode 100644 index 00000000000000..cf3984ca24bbfe --- /dev/null +++ b/Python/clinic/instrumentation.c.h @@ -0,0 +1,311 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(monitoring_use_tool_id__doc__, +"use_tool_id($module, tool_id, name, /)\n" 
+"--\n" +"\n"); + +#define MONITORING_USE_TOOL_ID_METHODDEF \ + {"use_tool_id", _PyCFunction_CAST(monitoring_use_tool_id), METH_FASTCALL, monitoring_use_tool_id__doc__}, + +static PyObject * +monitoring_use_tool_id_impl(PyObject *module, int tool_id, PyObject *name); + +static PyObject * +monitoring_use_tool_id(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int tool_id; + PyObject *name; + + if (!_PyArg_CheckPositional("use_tool_id", nargs, 2, 2)) { + goto exit; + } + tool_id = _PyLong_AsInt(args[0]); + if (tool_id == -1 && PyErr_Occurred()) { + goto exit; + } + name = args[1]; + return_value = monitoring_use_tool_id_impl(module, tool_id, name); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_free_tool_id__doc__, +"free_tool_id($module, tool_id, /)\n" +"--\n" +"\n"); + +#define MONITORING_FREE_TOOL_ID_METHODDEF \ + {"free_tool_id", (PyCFunction)monitoring_free_tool_id, METH_O, monitoring_free_tool_id__doc__}, + +static PyObject * +monitoring_free_tool_id_impl(PyObject *module, int tool_id); + +static PyObject * +monitoring_free_tool_id(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + int tool_id; + + tool_id = _PyLong_AsInt(arg); + if (tool_id == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = monitoring_free_tool_id_impl(module, tool_id); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_get_tool__doc__, +"get_tool($module, tool_id, /)\n" +"--\n" +"\n"); + +#define MONITORING_GET_TOOL_METHODDEF \ + {"get_tool", (PyCFunction)monitoring_get_tool, METH_O, monitoring_get_tool__doc__}, + +static PyObject * +monitoring_get_tool_impl(PyObject *module, int tool_id); + +static PyObject * +monitoring_get_tool(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + int tool_id; + + tool_id = _PyLong_AsInt(arg); + if (tool_id == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = monitoring_get_tool_impl(module, tool_id); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_register_callback__doc__, +"register_callback($module, tool_id, event, func, /)\n" +"--\n" +"\n"); + +#define MONITORING_REGISTER_CALLBACK_METHODDEF \ + {"register_callback", _PyCFunction_CAST(monitoring_register_callback), METH_FASTCALL, monitoring_register_callback__doc__}, + +static PyObject * +monitoring_register_callback_impl(PyObject *module, int tool_id, int event, + PyObject *func); + +static PyObject * +monitoring_register_callback(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int tool_id; + int event; + PyObject *func; + + if (!_PyArg_CheckPositional("register_callback", nargs, 3, 3)) { + goto exit; + } + tool_id = _PyLong_AsInt(args[0]); + if (tool_id == -1 && PyErr_Occurred()) { + goto exit; + } + event = _PyLong_AsInt(args[1]); + if (event == -1 && PyErr_Occurred()) { + goto exit; + } + func = args[2]; + return_value = monitoring_register_callback_impl(module, tool_id, event, func); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_get_events__doc__, +"get_events($module, tool_id, /)\n" +"--\n" +"\n"); + +#define MONITORING_GET_EVENTS_METHODDEF \ + {"get_events", (PyCFunction)monitoring_get_events, METH_O, monitoring_get_events__doc__}, + +static int +monitoring_get_events_impl(PyObject *module, int tool_id); + +static PyObject * +monitoring_get_events(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + int tool_id; + int _return_value; + + tool_id = _PyLong_AsInt(arg); + if (tool_id == -1 && 
PyErr_Occurred()) { + goto exit; + } + _return_value = monitoring_get_events_impl(module, tool_id); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromLong((long)_return_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_set_events__doc__, +"set_events($module, tool_id, event_set, /)\n" +"--\n" +"\n"); + +#define MONITORING_SET_EVENTS_METHODDEF \ + {"set_events", _PyCFunction_CAST(monitoring_set_events), METH_FASTCALL, monitoring_set_events__doc__}, + +static PyObject * +monitoring_set_events_impl(PyObject *module, int tool_id, int event_set); + +static PyObject * +monitoring_set_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int tool_id; + int event_set; + + if (!_PyArg_CheckPositional("set_events", nargs, 2, 2)) { + goto exit; + } + tool_id = _PyLong_AsInt(args[0]); + if (tool_id == -1 && PyErr_Occurred()) { + goto exit; + } + event_set = _PyLong_AsInt(args[1]); + if (event_set == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = monitoring_set_events_impl(module, tool_id, event_set); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_get_local_events__doc__, +"get_local_events($module, tool_id, code, /)\n" +"--\n" +"\n"); + +#define MONITORING_GET_LOCAL_EVENTS_METHODDEF \ + {"get_local_events", _PyCFunction_CAST(monitoring_get_local_events), METH_FASTCALL, monitoring_get_local_events__doc__}, + +static int +monitoring_get_local_events_impl(PyObject *module, int tool_id, + PyObject *code); + +static PyObject * +monitoring_get_local_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int tool_id; + PyObject *code; + int _return_value; + + if (!_PyArg_CheckPositional("get_local_events", nargs, 2, 2)) { + goto exit; + } + tool_id = _PyLong_AsInt(args[0]); + if (tool_id == -1 && PyErr_Occurred()) { + goto exit; + } + code = args[1]; + _return_value = monitoring_get_local_events_impl(module, tool_id, code); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromLong((long)_return_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_set_local_events__doc__, +"set_local_events($module, tool_id, code, event_set, /)\n" +"--\n" +"\n"); + +#define MONITORING_SET_LOCAL_EVENTS_METHODDEF \ + {"set_local_events", _PyCFunction_CAST(monitoring_set_local_events), METH_FASTCALL, monitoring_set_local_events__doc__}, + +static PyObject * +monitoring_set_local_events_impl(PyObject *module, int tool_id, + PyObject *code, int event_set); + +static PyObject * +monitoring_set_local_events(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int tool_id; + PyObject *code; + int event_set; + + if (!_PyArg_CheckPositional("set_local_events", nargs, 3, 3)) { + goto exit; + } + tool_id = _PyLong_AsInt(args[0]); + if (tool_id == -1 && PyErr_Occurred()) { + goto exit; + } + code = args[1]; + event_set = _PyLong_AsInt(args[2]); + if (event_set == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = monitoring_set_local_events_impl(module, tool_id, code, event_set); + +exit: + return return_value; +} + +PyDoc_STRVAR(monitoring_restart_events__doc__, +"restart_events($module, /)\n" +"--\n" +"\n"); + +#define MONITORING_RESTART_EVENTS_METHODDEF \ + {"restart_events", (PyCFunction)monitoring_restart_events, METH_NOARGS, monitoring_restart_events__doc__}, + +static PyObject * +monitoring_restart_events_impl(PyObject *module); + 
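The clinic wrappers in this header only validate and unpack arguments for the tool/event API. A minimal usage sketch, assuming these entry points are exposed as sys.monitoring (the module table itself is not part of this generated header); the tool choice and callback names below are illustrative, not taken from this patch:

    import sys

    TOOL_ID = sys.monitoring.PROFILER_ID        # tool ids 0-5 are reserved
    sys.monitoring.use_tool_id(TOOL_ID, "demo-profiler")

    def on_py_start(code, instruction_offset):
        # PY_START callbacks receive the code object and the offset of the
        # instruction that fired the event.
        print("entering", code.co_qualname)

    sys.monitoring.register_callback(TOOL_ID, sys.monitoring.events.PY_START, on_py_start)
    sys.monitoring.set_events(TOOL_ID, sys.monitoring.events.PY_START)
    # ... run the code being observed ...
    sys.monitoring.set_events(TOOL_ID, 0)        # stop delivering events
    sys.monitoring.free_tool_id(TOOL_ID)
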
+static PyObject * +monitoring_restart_events(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return monitoring_restart_events_impl(module); +} + +PyDoc_STRVAR(monitoring__all_events__doc__, +"_all_events($module, /)\n" +"--\n" +"\n"); + +#define MONITORING__ALL_EVENTS_METHODDEF \ + {"_all_events", (PyCFunction)monitoring__all_events, METH_NOARGS, monitoring__all_events__doc__}, + +static PyObject * +monitoring__all_events_impl(PyObject *module); + +static PyObject * +monitoring__all_events(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return monitoring__all_events_impl(module); +} +/*[clinic end generated code: output=11cc0803875b3ffa input=a9049054013a1b77]*/ diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h index 46252dd404325b..7a7c188bcccc37 100644 --- a/Python/clinic/sysmodule.c.h +++ b/Python/clinic/sysmodule.c.h @@ -912,6 +912,34 @@ sys_getallocatedblocks(PyObject *module, PyObject *Py_UNUSED(ignored)) return return_value; } +PyDoc_STRVAR(sys_getunicodeinternedsize__doc__, +"getunicodeinternedsize($module, /)\n" +"--\n" +"\n" +"Return the number of elements of the unicode interned dictionary"); + +#define SYS_GETUNICODEINTERNEDSIZE_METHODDEF \ + {"getunicodeinternedsize", (PyCFunction)sys_getunicodeinternedsize, METH_NOARGS, sys_getunicodeinternedsize__doc__}, + +static Py_ssize_t +sys_getunicodeinternedsize_impl(PyObject *module); + +static PyObject * +sys_getunicodeinternedsize(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + PyObject *return_value = NULL; + Py_ssize_t _return_value; + + _return_value = sys_getunicodeinternedsize_impl(module); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromSsize_t(_return_value); + +exit: + return return_value; +} + PyDoc_STRVAR(sys__getframe__doc__, "_getframe($module, depth=0, /)\n" "--\n" @@ -1387,4 +1415,4 @@ sys__getframemodulename(PyObject *module, PyObject *const *args, Py_ssize_t narg #ifndef SYS_GETANDROIDAPILEVEL_METHODDEF #define SYS_GETANDROIDAPILEVEL_METHODDEF #endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */ -/*[clinic end generated code: output=5c761f14326ced54 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6d598acc26237fbe input=a9049054013a1b77]*/ diff --git a/Python/codecs.c b/Python/codecs.c index b2087b499dfdba..1983f56ba204c1 100644 --- a/Python/codecs.c +++ b/Python/codecs.c @@ -11,6 +11,7 @@ Copyright (c) Corporation for National Research Initiatives. #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_interp.h" // PyInterpreterState.codec_search_path +#include "pycore_pyerrors.h" // _PyErr_FormatNote() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI #include <ctype.h> @@ -382,22 +383,6 @@ PyObject *PyCodec_StreamWriter(const char *encoding, return codec_getstreamcodec(encoding, stream, errors, 3); } -/* Helper that tries to ensure the reported exception chain indicates the - * codec that was invoked to trigger the failure without changing the type - * of the exception raised. - */ -static void -wrap_codec_error(const char *operation, - const char *encoding) -{ - /* TrySetFromCause will replace the active exception with a suitably - * updated clone if it can, otherwise it will leave the original - * exception alone. - */ - _PyErr_TrySetFromCause("%s with '%s' codec failed", - operation, encoding); -} - /* Encode an object (e.g. 
a Unicode object) using the given encoding and return the resulting encoded object (usually a Python string). @@ -418,7 +403,7 @@ _PyCodec_EncodeInternal(PyObject *object, result = PyObject_Call(encoder, args, NULL); if (result == NULL) { - wrap_codec_error("encoding", encoding); + _PyErr_FormatNote("%s with '%s' codec failed", "encoding", encoding); goto onError; } @@ -463,7 +448,7 @@ _PyCodec_DecodeInternal(PyObject *object, result = PyObject_Call(decoder, args, NULL); if (result == NULL) { - wrap_codec_error("decoding", encoding); + _PyErr_FormatNote("%s with '%s' codec failed", "decoding", encoding); goto onError; } if (!PyTuple_Check(result) || diff --git a/Python/compile.c b/Python/compile.c index e109afd54260a0..5e8f5da5b4ad68 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -6,10 +6,10 @@ * object: * 1. Checks for future statements. See future.c * 2. Builds a symbol table. See symtable.c. - * 3. Generate code for basic blocks. See compiler_mod() in this file. - * 4. Assemble the basic blocks into final code. See assemble() in - * this file. - * 5. Optimize the byte code (peephole optimizations). + * 3. Generate an instruction sequence. See compiler_mod() in this file. + * 4. Generate a control flow graph and run optimizations on it. See flowgraph.c. + * 5. Assemble the basic blocks into final code. See optimize_and_assemble() in + * this file, and assembler.c. * * Note that compiler_mod() suggests module, but the module ast type * (mod_ty) has cases for expressions and interactive statements. @@ -23,23 +23,21 @@ #include <stdbool.h> -// Need _PyOpcode_RelativeJump of pycore_opcode.h -#define NEED_OPCODE_TABLES - #include "Python.h" #include "pycore_ast.h" // _PyAST_GetDocString() +#define NEED_OPCODE_TABLES +#include "pycore_opcode_utils.h" +#undef NEED_OPCODE_TABLES +#include "pycore_flowgraph.h" #include "pycore_code.h" // _PyCode_New() #include "pycore_compile.h" #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() -#include "pycore_opcode.h" // _PyOpcode_Caches #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_symtable.h" // PySTEntryObject, _PyFuture_FromAST() #include "opcode_metadata.h" // _PyOpcode_opcode_metadata, _PyOpcode_num_popped/pushed - -#define DEFAULT_BLOCK_SIZE 16 #define DEFAULT_CODE_SIZE 128 #define DEFAULT_LNOTAB_SIZE 16 #define DEFAULT_CNOTAB_SIZE 32 @@ -83,68 +81,17 @@ */ #define MAX_ALLOWED_STACK_USE (STACK_USE_GUIDELINE * 100) - -#define MAX_REAL_OPCODE 254 - -#define IS_WITHIN_OPCODE_RANGE(opcode) \ - (((opcode) >= 0 && (opcode) <= MAX_REAL_OPCODE) || \ - IS_PSEUDO_OPCODE(opcode)) - -#define IS_JUMP_OPCODE(opcode) \ - is_bit_set_in_table(_PyOpcode_Jump, opcode) - -#define IS_BLOCK_PUSH_OPCODE(opcode) \ - ((opcode) == SETUP_FINALLY || \ - (opcode) == SETUP_WITH || \ - (opcode) == SETUP_CLEANUP) - -#define HAS_TARGET(opcode) \ - (IS_JUMP_OPCODE(opcode) || IS_BLOCK_PUSH_OPCODE(opcode)) - -/* opcodes that must be last in the basicblock */ -#define IS_TERMINATOR_OPCODE(opcode) \ - (IS_JUMP_OPCODE(opcode) || IS_SCOPE_EXIT_OPCODE(opcode)) - -/* opcodes which are not emitted in codegen stage, only by the assembler */ -#define IS_ASSEMBLER_OPCODE(opcode) \ - ((opcode) == JUMP_FORWARD || \ - (opcode) == JUMP_BACKWARD || \ - (opcode) == JUMP_BACKWARD_NO_INTERRUPT) - -#define IS_BACKWARDS_JUMP_OPCODE(opcode) \ - ((opcode) == JUMP_BACKWARD || \ - (opcode) == JUMP_BACKWARD_NO_INTERRUPT) - -#define IS_UNCONDITIONAL_JUMP_OPCODE(opcode) \ - ((opcode) == JUMP || \ - (opcode) == JUMP_NO_INTERRUPT || \ - (opcode) == JUMP_FORWARD 
|| \ - (opcode) == JUMP_BACKWARD || \ - (opcode) == JUMP_BACKWARD_NO_INTERRUPT) - -#define IS_SCOPE_EXIT_OPCODE(opcode) \ - ((opcode) == RETURN_VALUE || \ - (opcode) == RETURN_CONST || \ - (opcode) == RAISE_VARARGS || \ - (opcode) == RERAISE) - -#define IS_SUPERINSTRUCTION_OPCODE(opcode) \ - ((opcode) == LOAD_FAST__LOAD_FAST || \ - (opcode) == LOAD_FAST__LOAD_CONST || \ - (opcode) == LOAD_CONST__LOAD_FAST || \ - (opcode) == STORE_FAST__LOAD_FAST || \ - (opcode) == STORE_FAST__STORE_FAST) - #define IS_TOP_LEVEL_AWAIT(C) ( \ ((C)->c_flags.cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \ && ((C)->u->u_ste->ste_type == ModuleBlock)) typedef _PyCompilerSrcLocation location; +typedef _PyCfgInstruction cfg_instr; +typedef _PyCfgBasicblock basicblock; +typedef _PyCfgBuilder cfg_builder; #define LOCATION(LNO, END_LNO, COL, END_COL) \ - ((const location){(LNO), (END_LNO), (COL), (END_COL)}) - -static location NO_LOCATION = {-1, -1, -1, -1}; + ((const _PyCompilerSrcLocation){(LNO), (END_LNO), (COL), (END_COL)}) /* Return true if loc1 starts after loc2 ends. */ static inline bool @@ -154,22 +101,11 @@ location_is_after(location loc1, location loc2) { (loc1.col_offset > loc2.end_col_offset)); } -static inline bool -same_location(location a, location b) -{ - return a.lineno == b.lineno && - a.end_lineno == b.end_lineno && - a.col_offset == b.col_offset && - a.end_col_offset == b.end_col_offset; -} - #define LOC(x) SRC_LOCATION_FROM_AST(x) -typedef struct jump_target_label_ { - int id; -} jump_target_label; +typedef _PyCfgJumpTargetLabel jump_target_label; -static struct jump_target_label_ NO_LABEL = {-1}; +static jump_target_label NO_LABEL = {-1}; #define SAME_LABEL(L1, L2) ((L1).id == (L2).id) #define IS_LABEL(L) (!SAME_LABEL((L), (NO_LABEL))) @@ -183,190 +119,6 @@ static struct jump_target_label_ NO_LABEL = {-1}; #define USE_LABEL(C, LBL) \ RETURN_IF_ERROR(instr_sequence_use_label(INSTR_SEQUENCE(C), (LBL).id)) -struct cfg_instr { - int i_opcode; - int i_oparg; - location i_loc; - struct basicblock_ *i_target; /* target block (if jump instruction) */ - struct basicblock_ *i_except; /* target block when exception is raised */ -}; - -/* One arg*/ -#define INSTR_SET_OP1(I, OP, ARG) \ - do { \ - assert(HAS_ARG(OP)); \ - struct cfg_instr *_instr__ptr_ = (I); \ - _instr__ptr_->i_opcode = (OP); \ - _instr__ptr_->i_oparg = (ARG); \ - } while (0); - -/* No args*/ -#define INSTR_SET_OP0(I, OP) \ - do { \ - assert(!HAS_ARG(OP)); \ - struct cfg_instr *_instr__ptr_ = (I); \ - _instr__ptr_->i_opcode = (OP); \ - _instr__ptr_->i_oparg = 0; \ - } while (0); - -typedef struct exceptstack { - struct basicblock_ *handlers[CO_MAXBLOCKS+1]; - int depth; -} ExceptStack; - -#define LOG_BITS_PER_INT 5 -#define MASK_LOW_LOG_BITS 31 - -static inline int -is_bit_set_in_table(const uint32_t *table, int bitindex) { - /* Is the relevant bit set in the relevant word? */ - /* 512 bits fit into 9 32-bits words. - * Word is indexed by (bitindex>>ln(size of int in bits)). - * Bit within word is the low bits of bitindex. 
- */ - if (bitindex >= 0 && bitindex < 512) { - uint32_t word = table[bitindex >> LOG_BITS_PER_INT]; - return (word >> (bitindex & MASK_LOW_LOG_BITS)) & 1; - } - else { - return 0; - } -} - -static inline int -is_relative_jump(struct cfg_instr *i) -{ - return is_bit_set_in_table(_PyOpcode_RelativeJump, i->i_opcode); -} - -static inline int -is_block_push(struct cfg_instr *i) -{ - return IS_BLOCK_PUSH_OPCODE(i->i_opcode); -} - -static inline int -is_jump(struct cfg_instr *i) -{ - return IS_JUMP_OPCODE(i->i_opcode); -} - -static int -instr_size(struct cfg_instr *instruction) -{ - int opcode = instruction->i_opcode; - assert(!IS_PSEUDO_OPCODE(opcode)); - int oparg = instruction->i_oparg; - assert(HAS_ARG(opcode) || oparg == 0); - int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg); - int caches = _PyOpcode_Caches[opcode]; - return extended_args + 1 + caches; -} - -static void -write_instr(_Py_CODEUNIT *codestr, struct cfg_instr *instruction, int ilen) -{ - int opcode = instruction->i_opcode; - assert(!IS_PSEUDO_OPCODE(opcode)); - int oparg = instruction->i_oparg; - assert(HAS_ARG(opcode) || oparg == 0); - int caches = _PyOpcode_Caches[opcode]; - switch (ilen - caches) { - case 4: - codestr->op.code = EXTENDED_ARG; - codestr->op.arg = (oparg >> 24) & 0xFF; - codestr++; - /* fall through */ - case 3: - codestr->op.code = EXTENDED_ARG; - codestr->op.arg = (oparg >> 16) & 0xFF; - codestr++; - /* fall through */ - case 2: - codestr->op.code = EXTENDED_ARG; - codestr->op.arg = (oparg >> 8) & 0xFF; - codestr++; - /* fall through */ - case 1: - codestr->op.code = opcode; - codestr->op.arg = oparg & 0xFF; - codestr++; - break; - default: - Py_UNREACHABLE(); - } - while (caches--) { - codestr->op.code = CACHE; - codestr->op.arg = 0; - codestr++; - } -} - -typedef struct basicblock_ { - /* Each basicblock in a compilation unit is linked via b_list in the - reverse order that the block are allocated. b_list points to the next - block, not to be confused with b_next, which is next by control flow. */ - struct basicblock_ *b_list; - /* The label of this block if it is a jump target, -1 otherwise */ - jump_target_label b_label; - /* Exception stack at start of block, used by assembler to create the exception handling table */ - ExceptStack *b_exceptstack; - /* pointer to an array of instructions, initially NULL */ - struct cfg_instr *b_instr; - /* If b_next is non-NULL, it is a pointer to the next - block reached by normal control flow. */ - struct basicblock_ *b_next; - /* number of instructions used */ - int b_iused; - /* length of instruction array (b_instr) */ - int b_ialloc; - /* Used by add_checks_for_loads_of_unknown_variables */ - uint64_t b_unsafe_locals_mask; - /* Number of predecessors that a block has. */ - int b_predecessors; - /* depth of stack upon entry of block, computed by stackdepth() */ - int b_startdepth; - /* instruction offset for block, computed by assemble_jump_offsets() */ - int b_offset; - /* Basic block is an exception handler that preserves lasti */ - unsigned b_preserve_lasti : 1; - /* Used by compiler passes to mark whether they have visited a basic block. 
*/ - unsigned b_visited : 1; - /* b_except_handler is used by the cold-detection algorithm to mark exception targets */ - unsigned b_except_handler : 1; - /* b_cold is true if this block is not perf critical (like an exception handler) */ - unsigned b_cold : 1; - /* b_warm is used by the cold-detection algorithm to mark blocks which are definitely not cold */ - unsigned b_warm : 1; -} basicblock; - - -static struct cfg_instr * -basicblock_last_instr(const basicblock *b) { - assert(b->b_iused >= 0); - if (b->b_iused > 0) { - assert(b->b_instr != NULL); - return &b->b_instr[b->b_iused - 1]; - } - return NULL; -} - -static inline int -basicblock_exits_scope(const basicblock *b) { - struct cfg_instr *last = basicblock_last_instr(b); - return last && IS_SCOPE_EXIT_OPCODE(last->i_opcode); -} - -static inline int -basicblock_nofallthrough(const basicblock *b) { - struct cfg_instr *last = basicblock_last_instr(b); - return (last && - (IS_SCOPE_EXIT_OPCODE(last->i_opcode) || - IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode))); -} - -#define BB_NO_FALLTHROUGH(B) (basicblock_nofallthrough(B)) -#define BB_HAS_FALLTHROUGH(B) (!basicblock_nofallthrough(B)) /* fblockinfo tracks the current frame block. @@ -397,35 +149,19 @@ enum { COMPILER_SCOPE_COMPREHENSION, }; -typedef struct cfg_builder_ { - /* The entryblock, at which control flow begins. All blocks of the - CFG are reachable through the b_next links */ - basicblock *g_entryblock; - /* Pointer to the most recently allocated block. By following - b_list links, you can reach all allocated blocks. */ - basicblock *g_block_list; - /* pointer to the block currently being constructed */ - basicblock *g_curblock; - /* label for the next instruction to be placed */ - jump_target_label g_current_label; -} cfg_builder; - -typedef struct { - int i_opcode; - int i_oparg; - location i_loc; -} instruction; - -typedef struct instr_sequence_ { - instruction *s_instrs; - int s_allocated; - int s_used; +int +_PyCompile_InstrSize(int opcode, int oparg) +{ + assert(!IS_PSEUDO_OPCODE(opcode)); + assert(HAS_ARG(opcode) || oparg == 0); + int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg); + int caches = _PyOpcode_Caches[opcode]; + return extended_args + 1 + caches; +} - int *s_labelmap; /* label id --> instr offset */ - int s_labelmap_size; - int s_next_free_label; /* next free label id */ -} instr_sequence; +typedef _PyCompile_Instruction instruction; +typedef _PyCompile_InstructionSequence instr_sequence; #define INITIAL_INSTR_SEQUENCE_SIZE 100 #define INITIAL_INSTR_SEQUENCE_LABELS_MAP_SIZE 10 @@ -440,10 +176,11 @@ typedef struct instr_sequence_ { * item_size: size of each item * */ -static int -ensure_array_large_enough(int idx, void **arr_, int *alloc, int default_alloc, size_t item_size) +int +_PyCompile_EnsureArrayLargeEnough(int idx, void **array, int *alloc, + int default_alloc, size_t item_size) { - void *arr = *arr_; + void *arr = *array; if (arr == NULL) { int new_alloc = default_alloc; if (idx >= new_alloc) { @@ -480,7 +217,7 @@ ensure_array_large_enough(int idx, void **arr_, int *alloc, int default_alloc, s memset((char *)arr + oldsize, 0, newsize - oldsize); } - *arr_ = arr; + *array = arr; return SUCCESS; } @@ -489,11 +226,11 @@ instr_sequence_next_inst(instr_sequence *seq) { assert(seq->s_instrs != NULL || seq->s_used == 0); RETURN_IF_ERROR( - ensure_array_large_enough(seq->s_used + 1, - (void**)&seq->s_instrs, - &seq->s_allocated, - INITIAL_INSTR_SEQUENCE_SIZE, - sizeof(instruction))); + _PyCompile_EnsureArrayLargeEnough(seq->s_used + 
1, + (void**)&seq->s_instrs, + &seq->s_allocated, + INITIAL_INSTR_SEQUENCE_SIZE, + sizeof(instruction))); assert(seq->s_used < seq->s_allocated); return seq->s_used++; } @@ -509,11 +246,11 @@ static int instr_sequence_use_label(instr_sequence *seq, int lbl) { int old_size = seq->s_labelmap_size; RETURN_IF_ERROR( - ensure_array_large_enough(lbl, - (void**)&seq->s_labelmap, - &seq->s_labelmap_size, - INITIAL_INSTR_SEQUENCE_LABELS_MAP_SIZE, - sizeof(int))); + _PyCompile_EnsureArrayLargeEnough(lbl, + (void**)&seq->s_labelmap, + &seq->s_labelmap_size, + INITIAL_INSTR_SEQUENCE_LABELS_MAP_SIZE, + sizeof(int))); for(int i = old_size; i < seq->s_labelmap_size; i++) { seq->s_labelmap[i] = -111; /* something weird, for debugging */ @@ -526,7 +263,6 @@ static int instr_sequence_addop(instr_sequence *seq, int opcode, int oparg, location loc) { assert(IS_WITHIN_OPCODE_RANGE(opcode)); - assert(!IS_ASSEMBLER_OPCODE(opcode)); assert(HAS_ARG(opcode) || HAS_TARGET(opcode) || oparg == 0); assert(0 <= oparg && oparg < (1 << 30)); @@ -572,40 +308,56 @@ instr_sequence_fini(instr_sequence *seq) { seq->s_instrs = NULL; } -static int basicblock_addop(basicblock *b, int opcode, int oparg, location loc); -static int cfg_builder_maybe_start_new_block(cfg_builder *g); - static int -cfg_builder_use_label(cfg_builder *g, jump_target_label lbl) -{ - g->g_current_label = lbl; - return cfg_builder_maybe_start_new_block(g); -} +instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { + memset(g, 0, sizeof(cfg_builder)); + RETURN_IF_ERROR(_PyCfgBuilder_Init(g)); -static int -cfg_builder_addop(cfg_builder *g, int opcode, int oparg, location loc) -{ - RETURN_IF_ERROR(cfg_builder_maybe_start_new_block(g)); - return basicblock_addop(g->g_curblock, opcode, oparg, loc); -} + /* There can be more than one label for the same offset. The + * offset2lbl maping selects one of them which we use consistently. 
+ */ -static int cfg_builder_init(cfg_builder *g); + int *offset2lbl = PyMem_Malloc(seq->s_used * sizeof(int)); + if (offset2lbl == NULL) { + PyErr_NoMemory(); + return ERROR; + } + for (int i = 0; i < seq->s_used; i++) { + offset2lbl[i] = -1; + } + for (int lbl=0; lbl < seq->s_labelmap_size; lbl++) { + int offset = seq->s_labelmap[lbl]; + if (offset >= 0) { + assert(offset < seq->s_used); + offset2lbl[offset] = lbl; + } + } -static int -instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { - memset(g, 0, sizeof(cfg_builder)); - RETURN_IF_ERROR(cfg_builder_init(g)); - /* Note: there can be more than one label for the same offset */ for (int i = 0; i < seq->s_used; i++) { - for (int j=0; j < seq->s_labelmap_size; j++) { - if (seq->s_labelmap[j] == i) { - jump_target_label lbl = {j}; - RETURN_IF_ERROR(cfg_builder_use_label(g, lbl)); + int lbl = offset2lbl[i]; + if (lbl >= 0) { + assert (lbl < seq->s_labelmap_size); + jump_target_label lbl_ = {lbl}; + if (_PyCfgBuilder_UseLabel(g, lbl_) < 0) { + goto error; } } instruction *instr = &seq->s_instrs[i]; - RETURN_IF_ERROR(cfg_builder_addop(g, instr->i_opcode, instr->i_oparg, instr->i_loc)); + int opcode = instr->i_opcode; + int oparg = instr->i_oparg; + if (HAS_TARGET(opcode)) { + int offset = seq->s_labelmap[oparg]; + assert(offset >= 0 && offset < seq->s_used); + int lbl = offset2lbl[offset]; + assert(lbl >= 0 && lbl < seq->s_labelmap_size); + oparg = lbl; + } + if (_PyCfgBuilder_Addop(g, opcode, oparg, instr->i_loc) < 0) { + goto error; + } } + PyMem_Free(offset2lbl); + int nblocks = 0; for (basicblock *b = g->g_block_list; b != NULL; b = b->b_list) { nblocks++; @@ -615,6 +367,9 @@ instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { return ERROR; } return SUCCESS; +error: + PyMem_Free(offset2lbl); + return ERROR; } @@ -624,35 +379,16 @@ instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { struct compiler_unit { PySTEntryObject *u_ste; - PyObject *u_name; - PyObject *u_qualname; /* dot-separated qualified name (lazy) */ int u_scope_type; - /* The following fields are dicts that map objects to - the index of them in co_XXX. The index is used as - the argument for opcodes that refer to those collections. - */ - PyObject *u_consts; /* all constants */ - PyObject *u_names; /* all names */ - PyObject *u_varnames; /* local variables */ - PyObject *u_cellvars; /* cell variables */ - PyObject *u_freevars; /* free variables */ - - PyObject *u_fasthidden; /* dict; keys are names that are fast-locals only - temporarily within an inlined comprehension. When - value is True, treat as fast-local. */ PyObject *u_private; /* for private name mangling */ - Py_ssize_t u_argcount; /* number of arguments for block */ - Py_ssize_t u_posonlyargcount; /* number of positional only arguments for block */ - Py_ssize_t u_kwonlyargcount; /* number of keyword only arguments for block */ - instr_sequence u_instr_sequence; /* codegen output */ int u_nfblocks; struct fblockinfo u_fblock[CO_MAXBLOCKS]; - int u_firstlineno; /* the first lineno of the block */ + _PyCompile_CodeUnitMetadata u_metadata; }; /* This struct captures the global state of a compilation. 
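A minimal Python sketch of the label canonicalisation that instr_sequence_to_cfg() above performs (names here are illustrative, not a CPython API): labelmap maps a label id to an instruction offset, several labels may share an offset, so the inverse map picks one canonical label per offset and jump targets are rewritten to it before the instructions are handed to the CFG builder:

    def canonicalize(instructions, labelmap):
        # Invert label -> offset; when several labels share an offset the
        # last one wins, matching the C loop over s_labelmap.
        offset2lbl = {}
        for lbl, offset in enumerate(labelmap):
            if offset >= 0:
                offset2lbl[offset] = lbl
        out = []
        for offset, (opcode, oparg, has_target) in enumerate(instructions):
            starts_block = offset in offset2lbl   # a labelled offset opens a new basic block
            if has_target:
                # A jump's oparg is a label id; translate it to the canonical
                # label chosen for the offset it points at.
                oparg = offset2lbl[labelmap[oparg]]
            out.append((starts_block, opcode, oparg))
        return out
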
@@ -711,8 +447,6 @@ typedef struct { Py_ssize_t on_top; } pattern_context; -static int basicblock_next_instr(basicblock *); - static int codegen_addop_i(instr_sequence *seq, int opcode, Py_ssize_t oparg, location loc); static void compiler_free(struct compiler *); @@ -765,9 +499,7 @@ static int compiler_match(struct compiler *, stmt_ty); static int compiler_pattern_subpattern(struct compiler *, pattern_ty, pattern_context *); -static int remove_redundant_nops(basicblock *bb); - -static PyCodeObject *assemble(struct compiler *, int addNone); +static PyCodeObject *optimize_and_assemble(struct compiler *, int addNone); #define CAPSULE_NAME "compile.c compiler unit" @@ -946,70 +678,19 @@ dictbytype(PyObject *src, int scope_type, int flag, Py_ssize_t offset) return dest; } -#ifndef NDEBUG -static bool -cfg_builder_check(cfg_builder *g) -{ - assert(g->g_entryblock->b_iused > 0); - for (basicblock *block = g->g_block_list; block != NULL; block = block->b_list) { - assert(!_PyMem_IsPtrFreed(block)); - if (block->b_instr != NULL) { - assert(block->b_ialloc > 0); - assert(block->b_iused >= 0); - assert(block->b_ialloc >= block->b_iused); - } - else { - assert (block->b_iused == 0); - assert (block->b_ialloc == 0); - } - } - return true; -} -#endif - -static basicblock *cfg_builder_new_block(cfg_builder *g); - -static int -cfg_builder_init(cfg_builder *g) -{ - g->g_block_list = NULL; - basicblock *block = cfg_builder_new_block(g); - if (block == NULL) { - return ERROR; - } - g->g_curblock = g->g_entryblock = block; - g->g_current_label = NO_LABEL; - return SUCCESS; -} - -static void -cfg_builder_fini(cfg_builder* g) -{ - assert(cfg_builder_check(g)); - basicblock *b = g->g_block_list; - while (b != NULL) { - if (b->b_instr) { - PyObject_Free((void *)b->b_instr); - } - basicblock *next = b->b_list; - PyObject_Free((void *)b); - b = next; - } -} - static void compiler_unit_free(struct compiler_unit *u) { instr_sequence_fini(&u->u_instr_sequence); Py_CLEAR(u->u_ste); - Py_CLEAR(u->u_name); - Py_CLEAR(u->u_qualname); - Py_CLEAR(u->u_consts); - Py_CLEAR(u->u_names); - Py_CLEAR(u->u_varnames); - Py_CLEAR(u->u_freevars); - Py_CLEAR(u->u_cellvars); - Py_CLEAR(u->u_fasthidden); + Py_CLEAR(u->u_metadata.u_name); + Py_CLEAR(u->u_metadata.u_qualname); + Py_CLEAR(u->u_metadata.u_consts); + Py_CLEAR(u->u_metadata.u_names); + Py_CLEAR(u->u_metadata.u_varnames); + Py_CLEAR(u->u_metadata.u_freevars); + Py_CLEAR(u->u_metadata.u_cellvars); + Py_CLEAR(u->u_metadata.u_fasthidden); Py_CLEAR(u->u_private); PyObject_Free(u); } @@ -1036,8 +717,8 @@ compiler_set_qualname(struct compiler *c) if (u->u_scope_type == COMPILER_SCOPE_FUNCTION || u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION || u->u_scope_type == COMPILER_SCOPE_CLASS) { - assert(u->u_name); - mangled = _Py_Mangle(parent->u_private, u->u_name); + assert(u->u_metadata.u_name); + mangled = _Py_Mangle(parent->u_private, u->u_metadata.u_name); if (!mangled) { return ERROR; } @@ -1055,14 +736,14 @@ compiler_set_qualname(struct compiler *c) || parent->u_scope_type == COMPILER_SCOPE_LAMBDA) { _Py_DECLARE_STR(dot_locals, ".<locals>"); - base = PyUnicode_Concat(parent->u_qualname, + base = PyUnicode_Concat(parent->u_metadata.u_qualname, &_Py_STR(dot_locals)); if (base == NULL) { return ERROR; } } else { - base = Py_NewRef(parent->u_qualname); + base = Py_NewRef(parent->u_metadata.u_qualname); } } } @@ -1074,98 +755,19 @@ compiler_set_qualname(struct compiler *c) if (name == NULL) { return ERROR; } - PyUnicode_Append(&name, u->u_name); + PyUnicode_Append(&name, 
u->u_metadata.u_name); if (name == NULL) { return ERROR; } } else { - name = Py_NewRef(u->u_name); - } - u->u_qualname = name; - - return SUCCESS; -} - -/* Allocate a new block and return a pointer to it. - Returns NULL on error. -*/ -static basicblock * -cfg_builder_new_block(cfg_builder *g) -{ - basicblock *b = (basicblock *)PyObject_Calloc(1, sizeof(basicblock)); - if (b == NULL) { - PyErr_NoMemory(); - return NULL; + name = Py_NewRef(u->u_metadata.u_name); } - /* Extend the singly linked list of blocks with new block. */ - b->b_list = g->g_block_list; - g->g_block_list = b; - b->b_label = NO_LABEL; - return b; -} + u->u_metadata.u_qualname = name; -static basicblock * -cfg_builder_use_next_block(cfg_builder *g, basicblock *block) -{ - assert(block != NULL); - g->g_curblock->b_next = block; - g->g_curblock = block; - return block; -} - -static inline int -basicblock_append_instructions(basicblock *target, basicblock *source) -{ - for (int i = 0; i < source->b_iused; i++) { - int n = basicblock_next_instr(target); - if (n < 0) { - return ERROR; - } - target->b_instr[n] = source->b_instr[i]; - } return SUCCESS; } -static basicblock * -copy_basicblock(cfg_builder *g, basicblock *block) -{ - /* Cannot copy a block if it has a fallthrough, since - * a block can only have one fallthrough predecessor. - */ - assert(BB_NO_FALLTHROUGH(block)); - basicblock *result = cfg_builder_new_block(g); - if (result == NULL) { - return NULL; - } - if (basicblock_append_instructions(result, block) < 0) { - return NULL; - } - return result; -} - -/* Returns the offset of the next instruction in the current block's - b_instr array. Resizes the b_instr as necessary. - Returns -1 on failure. -*/ - -static int -basicblock_next_instr(basicblock *b) -{ - assert(b != NULL); - - RETURN_IF_ERROR( - ensure_array_large_enough( - b->b_iused + 1, - (void**)&b->b_instr, - &b->b_ialloc, - DEFAULT_BLOCK_SIZE, - sizeof(struct cfg_instr))); - - return b->b_iused++; -} - - /* Return the stack effect of opcode with argument oparg. 
Some opcodes have different stack effect when jump to the target and @@ -1241,6 +843,10 @@ stack_effect(int opcode, int oparg, int jump) return -1; case LOAD_METHOD: return 1; + case LOAD_SUPER_METHOD: + case LOAD_ZERO_SUPER_METHOD: + case LOAD_ZERO_SUPER_ATTR: + return -1; default: return PY_INVALID_STACK_EFFECT; } @@ -1260,66 +866,11 @@ PyCompile_OpcodeStackEffect(int opcode, int oparg) return stack_effect(opcode, oparg, -1); } -static int -basicblock_addop(basicblock *b, int opcode, int oparg, location loc) -{ - assert(IS_WITHIN_OPCODE_RANGE(opcode)); - assert(!IS_ASSEMBLER_OPCODE(opcode)); - assert(HAS_ARG(opcode) || HAS_TARGET(opcode) || oparg == 0); - assert(0 <= oparg && oparg < (1 << 30)); - - int off = basicblock_next_instr(b); - if (off < 0) { - return ERROR; - } - struct cfg_instr *i = &b->b_instr[off]; - i->i_opcode = opcode; - i->i_oparg = oparg; - i->i_target = NULL; - i->i_loc = loc; - - return SUCCESS; -} - -static bool -cfg_builder_current_block_is_terminated(cfg_builder *g) -{ - struct cfg_instr *last = basicblock_last_instr(g->g_curblock); - if (last && IS_TERMINATOR_OPCODE(last->i_opcode)) { - return true; - } - if (IS_LABEL(g->g_current_label)) { - if (last || IS_LABEL(g->g_curblock->b_label)) { - return true; - } - else { - /* current block is empty, label it */ - g->g_curblock->b_label = g->g_current_label; - g->g_current_label = NO_LABEL; - } - } - return false; -} - -static int -cfg_builder_maybe_start_new_block(cfg_builder *g) -{ - if (cfg_builder_current_block_is_terminated(g)) { - basicblock *b = cfg_builder_new_block(g); - if (b == NULL) { - return ERROR; - } - b->b_label = g->g_current_label; - g->g_current_label = NO_LABEL; - cfg_builder_use_next_block(g, b); - } - return SUCCESS; -} - static int codegen_addop_noarg(instr_sequence *seq, int opcode, location loc) { assert(!HAS_ARG(opcode)); + assert(!IS_ASSEMBLER_OPCODE(opcode)); return instr_sequence_addop(seq, opcode, 0, loc); } @@ -1467,7 +1018,7 @@ compiler_add_const(PyObject *const_cache, struct compiler_unit *u, PyObject *o) return ERROR; } - Py_ssize_t arg = dict_add_o(u->u_consts, key); + Py_ssize_t arg = dict_add_o(u->u_metadata.u_consts, key); Py_DECREF(key); return arg; } @@ -1514,6 +1065,24 @@ compiler_addop_name(struct compiler_unit *u, location loc, arg <<= 1; arg |= 1; } + if (opcode == LOAD_SUPER_ATTR) { + arg <<= 2; + arg |= 2; + } + if (opcode == LOAD_SUPER_METHOD) { + opcode = LOAD_SUPER_ATTR; + arg <<= 2; + arg |= 3; + } + if (opcode == LOAD_ZERO_SUPER_ATTR) { + opcode = LOAD_SUPER_ATTR; + arg <<= 2; + } + if (opcode == LOAD_ZERO_SUPER_METHOD) { + opcode = LOAD_SUPER_ATTR; + arg <<= 2; + arg |= 1; + } return codegen_addop_i(&u->u_instr_sequence, opcode, arg, loc); } @@ -1530,6 +1099,7 @@ codegen_addop_i(instr_sequence *seq, int opcode, Py_ssize_t oparg, location loc) EXTENDED_ARG is used for 16, 24, and 32-bit arguments. 
*/ int oparg_ = Py_SAFE_DOWNCAST(oparg, Py_ssize_t, int); + assert(!IS_ASSEMBLER_OPCODE(opcode)); return instr_sequence_addop(seq, opcode, oparg_, loc); } @@ -1539,6 +1109,7 @@ codegen_addop_j(instr_sequence *seq, location loc, { assert(IS_LABEL(target)); assert(IS_JUMP_OPCODE(opcode) || IS_BLOCK_PUSH_OPCODE(opcode)); + assert(!IS_ASSEMBLER_OPCODE(opcode)); return instr_sequence_addop(seq, opcode, target.id, loc); } @@ -1570,7 +1141,7 @@ codegen_addop_j(instr_sequence *seq, location loc, #define ADDOP_N(C, LOC, OP, O, TYPE) { \ assert(!HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \ - if (compiler_addop_o((C)->u, (LOC), (OP), (C)->u->u_ ## TYPE, (O)) < 0) { \ + if (compiler_addop_o((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O)) < 0) { \ Py_DECREF((O)); \ return ERROR; \ } \ @@ -1578,7 +1149,7 @@ codegen_addop_j(instr_sequence *seq, location loc, } #define ADDOP_NAME(C, LOC, OP, O, TYPE) \ - RETURN_IF_ERROR(compiler_addop_name((C)->u, (LOC), (OP), (C)->u->u_ ## TYPE, (O))) + RETURN_IF_ERROR(compiler_addop_name((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O))) #define ADDOP_I(C, LOC, OP, O) \ RETURN_IF_ERROR(codegen_addop_i(INSTR_SEQUENCE(C), (OP), (O), (LOC))) @@ -1655,18 +1226,18 @@ compiler_enter_scope(struct compiler *c, identifier name, return ERROR; } u->u_scope_type = scope_type; - u->u_argcount = 0; - u->u_posonlyargcount = 0; - u->u_kwonlyargcount = 0; + u->u_metadata.u_argcount = 0; + u->u_metadata.u_posonlyargcount = 0; + u->u_metadata.u_kwonlyargcount = 0; u->u_ste = PySymtable_Lookup(c->c_st, key); if (!u->u_ste) { compiler_unit_free(u); return ERROR; } - u->u_name = Py_NewRef(name); - u->u_varnames = list2dict(u->u_ste->ste_varnames); - u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0); - if (!u->u_varnames || !u->u_cellvars) { + u->u_metadata.u_name = Py_NewRef(name); + u->u_metadata.u_varnames = list2dict(u->u_ste->ste_varnames); + u->u_metadata.u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0); + if (!u->u_metadata.u_varnames || !u->u_metadata.u_cellvars) { compiler_unit_free(u); return ERROR; } @@ -1674,8 +1245,8 @@ compiler_enter_scope(struct compiler *c, identifier name, /* Cook up an implicit __class__ cell. 
*/ int res; assert(u->u_scope_type == COMPILER_SCOPE_CLASS); - assert(PyDict_GET_SIZE(u->u_cellvars) == 0); - res = PyDict_SetItem(u->u_cellvars, &_Py_ID(__class__), + assert(PyDict_GET_SIZE(u->u_metadata.u_cellvars) == 0); + res = PyDict_SetItem(u->u_metadata.u_cellvars, &_Py_ID(__class__), _PyLong_GetZero()); if (res < 0) { compiler_unit_free(u); @@ -1683,28 +1254,28 @@ compiler_enter_scope(struct compiler *c, identifier name, } } - u->u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS, - PyDict_GET_SIZE(u->u_cellvars)); - if (!u->u_freevars) { + u->u_metadata.u_freevars = dictbytype(u->u_ste->ste_symbols, FREE, DEF_FREE_CLASS, + PyDict_GET_SIZE(u->u_metadata.u_cellvars)); + if (!u->u_metadata.u_freevars) { compiler_unit_free(u); return ERROR; } - u->u_fasthidden = PyDict_New(); - if (!u->u_fasthidden) { + u->u_metadata.u_fasthidden = PyDict_New(); + if (!u->u_metadata.u_fasthidden) { compiler_unit_free(u); return ERROR; } u->u_nfblocks = 0; - u->u_firstlineno = lineno; - u->u_consts = PyDict_New(); - if (!u->u_consts) { + u->u_metadata.u_firstlineno = lineno; + u->u_metadata.u_consts = PyDict_New(); + if (!u->u_metadata.u_consts) { compiler_unit_free(u); return ERROR; } - u->u_names = PyDict_New(); - if (!u->u_names) { + u->u_metadata.u_names = PyDict_New(); + if (!u->u_metadata.u_names) { compiler_unit_free(u); return ERROR; } @@ -1898,8 +1469,7 @@ compiler_add_yield_from(struct compiler *c, location loc, int await) ADDOP(c, loc, CLEANUP_THROW); USE_LABEL(c, exit); - ADDOP_I(c, loc, SWAP, 2); - ADDOP(c, loc, POP_TOP); + ADDOP(c, loc, END_SEND); return SUCCESS; } @@ -2063,7 +1633,7 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) /* Set current line number to the line number of first statement. This way line number for SETUP_ANNOTATIONS will always coincide with the line number of first "real" statement in module. - If body is empty, then lineno will be set later in assemble. */ + If body is empty, then lineno will be set later in optimize_and_assemble. 
*/ if (c->u->u_scope_type == COMPILER_SCOPE_MODULE && asdl_seq_LEN(stmts)) { st = (stmt_ty)asdl_seq_GET(stmts, 0); loc = LOC(st); @@ -2134,7 +1704,7 @@ compiler_mod(struct compiler *c, mod_ty mod) if (compiler_codegen(c, mod) < 0) { return NULL; } - PyCodeObject *co = assemble(c, addNone); + PyCodeObject *co = optimize_and_assemble(c, addNone); compiler_exit_scope(c); return co; } @@ -2158,8 +1728,8 @@ get_ref_type(struct compiler *c, PyObject *name) "unknown scope in unit %S (%R); " "symbols: %R; locals: %R; globals: %R", name, - c->u->u_name, c->u->u_ste->ste_id, - c->u->u_ste->ste_symbols, c->u->u_varnames, c->u->u_names); + c->u->u_metadata.u_name, c->u->u_ste->ste_id, + c->u->u_ste->ste_symbols, c->u->u_metadata.u_varnames, c->u->u_metadata.u_names); return ERROR; } return scope; @@ -2199,10 +1769,10 @@ compiler_make_closure(struct compiler *c, location loc, } int arg; if (reftype == CELL) { - arg = compiler_lookup_arg(c->u->u_cellvars, name); + arg = compiler_lookup_arg(c->u->u_metadata.u_cellvars, name); } else { - arg = compiler_lookup_arg(c->u->u_freevars, name); + arg = compiler_lookup_arg(c->u->u_metadata.u_freevars, name); } if (arg == -1) { PyObject *freevars = _PyCode_GetFreevars(co); @@ -2214,7 +1784,7 @@ compiler_make_closure(struct compiler *c, location loc, "freevars of code %S: %R", name, reftype, - c->u->u_name, + c->u->u_metadata.u_name, co->co_name, freevars); Py_DECREF(freevars); @@ -2496,16 +2066,6 @@ compiler_check_debug_args(struct compiler *c, arguments_ty args) return SUCCESS; } -static inline int -insert_instruction(basicblock *block, int pos, struct cfg_instr *instr) { - RETURN_IF_ERROR(basicblock_next_instr(block)); - for (int i = block->b_iused - 1; i > pos; i--) { - block->b_instr[i] = block->b_instr[i-1]; - } - block->b_instr[pos] = *instr; - return SUCCESS; -} - static int wrap_in_stopiteration_handler(struct compiler *c) { @@ -2593,9 +2153,9 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) return ERROR; } - c->u->u_argcount = asdl_seq_LEN(args->args); - c->u->u_posonlyargcount = asdl_seq_LEN(args->posonlyargs); - c->u->u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs); + c->u->u_metadata.u_argcount = asdl_seq_LEN(args->args); + c->u->u_metadata.u_posonlyargcount = asdl_seq_LEN(args->posonlyargs); + c->u->u_metadata.u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs); for (i = docstring ? 
1 : 0; i < asdl_seq_LEN(body); i++) { VISIT_IN_SCOPE(c, stmt, (stmt_ty)asdl_seq_GET(body, i)); } @@ -2605,7 +2165,7 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) return ERROR; } } - co = assemble(c, 1); + co = optimize_and_assemble(c, 1); compiler_exit_scope(c); if (co == NULL) { Py_XDECREF(co); @@ -2665,8 +2225,8 @@ compiler_class(struct compiler *c, stmt_ty s) compiler_exit_scope(c); return ERROR; } - assert(c->u->u_qualname); - ADDOP_LOAD_CONST(c, loc, c->u->u_qualname); + assert(c->u->u_metadata.u_qualname); + ADDOP_LOAD_CONST(c, loc, c->u->u_metadata.u_qualname); if (compiler_nameop(c, loc, &_Py_ID(__qualname__), Store) < 0) { compiler_exit_scope(c); return ERROR; @@ -2680,7 +2240,7 @@ compiler_class(struct compiler *c, stmt_ty s) /* Return __classcell__ if it is referenced, otherwise return None */ if (c->u->u_ste->ste_needs_class_closure) { /* Store __classcell__ into class namespace & return it */ - i = compiler_lookup_arg(c->u->u_cellvars, &_Py_ID(__class__)); + i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__class__)); if (i < 0) { compiler_exit_scope(c); return ERROR; @@ -2699,7 +2259,7 @@ compiler_class(struct compiler *c, stmt_ty s) } ADDOP_IN_SCOPE(c, NO_LOCATION, RETURN_VALUE); /* create the code object */ - co = assemble(c, 1); + co = optimize_and_assemble(c, 1); } /* leave the new scope */ compiler_exit_scope(c); @@ -2750,6 +2310,8 @@ check_is_arg(expr_ty e) || value == Py_Ellipsis); } +static PyTypeObject * infer_type(expr_ty e); + /* Check operands of identity checks ("is" and "is not"). Emit a warning if any operand is a constant except named singletons. */ @@ -2758,23 +2320,38 @@ check_compare(struct compiler *c, expr_ty e) { Py_ssize_t i, n; bool left = check_is_arg(e->v.Compare.left); + expr_ty left_expr = e->v.Compare.left; n = asdl_seq_LEN(e->v.Compare.ops); for (i = 0; i < n; i++) { cmpop_ty op = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i); - bool right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i)); + expr_ty right_expr = (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i); + bool right = check_is_arg(right_expr); if (op == Is || op == IsNot) { if (!right || !left) { const char *msg = (op == Is) - ? "\"is\" with a literal. Did you mean \"==\"?" - : "\"is not\" with a literal. Did you mean \"!=\"?"; - return compiler_warn(c, LOC(e), msg); + ? "\"is\" with '%.200s' literal. Did you mean \"==\"?" + : "\"is not\" with '%.200s' literal. Did you mean \"!=\"?"; + expr_ty literal = !left ? left_expr : right_expr; + return compiler_warn( + c, LOC(e), msg, infer_type(literal)->tp_name + ); } } left = right; + left_expr = right_expr; } return SUCCESS; } +static const int compare_masks[] = { + [Py_LT] = COMPARISON_LESS_THAN, + [Py_LE] = COMPARISON_LESS_THAN | COMPARISON_EQUALS, + [Py_EQ] = COMPARISON_EQUALS, + [Py_NE] = COMPARISON_NOT_EQUALS, + [Py_GT] = COMPARISON_GREATER_THAN, + [Py_GE] = COMPARISON_GREATER_THAN | COMPARISON_EQUALS, +}; + static int compiler_addcompare(struct compiler *c, location loc, cmpop_ty op) { @@ -2815,7 +2392,7 @@ static int compiler_addcompare(struct compiler *c, location loc, } /* cmp goes in top bits of the oparg, while the low bits are used by quickened * versions of this opcode to store the comparison mask. */ - ADDOP_I(c, loc, COMPARE_OP, cmp << 4); + ADDOP_I(c, loc, COMPARE_OP, (cmp << 4) | compare_masks[cmp]); return SUCCESS; } @@ -2957,17 +2534,17 @@ compiler_lambda(struct compiler *c, expr_ty e) docstring. 
*/ RETURN_IF_ERROR(compiler_add_const(c->c_const_cache, c->u, Py_None)); - c->u->u_argcount = asdl_seq_LEN(args->args); - c->u->u_posonlyargcount = asdl_seq_LEN(args->posonlyargs); - c->u->u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs); + c->u->u_metadata.u_argcount = asdl_seq_LEN(args->args); + c->u->u_metadata.u_posonlyargcount = asdl_seq_LEN(args->posonlyargs); + c->u->u_metadata.u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs); VISIT_IN_SCOPE(c, expr, e->v.Lambda.body); if (c->u->u_ste->ste_generator) { - co = assemble(c, 0); + co = optimize_and_assemble(c, 0); } else { location loc = LOCATION(e->lineno, e->lineno, 0, 0); ADDOP_IN_SCOPE(c, loc, RETURN_VALUE); - co = assemble(c, 1); + co = optimize_and_assemble(c, 1); } compiler_exit_scope(c); if (co == NULL) { @@ -3517,11 +3094,9 @@ compiler_try_except(struct compiler *c, stmt_ty s) [orig, res, exc] <evaluate E1> [orig, res, exc, E1] CHECK_EG_MATCH [orig, res, rest/exc, match?] COPY 1 - [orig, res, rest/exc, match?, match?] POP_JUMP_IF_NOT_NONE H1 - [orig, res, exc, None] POP_TOP - [orig, res, exc] JUMP L2 + [orig, res, rest/exc, match?, match?] POP_JUMP_IF_NONE C1 - [orig, res, rest, match] H1: <assign to V1> (or POP if no V1) + [orig, res, rest, match] <assign to V1> (or POP if no V1) [orig, res, rest] SETUP_FINALLY R1 [orig, res, rest] <code for S1> @@ -3529,8 +3104,14 @@ compiler_try_except(struct compiler *c, stmt_ty s) [orig, res, rest, i, v] R1: LIST_APPEND 3 ) exc raised in except* body - add to res [orig, res, rest, i] POP + [orig, res, rest] JUMP LE2 + + [orig, res, rest] L2: NOP ) for lineno + [orig, res, rest] JUMP LE2 + + [orig, res, rest/exc, None] C1: POP - [orig, res, rest] L2: <evaluate E2> + [orig, res, rest] LE2: <evaluate E2> .............................etc....................... 
[orig, res, rest] Ln+1: LIST_APPEND 1 ) add unhandled exc to res (could be None) @@ -3586,7 +3167,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) location loc = LOC(handler); NEW_JUMP_TARGET_LABEL(c, next_except); except = next_except; - NEW_JUMP_TARGET_LABEL(c, handle_match); + NEW_JUMP_TARGET_LABEL(c, except_with_error); + NEW_JUMP_TARGET_LABEL(c, no_match); if (i == 0) { /* create empty list for exceptions raised/reraise in the except* blocks */ /* @@ -3604,13 +3186,9 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) VISIT(c, expr, handler->v.ExceptHandler.type); ADDOP(c, loc, CHECK_EG_MATCH); ADDOP_I(c, loc, COPY, 1); - ADDOP_JUMP(c, loc, POP_JUMP_IF_NOT_NONE, handle_match); - ADDOP(c, loc, POP_TOP); // match - ADDOP_JUMP(c, loc, JUMP, except); + ADDOP_JUMP(c, loc, POP_JUMP_IF_NONE, no_match); } - USE_LABEL(c, handle_match); - NEW_JUMP_TARGET_LABEL(c, cleanup_end); NEW_JUMP_TARGET_LABEL(c, cleanup_body); @@ -3669,9 +3247,16 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) /* add exception raised to the res list */ ADDOP_I(c, NO_LOCATION, LIST_APPEND, 3); // exc ADDOP(c, NO_LOCATION, POP_TOP); // lasti - ADDOP_JUMP(c, NO_LOCATION, JUMP, except); + ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error); USE_LABEL(c, except); + ADDOP(c, NO_LOCATION, NOP); // to hold a propagated location info + ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error); + + USE_LABEL(c, no_match); + ADDOP(c, loc, POP_TOP); // match (None) + + USE_LABEL(c, except_with_error); if (i == n - 1) { /* Add exc to the list (if not None it's the unhandled part of the EG) */ @@ -4113,7 +3698,7 @@ compiler_nameop(struct compiler *c, location loc, Py_ssize_t arg; enum { OP_FAST, OP_GLOBAL, OP_DEREF, OP_NAME } optype; - PyObject *dict = c->u->u_names; + PyObject *dict = c->u->u_metadata.u_names; PyObject *mangled; assert(!_PyUnicode_EqualToASCIIString(name, "None") && @@ -4134,16 +3719,16 @@ compiler_nameop(struct compiler *c, location loc, scope = _PyST_GetScope(c->u->u_ste, mangled); switch (scope) { case FREE: - dict = c->u->u_freevars; + dict = c->u->u_metadata.u_freevars; optype = OP_DEREF; break; case CELL: - dict = c->u->u_cellvars; + dict = c->u->u_metadata.u_cellvars; optype = OP_DEREF; break; case LOCAL: if (c->u->u_ste->ste_type == FunctionBlock || - (PyDict_GetItem(c->u->u_fasthidden, mangled) == Py_True)) + (PyDict_GetItem(c->u->u_metadata.u_fasthidden, mangled) == Py_True)) optype = OP_FAST; break; case GLOBAL_IMPLICIT: @@ -4217,19 +3802,18 @@ compiler_boolop(struct compiler *c, expr_ty e) location loc = LOC(e); assert(e->kind == BoolOp_kind); if (e->v.BoolOp.op == And) - jumpi = JUMP_IF_FALSE_OR_POP; + jumpi = POP_JUMP_IF_FALSE; else - jumpi = JUMP_IF_TRUE_OR_POP; + jumpi = POP_JUMP_IF_TRUE; NEW_JUMP_TARGET_LABEL(c, end); s = e->v.BoolOp.values; n = asdl_seq_LEN(s) - 1; assert(n >= 0); for (i = 0; i < n; ++i) { VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i)); + ADDOP_I(c, loc, COPY, 1); ADDOP_JUMP(c, loc, jumpi, end); - NEW_JUMP_TARGET_LABEL(c, next); - - USE_LABEL(c, next); + ADDOP(c, loc, POP_TOP); } VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n)); @@ -4534,7 +4118,9 @@ compiler_compare(struct compiler *c, expr_ty e) ADDOP_I(c, loc, SWAP, 2); ADDOP_I(c, loc, COPY, 2); ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, i)); - ADDOP_JUMP(c, loc, JUMP_IF_FALSE_OR_POP, cleanup); + ADDOP_I(c, loc, COPY, 1); + ADDOP_JUMP(c, loc, POP_JUMP_IF_FALSE, cleanup); + ADDOP(c, loc, POP_TOP); } VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n)); ADDOP_COMPARE(c, loc, 
asdl_seq_GET(e->v.Compare.ops, n)); @@ -4686,23 +4272,106 @@ is_import_originated(struct compiler *c, expr_ty e) return flags & DEF_IMPORT; } -// If an attribute access spans multiple lines, update the current start -// location to point to the attribute name. -static location -update_start_location_to_match_attr(struct compiler *c, location loc, - expr_ty attr) +static int +can_optimize_super_call(struct compiler *c, expr_ty attr) { - assert(attr->kind == Attribute_kind); - if (loc.lineno != attr->end_lineno) { - loc.lineno = attr->end_lineno; - int len = (int)PyUnicode_GET_LENGTH(attr->v.Attribute.attr); - if (len <= attr->end_col_offset) { - loc.col_offset = attr->end_col_offset - len; - } - else { - // GH-94694: Somebody's compiling weird ASTs. Just drop the columns: - loc.col_offset = -1; - loc.end_col_offset = -1; + expr_ty e = attr->v.Attribute.value; + if (e->kind != Call_kind || + e->v.Call.func->kind != Name_kind || + !_PyUnicode_EqualToASCIIString(e->v.Call.func->v.Name.id, "super") || + _PyUnicode_EqualToASCIIString(attr->v.Attribute.attr, "__class__") || + asdl_seq_LEN(e->v.Call.keywords) != 0) { + return 0; + } + Py_ssize_t num_args = asdl_seq_LEN(e->v.Call.args); + + PyObject *super_name = e->v.Call.func->v.Name.id; + // detect statically-visible shadowing of 'super' name + int scope = _PyST_GetScope(c->u->u_ste, super_name); + if (scope != GLOBAL_IMPLICIT) { + return 0; + } + scope = _PyST_GetScope(c->c_st->st_top, super_name); + if (scope != 0) { + return 0; + } + + if (num_args == 2) { + for (Py_ssize_t i = 0; i < num_args; i++) { + expr_ty elt = asdl_seq_GET(e->v.Call.args, i); + if (elt->kind == Starred_kind) { + return 0; + } + } + // exactly two non-starred args; we can just load + // the provided args + return 1; + } + + if (num_args != 0) { + return 0; + } + // we need the following for zero-arg super(): + + // enclosing function should have at least one argument + if (c->u->u_metadata.u_argcount == 0 && + c->u->u_metadata.u_posonlyargcount == 0) { + return 0; + } + // __class__ cell should be available + if (get_ref_type(c, &_Py_ID(__class__)) == FREE) { + return 1; + } + return 0; +} + +static int +load_args_for_super(struct compiler *c, expr_ty e) { + location loc = LOC(e); + + // load super() global + PyObject *super_name = e->v.Call.func->v.Name.id; + RETURN_IF_ERROR(compiler_nameop(c, loc, super_name, Load)); + + if (asdl_seq_LEN(e->v.Call.args) == 2) { + VISIT(c, expr, asdl_seq_GET(e->v.Call.args, 0)); + VISIT(c, expr, asdl_seq_GET(e->v.Call.args, 1)); + return SUCCESS; + } + + // load __class__ cell + PyObject *name = &_Py_ID(__class__); + assert(get_ref_type(c, name) == FREE); + RETURN_IF_ERROR(compiler_nameop(c, loc, name, Load)); + + // load self (first argument) + Py_ssize_t i = 0; + PyObject *key, *value; + if (!PyDict_Next(c->u->u_metadata.u_varnames, &i, &key, &value)) { + return ERROR; + } + RETURN_IF_ERROR(compiler_nameop(c, loc, key, Load)); + + return SUCCESS; +} + +// If an attribute access spans multiple lines, update the current start +// location to point to the attribute name. +static location +update_start_location_to_match_attr(struct compiler *c, location loc, + expr_ty attr) +{ + assert(attr->kind == Attribute_kind); + if (loc.lineno != attr->end_lineno) { + loc.lineno = attr->end_lineno; + int len = (int)PyUnicode_GET_LENGTH(attr->v.Attribute.attr); + if (len <= attr->end_col_offset) { + loc.col_offset = attr->end_col_offset - len; + } + else { + // GH-94694: Somebody's compiling weird ASTs. 
Just drop the columns: + loc.col_offset = -1; + loc.end_col_offset = -1; } // Make sure the end position still follows the start position, even for // weird ASTs: @@ -4753,11 +4422,21 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e) return 0; } } + /* Alright, we can optimize the code. */ - VISIT(c, expr, meth->v.Attribute.value); location loc = LOC(meth); - loc = update_start_location_to_match_attr(c, loc, meth); - ADDOP_NAME(c, loc, LOAD_METHOD, meth->v.Attribute.attr, names); + + if (can_optimize_super_call(c, meth)) { + RETURN_IF_ERROR(load_args_for_super(c, meth->v.Attribute.value)); + int opcode = asdl_seq_LEN(meth->v.Attribute.value->v.Call.args) ? + LOAD_SUPER_METHOD : LOAD_ZERO_SUPER_METHOD; + ADDOP_NAME(c, loc, opcode, meth->v.Attribute.attr, names); + } else { + VISIT(c, expr, meth->v.Attribute.value); + loc = update_start_location_to_match_attr(c, loc, meth); + ADDOP_NAME(c, loc, LOAD_METHOD, meth->v.Attribute.attr, names); + } + VISIT_SEQ(c, expr, e->v.Call.args); if (kwdsl) { @@ -5109,7 +4788,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc, if (!iter_on_stack) { if (gen_index == 0) { /* Receive outermost iter as an implicit argument */ - c->u->u_argcount = 1; + c->u->u_metadata.u_argcount = 1; ADDOP_I(c, loc, LOAD_FAST, 0); } else { @@ -5225,7 +4904,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, if (!iter_on_stack) { if (gen_index == 0) { /* Receive outermost iter as an implicit argument */ - c->u->u_argcount = 1; + c->u->u_metadata.u_argcount = 1; ADDOP_I(c, loc, LOAD_FAST, 0); } else { @@ -5332,7 +5011,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, if (symbol & DEF_LOCAL && ~symbol & DEF_NONLOCAL) { if (c->u->u_ste->ste_type != FunctionBlock) { // non-function scope: override this name to use fast locals - PyDict_SetItem(c->u->u_fasthidden, k, Py_True); + PyDict_SetItem(c->u->u_metadata.u_fasthidden, k, Py_True); } long scope = (symbol >> SCOPE_OFFSET) & SCOPE_MASK; PyObject *outv = PyDict_GetItemWithError(c->u->u_ste->ste_symbols, k); @@ -5429,11 +5108,11 @@ pop_inlined_comprehension_state(struct compiler *c, location loc, } } pos = 0; - while (PyDict_Next(c->u->u_fasthidden, &pos, &k, &v)) { + while (PyDict_Next(c->u->u_metadata.u_fasthidden, &pos, &k, &v)) { if (v == Py_True) { // we set to False instead of clearing, so we can track which names // were temporarily fast-locals and should use CO_FAST_HIDDEN - if (PyDict_SetItem(c->u->u_fasthidden, k, Py_False)) { + if (PyDict_SetItem(c->u->u_metadata.u_fasthidden, k, Py_False)) { return ERROR; } } @@ -5547,7 +5226,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, } } - co = assemble(c, 1); + co = optimize_and_assemble(c, 1); compiler_exit_scope(c); if (is_top_level_await && is_async_generator){ c->u->u_ste->ste_coroutine = 1; @@ -5947,6 +5626,13 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) return compiler_formatted_value(c, e); /* The following exprs can be assignment targets. */ case Attribute_kind: + if (e->v.Attribute.ctx == Load && can_optimize_super_call(c, e)) { + RETURN_IF_ERROR(load_args_for_super(c, e->v.Attribute.value)); + int opcode = asdl_seq_LEN(e->v.Attribute.value->v.Call.args) ? 
+ LOAD_SUPER_ATTR : LOAD_ZERO_SUPER_ATTR; + ADDOP_NAME(c, loc, opcode, e->v.Attribute.attr, names); + return SUCCESS; + } VISIT(c, expr, e->v.Attribute.value); loc = LOC(e); loc = update_start_location_to_match_attr(c, loc, e); @@ -7170,2819 +6856,398 @@ compiler_match(struct compiler *c, stmt_ty s) #undef WILDCARD_CHECK #undef WILDCARD_STAR_CHECK - -/* End of the compiler section, beginning of the assembler section */ - - -struct assembler { - PyObject *a_bytecode; /* bytes containing bytecode */ - int a_offset; /* offset into bytecode */ - PyObject *a_except_table; /* bytes containing exception table */ - int a_except_table_off; /* offset into exception table */ - /* Location Info */ - int a_lineno; /* lineno of last emitted instruction */ - PyObject* a_linetable; /* bytes containing location info */ - int a_location_off; /* offset of last written location info frame */ -}; - -static basicblock** -make_cfg_traversal_stack(basicblock *entryblock) { - int nblocks = 0; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - b->b_visited = 0; - nblocks++; - } - basicblock **stack = (basicblock **)PyMem_Malloc(sizeof(basicblock *) * nblocks); - if (!stack) { - PyErr_NoMemory(); - } - return stack; -} - -Py_LOCAL_INLINE(void) -stackdepth_push(basicblock ***sp, basicblock *b, int depth) +static PyObject * +consts_dict_keys_inorder(PyObject *dict) { - assert(b->b_startdepth < 0 || b->b_startdepth == depth); - if (b->b_startdepth < depth && b->b_startdepth < 100) { - assert(b->b_startdepth < 0); - b->b_startdepth = depth; - *(*sp)++ = b; + PyObject *consts, *k, *v; + Py_ssize_t i, pos = 0, size = PyDict_GET_SIZE(dict); + + consts = PyList_New(size); /* PyCode_Optimize() requires a list */ + if (consts == NULL) + return NULL; + while (PyDict_Next(dict, &pos, &k, &v)) { + i = PyLong_AS_LONG(v); + /* The keys of the dictionary can be tuples wrapping a constant. + * (see dict_add_o and _PyCode_ConstantKey). In that case + * the object we want is always second. */ + if (PyTuple_CheckExact(k)) { + k = PyTuple_GET_ITEM(k, 1); + } + assert(i < size); + assert(i >= 0); + PyList_SET_ITEM(consts, i, Py_NewRef(k)); } + return consts; } -/* Find the flow path that needs the largest stack. We assume that - * cycles in the flow graph have no net effect on the stack depth. 
- */ static int -stackdepth(basicblock *entryblock, int code_flags) +compute_code_flags(struct compiler *c) { - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - b->b_startdepth = INT_MIN; - } - basicblock **stack = make_cfg_traversal_stack(entryblock); - if (!stack) { - return ERROR; + PySTEntryObject *ste = c->u->u_ste; + int flags = 0; + if (ste->ste_type == FunctionBlock) { + flags |= CO_NEWLOCALS | CO_OPTIMIZED; + if (ste->ste_nested) + flags |= CO_NESTED; + if (ste->ste_generator && !ste->ste_coroutine) + flags |= CO_GENERATOR; + if (!ste->ste_generator && ste->ste_coroutine) + flags |= CO_COROUTINE; + if (ste->ste_generator && ste->ste_coroutine) + flags |= CO_ASYNC_GENERATOR; + if (ste->ste_varargs) + flags |= CO_VARARGS; + if (ste->ste_varkeywords) + flags |= CO_VARKEYWORDS; } - int maxdepth = 0; - basicblock **sp = stack; - if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) { - stackdepth_push(&sp, entryblock, 1); - } else { - stackdepth_push(&sp, entryblock, 0); - } + /* (Only) inherit compilerflags in PyCF_MASK */ + flags |= (c->c_flags.cf_flags & PyCF_MASK); - while (sp != stack) { - basicblock *b = *--sp; - int depth = b->b_startdepth; - assert(depth >= 0); - basicblock *next = b->b_next; - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - int effect = stack_effect(instr->i_opcode, instr->i_oparg, 0); - if (effect == PY_INVALID_STACK_EFFECT) { - PyErr_Format(PyExc_SystemError, - "compiler stack_effect(opcode=%d, arg=%i) failed", - instr->i_opcode, instr->i_oparg); - return ERROR; - } - int new_depth = depth + effect; - assert(new_depth >= 0); /* invalid code or bug in stackdepth() */ - if (new_depth > maxdepth) { - maxdepth = new_depth; - } - if (HAS_TARGET(instr->i_opcode)) { - effect = stack_effect(instr->i_opcode, instr->i_oparg, 1); - assert(effect != PY_INVALID_STACK_EFFECT); - int target_depth = depth + effect; - assert(target_depth >= 0); /* invalid code or bug in stackdepth() */ - if (target_depth > maxdepth) { - maxdepth = target_depth; - } - stackdepth_push(&sp, instr->i_target, target_depth); - } - depth = new_depth; - assert(!IS_ASSEMBLER_OPCODE(instr->i_opcode)); - if (IS_UNCONDITIONAL_JUMP_OPCODE(instr->i_opcode) || - IS_SCOPE_EXIT_OPCODE(instr->i_opcode)) - { - /* remaining code is dead */ - next = NULL; - break; - } - } - if (next != NULL) { - assert(BB_HAS_FALLTHROUGH(b)); - stackdepth_push(&sp, next, depth); - } + if ((IS_TOP_LEVEL_AWAIT(c)) && + ste->ste_coroutine && + !ste->ste_generator) { + flags |= CO_COROUTINE; } - PyMem_Free(stack); - return maxdepth; + + return flags; } -static int -assemble_init(struct assembler *a, int firstlineno) +// Merge *obj* with constant cache. +// Unlike merge_consts_recursive(), this function doesn't work recursively. 
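+// If an equal constant was merged before, *obj is replaced with the cached
+// object; otherwise *obj is added to the cache for future merges.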
+int +_PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj) { - memset(a, 0, sizeof(struct assembler)); - a->a_lineno = firstlineno; - a->a_linetable = NULL; - a->a_location_off = 0; - a->a_except_table = NULL; - a->a_bytecode = PyBytes_FromStringAndSize(NULL, DEFAULT_CODE_SIZE); - if (a->a_bytecode == NULL) { - goto error; + assert(PyDict_CheckExact(const_cache)); + PyObject *key = _PyCode_ConstantKey(*obj); + if (key == NULL) { + return ERROR; } - a->a_linetable = PyBytes_FromStringAndSize(NULL, DEFAULT_CNOTAB_SIZE); - if (a->a_linetable == NULL) { - goto error; + + // t is borrowed reference + PyObject *t = PyDict_SetDefault(const_cache, key, key); + Py_DECREF(key); + if (t == NULL) { + return ERROR; } - a->a_except_table = PyBytes_FromStringAndSize(NULL, DEFAULT_LNOTAB_SIZE); - if (a->a_except_table == NULL) { - goto error; + if (t == key) { // obj is new constant. + return SUCCESS; } - return SUCCESS; -error: - Py_XDECREF(a->a_bytecode); - Py_XDECREF(a->a_linetable); - Py_XDECREF(a->a_except_table); - return ERROR; -} - -static void -assemble_free(struct assembler *a) -{ - Py_XDECREF(a->a_bytecode); - Py_XDECREF(a->a_linetable); - Py_XDECREF(a->a_except_table); -} -static int -blocksize(basicblock *b) -{ - int size = 0; - for (int i = 0; i < b->b_iused; i++) { - size += instr_size(&b->b_instr[i]); + if (PyTuple_CheckExact(t)) { + // t is still borrowed reference + t = PyTuple_GET_ITEM(t, 1); } - return size; -} -static basicblock * -push_except_block(ExceptStack *stack, struct cfg_instr *setup) { - assert(is_block_push(setup)); - int opcode = setup->i_opcode; - basicblock * target = setup->i_target; - if (opcode == SETUP_WITH || opcode == SETUP_CLEANUP) { - target->b_preserve_lasti = 1; - } - stack->handlers[++stack->depth] = target; - return target; + Py_SETREF(*obj, Py_NewRef(t)); + return SUCCESS; } -static basicblock * -pop_except_block(ExceptStack *stack) { - assert(stack->depth > 0); - return stack->handlers[--stack->depth]; -} -static basicblock * -except_stack_top(ExceptStack *stack) { - return stack->handlers[stack->depth]; -} +static int * +build_cellfixedoffsets(_PyCompile_CodeUnitMetadata *umd) +{ + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); -static ExceptStack * -make_except_stack(void) { - ExceptStack *new = PyMem_Malloc(sizeof(ExceptStack)); - if (new == NULL) { + int noffsets = ncellvars + nfreevars; + int *fixed = PyMem_New(int, noffsets); + if (fixed == NULL) { PyErr_NoMemory(); return NULL; } - new->depth = 0; - new->handlers[0] = NULL; - return new; -} - -static ExceptStack * -copy_except_stack(ExceptStack *stack) { - ExceptStack *copy = PyMem_Malloc(sizeof(ExceptStack)); - if (copy == NULL) { - PyErr_NoMemory(); - return NULL; + for (int i = 0; i < noffsets; i++) { + fixed[i] = nlocals + i; } - memcpy(copy, stack, sizeof(ExceptStack)); - return copy; -} -static int -label_exception_targets(basicblock *entryblock) { - basicblock **todo_stack = make_cfg_traversal_stack(entryblock); - if (todo_stack == NULL) { - return ERROR; - } - ExceptStack *except_stack = make_except_stack(); - if (except_stack == NULL) { - PyMem_Free(todo_stack); - PyErr_NoMemory(); - return ERROR; - } - except_stack->depth = 0; - todo_stack[0] = entryblock; - entryblock->b_visited = 1; - entryblock->b_exceptstack = except_stack; - basicblock **todo = &todo_stack[1]; - basicblock *handler = NULL; - while (todo > todo_stack) { - todo--; - basicblock *b = 
todo[0]; - assert(b->b_visited == 1); - except_stack = b->b_exceptstack; - assert(except_stack != NULL); - b->b_exceptstack = NULL; - handler = except_stack_top(except_stack); - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - if (is_block_push(instr)) { - if (!instr->i_target->b_visited) { - ExceptStack *copy = copy_except_stack(except_stack); - if (copy == NULL) { - goto error; - } - instr->i_target->b_exceptstack = copy; - todo[0] = instr->i_target; - instr->i_target->b_visited = 1; - todo++; - } - handler = push_except_block(except_stack, instr); - } - else if (instr->i_opcode == POP_BLOCK) { - handler = pop_except_block(except_stack); - } - else if (is_jump(instr)) { - instr->i_except = handler; - assert(i == b->b_iused -1); - if (!instr->i_target->b_visited) { - if (BB_HAS_FALLTHROUGH(b)) { - ExceptStack *copy = copy_except_stack(except_stack); - if (copy == NULL) { - goto error; - } - instr->i_target->b_exceptstack = copy; - } - else { - instr->i_target->b_exceptstack = except_stack; - except_stack = NULL; - } - todo[0] = instr->i_target; - instr->i_target->b_visited = 1; - todo++; - } - } - else { - if (instr->i_opcode == YIELD_VALUE) { - instr->i_oparg = except_stack->depth; - } - instr->i_except = handler; - } - } - if (BB_HAS_FALLTHROUGH(b) && !b->b_next->b_visited) { - assert(except_stack != NULL); - b->b_next->b_exceptstack = except_stack; - todo[0] = b->b_next; - b->b_next->b_visited = 1; - todo++; - } - else if (except_stack != NULL) { - PyMem_Free(except_stack); + PyObject *varname, *cellindex; + Py_ssize_t pos = 0; + while (PyDict_Next(umd->u_cellvars, &pos, &varname, &cellindex)) { + PyObject *varindex = PyDict_GetItem(umd->u_varnames, varname); + if (varindex != NULL) { + assert(PyLong_AS_LONG(cellindex) < INT_MAX); + assert(PyLong_AS_LONG(varindex) < INT_MAX); + int oldindex = (int)PyLong_AS_LONG(cellindex); + int argoffset = (int)PyLong_AS_LONG(varindex); + fixed[oldindex] = argoffset; } } -#ifdef Py_DEBUG - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - assert(b->b_exceptstack == NULL); - } -#endif - PyMem_Free(todo_stack); - return SUCCESS; -error: - PyMem_Free(todo_stack); - PyMem_Free(except_stack); - return ERROR; -} - -static int -mark_except_handlers(basicblock *entryblock) { -#ifndef NDEBUG - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - assert(!b->b_except_handler); - } -#endif - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i=0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - if (is_block_push(instr)) { - instr->i_target->b_except_handler = 1; - } - } - } - return SUCCESS; + return fixed; } static int -mark_warm(basicblock *entryblock) { - basicblock **stack = make_cfg_traversal_stack(entryblock); - if (stack == NULL) { - return ERROR; - } - basicblock **sp = stack; - - *sp++ = entryblock; - entryblock->b_visited = 1; - while (sp > stack) { - basicblock *b = *(--sp); - assert(!b->b_except_handler); - b->b_warm = 1; - basicblock *next = b->b_next; - if (next && BB_HAS_FALLTHROUGH(b) && !next->b_visited) { - *sp++ = next; - next->b_visited = 1; - } - for (int i=0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - if (is_jump(instr) && !instr->i_target->b_visited) { - *sp++ = instr->i_target; - instr->i_target->b_visited = 1; - } - } +insert_prefix_instructions(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, + int *fixed, int nfreevars, int code_flags) +{ + assert(umd->u_firstlineno > 0); + + /* Add the generator prefix 
instructions. */ + if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) { + cfg_instr make_gen = { + .i_opcode = RETURN_GENERATOR, + .i_oparg = 0, + .i_loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1), + .i_target = NULL, + }; + RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 0, &make_gen)); + cfg_instr pop_top = { + .i_opcode = POP_TOP, + .i_oparg = 0, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 1, &pop_top)); } - PyMem_Free(stack); - return SUCCESS; -} -static int -mark_cold(basicblock *entryblock) { - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - assert(!b->b_cold && !b->b_warm); - } - if (mark_warm(entryblock) < 0) { - return ERROR; - } - - basicblock **stack = make_cfg_traversal_stack(entryblock); - if (stack == NULL) { - return ERROR; - } - - basicblock **sp = stack; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - if (b->b_except_handler) { - assert(!b->b_warm); - *sp++ = b; - b->b_visited = 1; - } - } - - while (sp > stack) { - basicblock *b = *(--sp); - b->b_cold = 1; - basicblock *next = b->b_next; - if (next && BB_HAS_FALLTHROUGH(b)) { - if (!next->b_warm && !next->b_visited) { - *sp++ = next; - next->b_visited = 1; - } - } - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - if (is_jump(instr)) { - assert(i == b->b_iused - 1); - basicblock *target = b->b_instr[i].i_target; - if (!target->b_warm && !target->b_visited) { - *sp++ = target; - target->b_visited = 1; - } - } - } - } - PyMem_Free(stack); - return SUCCESS; -} - -static int -remove_redundant_jumps(cfg_builder *g); - -static int -push_cold_blocks_to_end(cfg_builder *g, int code_flags) { - basicblock *entryblock = g->g_entryblock; - if (entryblock->b_next == NULL) { - /* single basicblock, no need to reorder */ - return SUCCESS; - } - RETURN_IF_ERROR(mark_cold(entryblock)); - - /* If we have a cold block with fallthrough to a warm block, add */ - /* an explicit jump instead of fallthrough */ - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - if (b->b_cold && BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_next->b_warm) { - basicblock *explicit_jump = cfg_builder_new_block(g); - if (explicit_jump == NULL) { - return ERROR; - } - basicblock_addop(explicit_jump, JUMP, b->b_next->b_label.id, NO_LOCATION); - explicit_jump->b_cold = 1; - explicit_jump->b_next = b->b_next; - b->b_next = explicit_jump; - - /* set target */ - struct cfg_instr *last = basicblock_last_instr(explicit_jump); - last->i_target = explicit_jump->b_next; - } - } - - assert(!entryblock->b_cold); /* First block can't be cold */ - basicblock *cold_blocks = NULL; - basicblock *cold_blocks_tail = NULL; - - basicblock *b = entryblock; - while(b->b_next) { - assert(!b->b_cold); - while (b->b_next && !b->b_next->b_cold) { - b = b->b_next; - } - if (b->b_next == NULL) { - /* no more cold blocks */ - break; - } - - /* b->b_next is the beginning of a cold streak */ - assert(!b->b_cold && b->b_next->b_cold); - - basicblock *b_end = b->b_next; - while (b_end->b_next && b_end->b_next->b_cold) { - b_end = b_end->b_next; - } - - /* b_end is the end of the cold streak */ - assert(b_end && b_end->b_cold); - assert(b_end->b_next == NULL || !b_end->b_next->b_cold); - - if (cold_blocks == NULL) { - cold_blocks = b->b_next; - } - else { - cold_blocks_tail->b_next = b->b_next; - } - cold_blocks_tail = b_end; - b->b_next = b_end->b_next; - b_end->b_next = NULL; - } - assert(b != NULL && b->b_next 
== NULL); - b->b_next = cold_blocks; - - if (cold_blocks != NULL) { - RETURN_IF_ERROR(remove_redundant_jumps(g)); - } - return SUCCESS; -} - -static void -convert_pseudo_ops(basicblock *entryblock) { - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - if (is_block_push(instr) || instr->i_opcode == POP_BLOCK) { - INSTR_SET_OP0(instr, NOP); - } else if (instr->i_opcode == STORE_FAST_MAYBE_NULL) { - instr->i_opcode = STORE_FAST; - } - } - } - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - remove_redundant_nops(b); - } -} - -static inline void -write_except_byte(struct assembler *a, int byte) { - unsigned char *p = (unsigned char *) PyBytes_AS_STRING(a->a_except_table); - p[a->a_except_table_off++] = byte; -} - -#define CONTINUATION_BIT 64 - -static void -assemble_emit_exception_table_item(struct assembler *a, int value, int msb) -{ - assert ((msb | 128) == 128); - assert(value >= 0 && value < (1 << 30)); - if (value >= 1 << 24) { - write_except_byte(a, (value >> 24) | CONTINUATION_BIT | msb); - msb = 0; - } - if (value >= 1 << 18) { - write_except_byte(a, ((value >> 18)&0x3f) | CONTINUATION_BIT | msb); - msb = 0; - } - if (value >= 1 << 12) { - write_except_byte(a, ((value >> 12)&0x3f) | CONTINUATION_BIT | msb); - msb = 0; - } - if (value >= 1 << 6) { - write_except_byte(a, ((value >> 6)&0x3f) | CONTINUATION_BIT | msb); - msb = 0; - } - write_except_byte(a, (value&0x3f) | msb); -} - -/* See Objects/exception_handling_notes.txt for details of layout */ -#define MAX_SIZE_OF_ENTRY 20 - -static int -assemble_emit_exception_table_entry(struct assembler *a, int start, int end, basicblock *handler) -{ - Py_ssize_t len = PyBytes_GET_SIZE(a->a_except_table); - if (a->a_except_table_off + MAX_SIZE_OF_ENTRY >= len) { - RETURN_IF_ERROR(_PyBytes_Resize(&a->a_except_table, len * 2)); - } - int size = end-start; - assert(end > start); - int target = handler->b_offset; - int depth = handler->b_startdepth - 1; - if (handler->b_preserve_lasti) { - depth -= 1; - } - assert(depth >= 0); - int depth_lasti = (depth<<1) | handler->b_preserve_lasti; - assemble_emit_exception_table_item(a, start, (1<<7)); - assemble_emit_exception_table_item(a, size, 0); - assemble_emit_exception_table_item(a, target, 0); - assemble_emit_exception_table_item(a, depth_lasti, 0); - return SUCCESS; -} - -static int -assemble_exception_table(struct assembler *a, basicblock *entryblock) -{ - basicblock *b; - int ioffset = 0; - basicblock *handler = NULL; - int start = -1; - for (b = entryblock; b != NULL; b = b->b_next) { - ioffset = b->b_offset; - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - if (instr->i_except != handler) { - if (handler != NULL) { - RETURN_IF_ERROR( - assemble_emit_exception_table_entry(a, start, ioffset, handler)); - } - start = ioffset; - handler = instr->i_except; - } - ioffset += instr_size(instr); - } - } - if (handler != NULL) { - RETURN_IF_ERROR(assemble_emit_exception_table_entry(a, start, ioffset, handler)); - } - return SUCCESS; -} - -/* Code location emitting code. See locations.md for a description of the format. 
*/ - -#define MSB 0x80 - -static void -write_location_byte(struct assembler* a, int val) -{ - PyBytes_AS_STRING(a->a_linetable)[a->a_location_off] = val&255; - a->a_location_off++; -} - - -static uint8_t * -location_pointer(struct assembler* a) -{ - return (uint8_t *)PyBytes_AS_STRING(a->a_linetable) + - a->a_location_off; -} - -static void -write_location_first_byte(struct assembler* a, int code, int length) -{ - a->a_location_off += write_location_entry_start( - location_pointer(a), code, length); -} - -static void -write_location_varint(struct assembler* a, unsigned int val) -{ - uint8_t *ptr = location_pointer(a); - a->a_location_off += write_varint(ptr, val); -} - - -static void -write_location_signed_varint(struct assembler* a, int val) -{ - uint8_t *ptr = location_pointer(a); - a->a_location_off += write_signed_varint(ptr, val); -} - -static void -write_location_info_short_form(struct assembler* a, int length, int column, int end_column) -{ - assert(length > 0 && length <= 8); - int column_low_bits = column & 7; - int column_group = column >> 3; - assert(column < 80); - assert(end_column >= column); - assert(end_column - column < 16); - write_location_first_byte(a, PY_CODE_LOCATION_INFO_SHORT0 + column_group, length); - write_location_byte(a, (column_low_bits << 4) | (end_column - column)); -} - -static void -write_location_info_oneline_form(struct assembler* a, int length, int line_delta, int column, int end_column) -{ - assert(length > 0 && length <= 8); - assert(line_delta >= 0 && line_delta < 3); - assert(column < 128); - assert(end_column < 128); - write_location_first_byte(a, PY_CODE_LOCATION_INFO_ONE_LINE0 + line_delta, length); - write_location_byte(a, column); - write_location_byte(a, end_column); -} - -static void -write_location_info_long_form(struct assembler* a, location loc, int length) -{ - assert(length > 0 && length <= 8); - write_location_first_byte(a, PY_CODE_LOCATION_INFO_LONG, length); - write_location_signed_varint(a, loc.lineno - a->a_lineno); - assert(loc.end_lineno >= loc.lineno); - write_location_varint(a, loc.end_lineno - loc.lineno); - write_location_varint(a, loc.col_offset + 1); - write_location_varint(a, loc.end_col_offset + 1); -} - -static void -write_location_info_none(struct assembler* a, int length) -{ - write_location_first_byte(a, PY_CODE_LOCATION_INFO_NONE, length); -} - -static void -write_location_info_no_column(struct assembler* a, int length, int line_delta) -{ - write_location_first_byte(a, PY_CODE_LOCATION_INFO_NO_COLUMNS, length); - write_location_signed_varint(a, line_delta); -} - -#define THEORETICAL_MAX_ENTRY_SIZE 25 /* 1 + 6 + 6 + 6 + 6 */ - -static int -write_location_info_entry(struct assembler* a, location loc, int isize) -{ - Py_ssize_t len = PyBytes_GET_SIZE(a->a_linetable); - if (a->a_location_off + THEORETICAL_MAX_ENTRY_SIZE >= len) { - assert(len > THEORETICAL_MAX_ENTRY_SIZE); - RETURN_IF_ERROR(_PyBytes_Resize(&a->a_linetable, len*2)); - } - if (loc.lineno < 0) { - write_location_info_none(a, isize); - return SUCCESS; - } - int line_delta = loc.lineno - a->a_lineno; - int column = loc.col_offset; - int end_column = loc.end_col_offset; - assert(column >= -1); - assert(end_column >= -1); - if (column < 0 || end_column < 0) { - if (loc.end_lineno == loc.lineno || loc.end_lineno == -1) { - write_location_info_no_column(a, isize, line_delta); - a->a_lineno = loc.lineno; - return SUCCESS; - } - } - else if (loc.end_lineno == loc.lineno) { - if (line_delta == 0 && column < 80 && end_column - column < 16 && end_column >= column) { - 
write_location_info_short_form(a, isize, column, end_column); - return SUCCESS; - } - if (line_delta >= 0 && line_delta < 3 && column < 128 && end_column < 128) { - write_location_info_oneline_form(a, isize, line_delta, column, end_column); - a->a_lineno = loc.lineno; - return SUCCESS; - } - } - write_location_info_long_form(a, loc, isize); - a->a_lineno = loc.lineno; - return SUCCESS; -} - -static int -assemble_emit_location(struct assembler* a, location loc, int isize) -{ - if (isize == 0) { - return SUCCESS; - } - while (isize > 8) { - RETURN_IF_ERROR(write_location_info_entry(a, loc, 8)); - isize -= 8; - } - return write_location_info_entry(a, loc, isize); -} - -static int -assemble_location_info(struct assembler *a, basicblock *entryblock, int firstlineno) -{ - a->a_lineno = firstlineno; - location loc = NO_LOCATION; - int size = 0; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int j = 0; j < b->b_iused; j++) { - if (!same_location(loc, b->b_instr[j].i_loc)) { - RETURN_IF_ERROR(assemble_emit_location(a, loc, size)); - loc = b->b_instr[j].i_loc; - size = 0; - } - size += instr_size(&b->b_instr[j]); - } - } - RETURN_IF_ERROR(assemble_emit_location(a, loc, size)); - return SUCCESS; -} - -/* assemble_emit_instr() - Extend the bytecode with a new instruction. - Update lnotab if necessary. -*/ - -static int -assemble_emit_instr(struct assembler *a, struct cfg_instr *i) -{ - Py_ssize_t len = PyBytes_GET_SIZE(a->a_bytecode); - _Py_CODEUNIT *code; - - int size = instr_size(i); - if (a->a_offset + size >= len / (int)sizeof(_Py_CODEUNIT)) { - if (len > PY_SSIZE_T_MAX / 2) { - return ERROR; - } - RETURN_IF_ERROR(_PyBytes_Resize(&a->a_bytecode, len * 2)); - } - code = (_Py_CODEUNIT *)PyBytes_AS_STRING(a->a_bytecode) + a->a_offset; - a->a_offset += size; - write_instr(code, i, size); - return SUCCESS; -} - -static int merge_const_one(PyObject *const_cache, PyObject **obj); - -static int -assemble_emit(struct assembler *a, basicblock *entryblock, int first_lineno, - PyObject *const_cache) -{ - RETURN_IF_ERROR(assemble_init(a, first_lineno)); - - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int j = 0; j < b->b_iused; j++) { - RETURN_IF_ERROR(assemble_emit_instr(a, &b->b_instr[j])); - } - } - - RETURN_IF_ERROR(assemble_location_info(a, entryblock, a->a_lineno)); - - RETURN_IF_ERROR(assemble_exception_table(a, entryblock)); - - RETURN_IF_ERROR(_PyBytes_Resize(&a->a_except_table, a->a_except_table_off)); - RETURN_IF_ERROR(merge_const_one(const_cache, &a->a_except_table)); - - RETURN_IF_ERROR(_PyBytes_Resize(&a->a_linetable, a->a_location_off)); - RETURN_IF_ERROR(merge_const_one(const_cache, &a->a_linetable)); - - RETURN_IF_ERROR(_PyBytes_Resize(&a->a_bytecode, a->a_offset * sizeof(_Py_CODEUNIT))); - RETURN_IF_ERROR(merge_const_one(const_cache, &a->a_bytecode)); - return SUCCESS; -} - -static int -normalize_jumps_in_block(cfg_builder *g, basicblock *b) { - struct cfg_instr *last = basicblock_last_instr(b); - if (last == NULL || !is_jump(last)) { - return SUCCESS; - } - assert(!IS_ASSEMBLER_OPCODE(last->i_opcode)); - bool is_forward = last->i_target->b_visited == 0; - switch(last->i_opcode) { - case JUMP: - last->i_opcode = is_forward ? JUMP_FORWARD : JUMP_BACKWARD; - return SUCCESS; - case JUMP_NO_INTERRUPT: - last->i_opcode = is_forward ? 
- JUMP_FORWARD : JUMP_BACKWARD_NO_INTERRUPT; - return SUCCESS; - } - int reversed_opcode = 0; - switch(last->i_opcode) { - case POP_JUMP_IF_NOT_NONE: - reversed_opcode = POP_JUMP_IF_NONE; - break; - case POP_JUMP_IF_NONE: - reversed_opcode = POP_JUMP_IF_NOT_NONE; - break; - case POP_JUMP_IF_FALSE: - reversed_opcode = POP_JUMP_IF_TRUE; - break; - case POP_JUMP_IF_TRUE: - reversed_opcode = POP_JUMP_IF_FALSE; - break; - case JUMP_IF_TRUE_OR_POP: - case JUMP_IF_FALSE_OR_POP: - if (!is_forward) { - /* As far as we can tell, the compiler never emits - * these jumps with a backwards target. If/when this - * exception is raised, we have found a use case for - * a backwards version of this jump (or to replace - * it with the sequence (COPY 1, POP_JUMP_IF_T/F, POP) - */ - PyErr_Format(PyExc_SystemError, - "unexpected %s jumping backwards", - last->i_opcode == JUMP_IF_TRUE_OR_POP ? - "JUMP_IF_TRUE_OR_POP" : "JUMP_IF_FALSE_OR_POP"); - } - return SUCCESS; - } - if (is_forward) { - return SUCCESS; - } - - /* transform 'conditional jump T' to - * 'reversed_jump b_next' followed by 'jump_backwards T' - */ - - basicblock *target = last->i_target; - basicblock *backwards_jump = cfg_builder_new_block(g); - if (backwards_jump == NULL) { - return ERROR; - } - basicblock_addop(backwards_jump, JUMP, target->b_label.id, NO_LOCATION); - backwards_jump->b_instr[0].i_target = target; - last->i_opcode = reversed_opcode; - last->i_target = b->b_next; - - backwards_jump->b_cold = b->b_cold; - backwards_jump->b_next = b->b_next; - b->b_next = backwards_jump; - return SUCCESS; -} - -static int -normalize_jumps(cfg_builder *g) -{ - basicblock *entryblock = g->g_entryblock; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - b->b_visited = 0; - } - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - b->b_visited = 1; - RETURN_IF_ERROR(normalize_jumps_in_block(g, b)); - } - return SUCCESS; -} - -static void -assemble_jump_offsets(basicblock *entryblock) -{ - int bsize, totsize, extended_arg_recompile; - - /* Compute the size of each block and fixup jump args. - Replace block pointer with position in bytecode. */ - do { - totsize = 0; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - bsize = blocksize(b); - b->b_offset = totsize; - totsize += bsize; - } - extended_arg_recompile = 0; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - bsize = b->b_offset; - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - int isize = instr_size(instr); - /* Relative jumps are computed relative to - the instruction pointer after fetching - the jump instruction. - */ - bsize += isize; - if (is_jump(instr)) { - instr->i_oparg = instr->i_target->b_offset; - if (is_relative_jump(instr)) { - if (instr->i_oparg < bsize) { - assert(IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode)); - instr->i_oparg = bsize - instr->i_oparg; - } - else { - assert(!IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode)); - instr->i_oparg -= bsize; - } - } - else { - assert(!IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode)); - } - if (instr_size(instr) != isize) { - extended_arg_recompile = 1; - } - } - } - } - - /* XXX: This is an awful hack that could hurt performance, but - on the bright side it should work until we come up - with a better solution. - - The issue is that in the first loop blocksize() is called - which calls instr_size() which requires i_oparg be set - appropriately. There is a bootstrap problem because - i_oparg is calculated in the second loop above. 
- - So we loop until we stop seeing new EXTENDED_ARGs. - The only EXTENDED_ARGs that could be popping up are - ones in jump instructions. So this should converge - fairly quickly. - */ - } while (extended_arg_recompile); -} - - -// helper functions for add_checks_for_loads_of_unknown_variables -static inline void -maybe_push(basicblock *b, uint64_t unsafe_mask, basicblock ***sp) -{ - // Push b if the unsafe mask is giving us any new information. - // To avoid overflowing the stack, only allow each block once. - // Use b->b_visited=1 to mean that b is currently on the stack. - uint64_t both = b->b_unsafe_locals_mask | unsafe_mask; - if (b->b_unsafe_locals_mask != both) { - b->b_unsafe_locals_mask = both; - // More work left to do. - if (!b->b_visited) { - // not on the stack, so push it. - *(*sp)++ = b; - b->b_visited = 1; - } - } -} - -static void -scan_block_for_locals(basicblock *b, basicblock ***sp) -{ - // bit i is set if local i is potentially uninitialized - uint64_t unsafe_mask = b->b_unsafe_locals_mask; - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - assert(instr->i_opcode != EXTENDED_ARG); - assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); - if (instr->i_except != NULL) { - maybe_push(instr->i_except, unsafe_mask, sp); - } - if (instr->i_oparg >= 64) { - continue; - } - assert(instr->i_oparg >= 0); - uint64_t bit = (uint64_t)1 << instr->i_oparg; - switch (instr->i_opcode) { - case DELETE_FAST: - case LOAD_FAST_AND_CLEAR: - case STORE_FAST_MAYBE_NULL: - unsafe_mask |= bit; - break; - case STORE_FAST: - unsafe_mask &= ~bit; - break; - case LOAD_FAST_CHECK: - // If this doesn't raise, then the local is defined. - unsafe_mask &= ~bit; - break; - case LOAD_FAST: - if (unsafe_mask & bit) { - instr->i_opcode = LOAD_FAST_CHECK; - } - unsafe_mask &= ~bit; - break; - } - } - if (b->b_next && BB_HAS_FALLTHROUGH(b)) { - maybe_push(b->b_next, unsafe_mask, sp); - } - struct cfg_instr *last = basicblock_last_instr(b); - if (last && is_jump(last)) { - assert(last->i_target != NULL); - maybe_push(last->i_target, unsafe_mask, sp); - } -} - -static int -fast_scan_many_locals(basicblock *entryblock, int nlocals) -{ - assert(nlocals > 64); - Py_ssize_t *states = PyMem_Calloc(nlocals - 64, sizeof(Py_ssize_t)); - if (states == NULL) { - PyErr_NoMemory(); - return ERROR; - } - Py_ssize_t blocknum = 0; - // state[i - 64] == blocknum if local i is guaranteed to - // be initialized, i.e., if it has had a previous LOAD_FAST or - // STORE_FAST within that basicblock (not followed by DELETE_FAST). 
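The scan above folds "which of the first 64 locals may still be unbound" into a single uint64_t per basic block: stores clear a bit, deletes set it, and a load made while the bit is still set gets rewritten to the checking form. A minimal standalone sketch of that dataflow over a straight-line sequence (invented opcode names and structs, not the compiler's real ones):

    #include <stdint.h>
    #include <stdio.h>

    enum { OP_STORE, OP_DELETE, OP_LOAD };   /* stand-ins for STORE_FAST, DELETE_FAST, LOAD_FAST */

    struct ins { int op; int local; int needs_check; };

    static void
    scan(struct ins *code, int n, uint64_t unsafe_mask)
    {
        for (int i = 0; i < n; i++) {
            uint64_t bit = (uint64_t)1 << code[i].local;
            switch (code[i].op) {
            case OP_DELETE:
                unsafe_mask |= bit;          /* the local may be unbound afterwards */
                break;
            case OP_STORE:
                unsafe_mask &= ~bit;         /* definitely bound afterwards */
                break;
            case OP_LOAD:
                code[i].needs_check = (unsafe_mask & bit) != 0;
                unsafe_mask &= ~bit;         /* if the load didn't raise, it is bound now */
                break;
            }
        }
    }

    int
    main(void)
    {
        /* Roughly: x = 1; del x; return x  -- only the final load needs a check. */
        struct ins code[] = { {OP_STORE, 0, 0}, {OP_DELETE, 0, 0}, {OP_LOAD, 0, 0} };
        scan(code, 3, 0);
        printf("last load needs check: %d\n", code[2].needs_check);   /* prints 1 */
        return 0;
    }

The fast_scan_many_locals() loop resumes below; it applies the same idea block by block, using a small array of block numbers instead of a bitmask for locals past the first 64.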
- for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - blocknum++; - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - assert(instr->i_opcode != EXTENDED_ARG); - assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); - int arg = instr->i_oparg; - if (arg < 64) { - continue; - } - assert(arg >= 0); - switch (instr->i_opcode) { - case DELETE_FAST: - case LOAD_FAST_AND_CLEAR: - case STORE_FAST_MAYBE_NULL: - states[arg - 64] = blocknum - 1; - break; - case STORE_FAST: - states[arg - 64] = blocknum; - break; - case LOAD_FAST: - if (states[arg - 64] != blocknum) { - instr->i_opcode = LOAD_FAST_CHECK; - } - states[arg - 64] = blocknum; - break; - case LOAD_FAST_CHECK: - Py_UNREACHABLE(); - } - } - } - PyMem_Free(states); - return SUCCESS; -} - -static int -add_checks_for_loads_of_uninitialized_variables(basicblock *entryblock, - int nlocals, - int nparams) -{ - if (nlocals == 0) { - return SUCCESS; - } - if (nlocals > 64) { - // To avoid O(nlocals**2) compilation, locals beyond the first - // 64 are only analyzed one basicblock at a time: initialization - // info is not passed between basicblocks. - if (fast_scan_many_locals(entryblock, nlocals) < 0) { - return ERROR; - } - nlocals = 64; - } - basicblock **stack = make_cfg_traversal_stack(entryblock); - if (stack == NULL) { - return ERROR; - } - basicblock **sp = stack; - - // First origin of being uninitialized: - // The non-parameter locals in the entry block. - uint64_t start_mask = 0; - for (int i = nparams; i < nlocals; i++) { - start_mask |= (uint64_t)1 << i; - } - maybe_push(entryblock, start_mask, &sp); - - // Second origin of being uninitialized: - // There could be DELETE_FAST somewhere, so - // be sure to scan each basicblock at least once. - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - scan_block_for_locals(b, &sp); - } - - // Now propagate the uncertainty from the origins we found: Use - // LOAD_FAST_CHECK for any LOAD_FAST where the local could be undefined. - while (sp > stack) { - basicblock *b = *--sp; - // mark as no longer on stack - b->b_visited = 0; - scan_block_for_locals(b, &sp); - } - PyMem_Free(stack); - return SUCCESS; -} - -static PyObject * -dict_keys_inorder(PyObject *dict, Py_ssize_t offset) -{ - PyObject *tuple, *k, *v; - Py_ssize_t i, pos = 0, size = PyDict_GET_SIZE(dict); - - tuple = PyTuple_New(size); - if (tuple == NULL) - return NULL; - while (PyDict_Next(dict, &pos, &k, &v)) { - i = PyLong_AS_LONG(v); - assert((i - offset) < size); - assert((i - offset) >= 0); - PyTuple_SET_ITEM(tuple, i - offset, Py_NewRef(k)); - } - return tuple; -} - -static PyObject * -consts_dict_keys_inorder(PyObject *dict) -{ - PyObject *consts, *k, *v; - Py_ssize_t i, pos = 0, size = PyDict_GET_SIZE(dict); - - consts = PyList_New(size); /* PyCode_Optimize() requires a list */ - if (consts == NULL) - return NULL; - while (PyDict_Next(dict, &pos, &k, &v)) { - i = PyLong_AS_LONG(v); - /* The keys of the dictionary can be tuples wrapping a constant. - * (see dict_add_o and _PyCode_ConstantKey). In that case - * the object we want is always second. 
*/ - if (PyTuple_CheckExact(k)) { - k = PyTuple_GET_ITEM(k, 1); - } - assert(i < size); - assert(i >= 0); - PyList_SET_ITEM(consts, i, Py_NewRef(k)); - } - return consts; -} - -static int -compute_code_flags(struct compiler *c) -{ - PySTEntryObject *ste = c->u->u_ste; - int flags = 0; - if (ste->ste_type == FunctionBlock) { - flags |= CO_NEWLOCALS | CO_OPTIMIZED; - if (ste->ste_nested) - flags |= CO_NESTED; - if (ste->ste_generator && !ste->ste_coroutine) - flags |= CO_GENERATOR; - if (!ste->ste_generator && ste->ste_coroutine) - flags |= CO_COROUTINE; - if (ste->ste_generator && ste->ste_coroutine) - flags |= CO_ASYNC_GENERATOR; - if (ste->ste_varargs) - flags |= CO_VARARGS; - if (ste->ste_varkeywords) - flags |= CO_VARKEYWORDS; - } - - /* (Only) inherit compilerflags in PyCF_MASK */ - flags |= (c->c_flags.cf_flags & PyCF_MASK); - - if ((IS_TOP_LEVEL_AWAIT(c)) && - ste->ste_coroutine && - !ste->ste_generator) { - flags |= CO_COROUTINE; - } - - return flags; -} - -// Merge *obj* with constant cache. -// Unlike merge_consts_recursive(), this function doesn't work recursively. -static int -merge_const_one(PyObject *const_cache, PyObject **obj) -{ - assert(PyDict_CheckExact(const_cache)); - PyObject *key = _PyCode_ConstantKey(*obj); - if (key == NULL) { - return ERROR; - } - - // t is borrowed reference - PyObject *t = PyDict_SetDefault(const_cache, key, key); - Py_DECREF(key); - if (t == NULL) { - return ERROR; - } - if (t == key) { // obj is new constant. - return SUCCESS; - } - - if (PyTuple_CheckExact(t)) { - // t is still borrowed reference - t = PyTuple_GET_ITEM(t, 1); - } - - Py_SETREF(*obj, Py_NewRef(t)); - return SUCCESS; -} - -// This is in codeobject.c. -extern void _Py_set_localsplus_info(int, PyObject *, unsigned char, - PyObject *, PyObject *); - -static void -compute_localsplus_info(struct compiler_unit *u, int nlocalsplus, - PyObject *names, PyObject *kinds) -{ - PyObject *k, *v; - Py_ssize_t pos = 0; - while (PyDict_Next(u->u_varnames, &pos, &k, &v)) { - int offset = (int)PyLong_AS_LONG(v); - assert(offset >= 0); - assert(offset < nlocalsplus); - // For now we do not distinguish arg kinds. - _PyLocals_Kind kind = CO_FAST_LOCAL; - if (PyDict_Contains(u->u_fasthidden, k)) { - kind |= CO_FAST_HIDDEN; - } - if (PyDict_GetItem(u->u_cellvars, k) != NULL) { - kind |= CO_FAST_CELL; - } - _Py_set_localsplus_info(offset, k, kind, names, kinds); - } - int nlocals = (int)PyDict_GET_SIZE(u->u_varnames); - - // This counter mirrors the fix done in fix_cell_offsets(). - int numdropped = 0; - pos = 0; - while (PyDict_Next(u->u_cellvars, &pos, &k, &v)) { - if (PyDict_GetItem(u->u_varnames, k) != NULL) { - // Skip cells that are already covered by locals. 
- numdropped += 1; - continue; - } - int offset = (int)PyLong_AS_LONG(v); - assert(offset >= 0); - offset += nlocals - numdropped; - assert(offset < nlocalsplus); - _Py_set_localsplus_info(offset, k, CO_FAST_CELL, names, kinds); - } - - pos = 0; - while (PyDict_Next(u->u_freevars, &pos, &k, &v)) { - int offset = (int)PyLong_AS_LONG(v); - assert(offset >= 0); - offset += nlocals - numdropped; - assert(offset < nlocalsplus); - _Py_set_localsplus_info(offset, k, CO_FAST_FREE, names, kinds); - } -} - -static PyCodeObject * -makecode(struct compiler_unit *u, struct assembler *a, PyObject *const_cache, - PyObject *constslist, int maxdepth, int nlocalsplus, int code_flags, - PyObject *filename) -{ - PyCodeObject *co = NULL; - PyObject *names = NULL; - PyObject *consts = NULL; - PyObject *localsplusnames = NULL; - PyObject *localspluskinds = NULL; - names = dict_keys_inorder(u->u_names, 0); - if (!names) { - goto error; - } - if (merge_const_one(const_cache, &names) < 0) { - goto error; - } - - consts = PyList_AsTuple(constslist); /* PyCode_New requires a tuple */ - if (consts == NULL) { - goto error; - } - if (merge_const_one(const_cache, &consts) < 0) { - goto error; - } - - assert(u->u_posonlyargcount < INT_MAX); - assert(u->u_argcount < INT_MAX); - assert(u->u_kwonlyargcount < INT_MAX); - int posonlyargcount = (int)u->u_posonlyargcount; - int posorkwargcount = (int)u->u_argcount; - assert(INT_MAX - posonlyargcount - posorkwargcount > 0); - int kwonlyargcount = (int)u->u_kwonlyargcount; - - localsplusnames = PyTuple_New(nlocalsplus); - if (localsplusnames == NULL) { - goto error; - } - localspluskinds = PyBytes_FromStringAndSize(NULL, nlocalsplus); - if (localspluskinds == NULL) { - goto error; - } - compute_localsplus_info(u, nlocalsplus, localsplusnames, localspluskinds); - - struct _PyCodeConstructor con = { - .filename = filename, - .name = u->u_name, - .qualname = u->u_qualname ? u->u_qualname : u->u_name, - .flags = code_flags, - - .code = a->a_bytecode, - .firstlineno = u->u_firstlineno, - .linetable = a->a_linetable, - - .consts = consts, - .names = names, - - .localsplusnames = localsplusnames, - .localspluskinds = localspluskinds, - - .argcount = posonlyargcount + posorkwargcount, - .posonlyargcount = posonlyargcount, - .kwonlyargcount = kwonlyargcount, - - .stacksize = maxdepth, - - .exceptiontable = a->a_except_table, - }; - - if (_PyCode_Validate(&con) < 0) { - goto error; - } - - if (merge_const_one(const_cache, &localsplusnames) < 0) { - goto error; - } - con.localsplusnames = localsplusnames; - - co = _PyCode_New(&con); - if (co == NULL) { - goto error; - } - - error: - Py_XDECREF(names); - Py_XDECREF(consts); - Py_XDECREF(localsplusnames); - Py_XDECREF(localspluskinds); - return co; -} - - -/* For debugging purposes only */ -#if 0 -static void -dump_instr(struct cfg_instr *i) -{ - const char *jrel = (is_relative_jump(i)) ? "jrel " : ""; - const char *jabs = (is_jump(i) && !is_relative_jump(i))? 
"jabs " : ""; - - char arg[128]; - - *arg = '\0'; - if (HAS_ARG(i->i_opcode)) { - sprintf(arg, "arg: %d ", i->i_oparg); - } - if (HAS_TARGET(i->i_opcode)) { - sprintf(arg, "target: %p [%d] ", i->i_target, i->i_oparg); - } - fprintf(stderr, "line: %d, opcode: %d %s%s%s\n", - i->i_loc.lineno, i->i_opcode, arg, jabs, jrel); -} - -static inline int -basicblock_returns(const basicblock *b) { - struct cfg_instr *last = basicblock_last_instr(b); - return last && (last->i_opcode == RETURN_VALUE || last->i_opcode == RETURN_CONST); -} - -static void -dump_basicblock(const basicblock *b) -{ - const char *b_return = basicblock_returns(b) ? "return " : ""; - fprintf(stderr, "%d: [EH=%d CLD=%d WRM=%d NO_FT=%d %p] used: %d, depth: %d, offset: %d %s\n", - b->b_label.id, b->b_except_handler, b->b_cold, b->b_warm, BB_NO_FALLTHROUGH(b), b, b->b_iused, - b->b_startdepth, b->b_offset, b_return); - if (b->b_instr) { - int i; - for (i = 0; i < b->b_iused; i++) { - fprintf(stderr, " [%02d] ", i); - dump_instr(b->b_instr + i); - } - } -} -#endif - - -static int -translate_jump_labels_to_targets(basicblock *entryblock); - -static int -optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache); - -static int -remove_unused_consts(basicblock *entryblock, PyObject *consts); - -/* Duplicates exit BBs, so that line numbers can be propagated to them */ -static int -duplicate_exits_without_lineno(cfg_builder *g); - -static int * -build_cellfixedoffsets(struct compiler_unit *u) -{ - int nlocals = (int)PyDict_GET_SIZE(u->u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(u->u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(u->u_freevars); - - int noffsets = ncellvars + nfreevars; - int *fixed = PyMem_New(int, noffsets); - if (fixed == NULL) { - PyErr_NoMemory(); - return NULL; - } - for (int i = 0; i < noffsets; i++) { - fixed[i] = nlocals + i; - } - - PyObject *varname, *cellindex; - Py_ssize_t pos = 0; - while (PyDict_Next(u->u_cellvars, &pos, &varname, &cellindex)) { - PyObject *varindex = PyDict_GetItem(u->u_varnames, varname); - if (varindex != NULL) { - assert(PyLong_AS_LONG(cellindex) < INT_MAX); - assert(PyLong_AS_LONG(varindex) < INT_MAX); - int oldindex = (int)PyLong_AS_LONG(cellindex); - int argoffset = (int)PyLong_AS_LONG(varindex); - fixed[oldindex] = argoffset; - } - } - - return fixed; -} - -static int -insert_prefix_instructions(struct compiler_unit *u, basicblock *entryblock, - int *fixed, int nfreevars, int code_flags) -{ - assert(u->u_firstlineno > 0); - - /* Add the generator prefix instructions. */ - if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) { - struct cfg_instr make_gen = { - .i_opcode = RETURN_GENERATOR, - .i_oparg = 0, - .i_loc = LOCATION(u->u_firstlineno, u->u_firstlineno, -1, -1), - .i_target = NULL, - }; - RETURN_IF_ERROR(insert_instruction(entryblock, 0, &make_gen)); - struct cfg_instr pop_top = { - .i_opcode = POP_TOP, - .i_oparg = 0, - .i_loc = NO_LOCATION, - .i_target = NULL, - }; - RETURN_IF_ERROR(insert_instruction(entryblock, 1, &pop_top)); - } - - /* Set up cells for any variable that escapes, to be put in a closure. */ - const int ncellvars = (int)PyDict_GET_SIZE(u->u_cellvars); - if (ncellvars) { - // u->u_cellvars has the cells out of order so we sort them - // before adding the MAKE_CELL instructions. Note that we - // adjust for arg cells, which come first. 
- const int nvars = ncellvars + (int)PyDict_GET_SIZE(u->u_varnames); - int *sorted = PyMem_RawCalloc(nvars, sizeof(int)); - if (sorted == NULL) { - PyErr_NoMemory(); - return ERROR; - } - for (int i = 0; i < ncellvars; i++) { - sorted[fixed[i]] = i + 1; - } - for (int i = 0, ncellsused = 0; ncellsused < ncellvars; i++) { - int oldindex = sorted[i] - 1; - if (oldindex == -1) { - continue; - } - struct cfg_instr make_cell = { - .i_opcode = MAKE_CELL, - // This will get fixed in offset_derefs(). - .i_oparg = oldindex, - .i_loc = NO_LOCATION, - .i_target = NULL, - }; - RETURN_IF_ERROR(insert_instruction(entryblock, ncellsused, &make_cell)); - ncellsused += 1; - } - PyMem_RawFree(sorted); - } - - if (nfreevars) { - struct cfg_instr copy_frees = { - .i_opcode = COPY_FREE_VARS, - .i_oparg = nfreevars, - .i_loc = NO_LOCATION, - .i_target = NULL, - }; - RETURN_IF_ERROR(insert_instruction(entryblock, 0, ©_frees)); - } - - return SUCCESS; -} - -/* Make sure that all returns have a line number, even if early passes - * have failed to propagate a correct line number. - * The resulting line number may not be correct according to PEP 626, - * but should be "good enough", and no worse than in older versions. */ -static void -guarantee_lineno_for_exits(basicblock *entryblock, int firstlineno) { - int lineno = firstlineno; - assert(lineno > 0); - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - struct cfg_instr *last = basicblock_last_instr(b); - if (last == NULL) { - continue; - } - if (last->i_loc.lineno < 0) { - if (last->i_opcode == RETURN_VALUE) { - for (int i = 0; i < b->b_iused; i++) { - assert(b->b_instr[i].i_loc.lineno < 0); - - b->b_instr[i].i_loc.lineno = lineno; - } - } - } - else { - lineno = last->i_loc.lineno; - } - } -} - -static int -fix_cell_offsets(struct compiler_unit *u, basicblock *entryblock, int *fixedmap) -{ - int nlocals = (int)PyDict_GET_SIZE(u->u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(u->u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(u->u_freevars); - int noffsets = ncellvars + nfreevars; - - // First deal with duplicates (arg cells). - int numdropped = 0; - for (int i = 0; i < noffsets ; i++) { - if (fixedmap[i] == i + nlocals) { - fixedmap[i] -= numdropped; - } - else { - // It was a duplicate (cell/arg). - numdropped += 1; - } - } - - // Then update offsets, either relative to locals or by cell2arg. - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *inst = &b->b_instr[i]; - // This is called before extended args are generated. 
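A point worth making concrete here: an argument that is also a cell keeps its argument slot, and every such duplicate shifts the remaining cell and free offsets down by one, which is exactly the numdropped bookkeeping in this function. A minimal standalone sketch with invented counts (illustrative only, not the patch's code):

    #include <stdio.h>

    int
    main(void)
    {
        /* Two locals (arg "a" at 0, "tmp" at 1); two cells: cell 0 is the
         * arg "a" again, cell 1 is a genuine closure variable. */
        int nlocals = 2;
        int fixed[2];
        int cell_is_arg[2] = {1, 0};
        int arg_offset[2]  = {0, -1};

        for (int i = 0; i < 2; i++) {
            fixed[i] = nlocals + i;         /* default: slots after the plain locals */
            if (cell_is_arg[i]) {
                fixed[i] = arg_offset[i];   /* duplicate cell reuses the arg's slot */
            }
        }
        /* Every duplicate shifts the remaining cell/free slots down by one. */
        int numdropped = 0;
        for (int i = 0; i < 2; i++) {
            if (fixed[i] == nlocals + i) {
                fixed[i] -= numdropped;
            }
            else {
                numdropped += 1;
            }
        }
        printf("cell 0 -> %d, cell 1 -> %d, dropped %d\n", fixed[0], fixed[1], numdropped);
        /* prints: cell 0 -> 0, cell 1 -> 2, dropped 1 */
        return 0;
    }

The loop continues below, rewriting each MAKE_CELL/LOAD_DEREF-style oparg through the adjusted map.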
- assert(inst->i_opcode != EXTENDED_ARG); - int oldoffset = inst->i_oparg; - switch(inst->i_opcode) { - case MAKE_CELL: - case LOAD_CLOSURE: - case LOAD_DEREF: - case STORE_DEREF: - case DELETE_DEREF: - case LOAD_CLASSDEREF: - assert(oldoffset >= 0); - assert(oldoffset < noffsets); - assert(fixedmap[oldoffset] >= 0); - inst->i_oparg = fixedmap[oldoffset]; - } - } - } - - return numdropped; -} - - -#ifndef NDEBUG - -static bool -no_redundant_nops(cfg_builder *g) { - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - if (remove_redundant_nops(b) != 0) { - return false; - } - } - return true; -} - -static bool -no_redundant_jumps(cfg_builder *g) { - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - struct cfg_instr *last = basicblock_last_instr(b); - if (last != NULL) { - if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { - assert(last->i_target != b->b_next); - if (last->i_target == b->b_next) { - return false; - } - } - } - } - return true; -} - -static bool -opcode_metadata_is_sane(cfg_builder *g) { - bool result = true; - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - int opcode = instr->i_opcode; - int oparg = instr->i_oparg; - assert(opcode <= MAX_REAL_OPCODE); - for (int jump = 0; jump <= 1; jump++) { - int popped = _PyOpcode_num_popped(opcode, oparg, jump ? true : false); - int pushed = _PyOpcode_num_pushed(opcode, oparg, jump ? true : false); - assert((pushed < 0) == (popped < 0)); - if (pushed >= 0) { - assert(_PyOpcode_opcode_metadata[opcode].valid_entry); - int effect = stack_effect(opcode, instr->i_oparg, jump); - if (effect != pushed - popped) { - fprintf(stderr, - "op=%d arg=%d jump=%d: stack_effect (%d) != pushed (%d) - popped (%d)\n", - opcode, oparg, jump, effect, pushed, popped); - result = false; - } - } - } - } - } - return result; -} - -static bool -no_empty_basic_blocks(cfg_builder *g) { - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - if (b->b_iused == 0) { - return false; - } - } - return true; -} -#endif - -static int -remove_redundant_jumps(cfg_builder *g) { - /* If a non-empty block ends with a jump instruction, check if the next - * non-empty block reached through normal flow control is the target - * of that jump. If it is, then the jump instruction is redundant and - * can be deleted. 
- */ - assert(no_empty_basic_blocks(g)); - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - struct cfg_instr *last = basicblock_last_instr(b); - assert(last != NULL); - assert(!IS_ASSEMBLER_OPCODE(last->i_opcode)); - if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { - if (last->i_target == NULL) { - PyErr_SetString(PyExc_SystemError, "jump with NULL target"); - return ERROR; - } - if (last->i_target == b->b_next) { - assert(b->b_next->b_iused); - INSTR_SET_OP0(last, NOP); - } - } - } - return SUCCESS; -} - -static int -prepare_localsplus(struct compiler_unit* u, cfg_builder *g, int code_flags) -{ - assert(PyDict_GET_SIZE(u->u_varnames) < INT_MAX); - assert(PyDict_GET_SIZE(u->u_cellvars) < INT_MAX); - assert(PyDict_GET_SIZE(u->u_freevars) < INT_MAX); - int nlocals = (int)PyDict_GET_SIZE(u->u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(u->u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(u->u_freevars); - assert(INT_MAX - nlocals - ncellvars > 0); - assert(INT_MAX - nlocals - ncellvars - nfreevars > 0); - int nlocalsplus = nlocals + ncellvars + nfreevars; - int* cellfixedoffsets = build_cellfixedoffsets(u); - if (cellfixedoffsets == NULL) { - return ERROR; - } - - - // This must be called before fix_cell_offsets(). - if (insert_prefix_instructions(u, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { - PyMem_Free(cellfixedoffsets); - return ERROR; - } - - int numdropped = fix_cell_offsets(u, g->g_entryblock, cellfixedoffsets); - PyMem_Free(cellfixedoffsets); // At this point we're done with it. - cellfixedoffsets = NULL; - if (numdropped < 0) { - return ERROR; - } - - nlocalsplus -= numdropped; - return nlocalsplus; -} - -static int -add_return_at_end(struct compiler *c, int addNone) -{ - /* Make sure every instruction stream that falls off the end returns None. - * This also ensures that no jump target offsets are out of bounds. - */ - if (addNone) { - ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); - } - ADDOP(c, NO_LOCATION, RETURN_VALUE); - return SUCCESS; -} - -static void propagate_line_numbers(basicblock *entryblock); - -static int -resolve_line_numbers(struct compiler_unit *u, cfg_builder *g) -{ - /* Set firstlineno if it wasn't explicitly set. 
*/ - if (!u->u_firstlineno) { - if (g->g_entryblock->b_instr && g->g_entryblock->b_instr->i_loc.lineno) { - u->u_firstlineno = g->g_entryblock->b_instr->i_loc.lineno; - } - else { - u->u_firstlineno = 1; - } - } - RETURN_IF_ERROR(duplicate_exits_without_lineno(g)); - propagate_line_numbers(g->g_entryblock); - guarantee_lineno_for_exits(g->g_entryblock, u->u_firstlineno); - return SUCCESS; -} - -static int -optimize_code_unit(cfg_builder *g, PyObject *consts, PyObject *const_cache, - int code_flags, int nlocals, int nparams) -{ - assert(cfg_builder_check(g)); - /** Preprocessing **/ - /* Map labels to targets and mark exception handlers */ - RETURN_IF_ERROR(translate_jump_labels_to_targets(g->g_entryblock)); - RETURN_IF_ERROR(mark_except_handlers(g->g_entryblock)); - RETURN_IF_ERROR(label_exception_targets(g->g_entryblock)); - - /** Optimization **/ - RETURN_IF_ERROR(optimize_cfg(g, consts, const_cache)); - RETURN_IF_ERROR(remove_unused_consts(g->g_entryblock, consts)); - RETURN_IF_ERROR( - add_checks_for_loads_of_uninitialized_variables( - g->g_entryblock, nlocals, nparams)); - - RETURN_IF_ERROR(push_cold_blocks_to_end(g, code_flags)); - return SUCCESS; -} - -static PyCodeObject * -assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, - int code_flags, PyObject *filename) -{ - PyCodeObject *co = NULL; - PyObject *consts = consts_dict_keys_inorder(u->u_consts); - if (consts == NULL) { - goto error; - } - cfg_builder g; - if (instr_sequence_to_cfg(&u->u_instr_sequence, &g) < 0) { - goto error; - } - int nparams = (int)PyList_GET_SIZE(u->u_ste->ste_varnames); - int nlocals = (int)PyDict_GET_SIZE(u->u_varnames); - if (optimize_code_unit(&g, consts, const_cache, code_flags, nlocals, nparams) < 0) { - goto error; - } - - /** Assembly **/ - - if (resolve_line_numbers(u, &g) < 0) { - goto error; - } - - int nlocalsplus = prepare_localsplus(u, &g, code_flags); - if (nlocalsplus < 0) { - goto error; - } - - int maxdepth = stackdepth(g.g_entryblock, code_flags); - if (maxdepth < 0) { - goto error; - } - /* TO DO -- For 3.12, make sure that `maxdepth <= MAX_ALLOWED_STACK_USE` */ - - convert_pseudo_ops(g.g_entryblock); - - /* Order of basic blocks must have been determined by now */ - if (normalize_jumps(&g) < 0) { - goto error; - } - assert(no_redundant_jumps(&g)); - assert(opcode_metadata_is_sane(&g)); - - /* Can't modify the bytecode after computing jump offsets. 
*/ - assemble_jump_offsets(g.g_entryblock); - - struct assembler a; - int res = assemble_emit(&a, g.g_entryblock, u->u_firstlineno, const_cache); - if (res == SUCCESS) { - co = makecode(u, &a, const_cache, consts, maxdepth, nlocalsplus, - code_flags, filename); - } - assemble_free(&a); - - error: - Py_XDECREF(consts); - cfg_builder_fini(&g); - return co; -} - -static PyCodeObject * -assemble(struct compiler *c, int addNone) -{ - struct compiler_unit *u = c->u; - PyObject *const_cache = c->c_const_cache; - PyObject *filename = c->c_filename; - - int code_flags = compute_code_flags(c); - if (code_flags < 0) { - return NULL; - } - - if (add_return_at_end(c, addNone) < 0) { - return NULL; - } - - return assemble_code_unit(u, const_cache, code_flags, filename); -} - -static PyObject* -get_const_value(int opcode, int oparg, PyObject *co_consts) -{ - PyObject *constant = NULL; - assert(HAS_CONST(opcode)); - if (opcode == LOAD_CONST) { - constant = PyList_GET_ITEM(co_consts, oparg); - } - - if (constant == NULL) { - PyErr_SetString(PyExc_SystemError, - "Internal error: failed to get value of a constant"); - return NULL; - } - return Py_NewRef(constant); -} - -/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n - with LOAD_CONST (c1, c2, ... cn). - The consts table must still be in list form so that the - new constant (c1, c2, ... cn) can be appended. - Called with codestr pointing to the first LOAD_CONST. -*/ -static int -fold_tuple_on_constants(PyObject *const_cache, - struct cfg_instr *inst, - int n, PyObject *consts) -{ - /* Pre-conditions */ - assert(PyDict_CheckExact(const_cache)); - assert(PyList_CheckExact(consts)); - assert(inst[n].i_opcode == BUILD_TUPLE); - assert(inst[n].i_oparg == n); - - for (int i = 0; i < n; i++) { - if (!HAS_CONST(inst[i].i_opcode)) { - return SUCCESS; - } - } - - /* Buildup new tuple of constants */ - PyObject *newconst = PyTuple_New(n); - if (newconst == NULL) { - return ERROR; - } - for (int i = 0; i < n; i++) { - int op = inst[i].i_opcode; - int arg = inst[i].i_oparg; - PyObject *constant = get_const_value(op, arg, consts); - if (constant == NULL) { - return ERROR; - } - PyTuple_SET_ITEM(newconst, i, constant); - } - if (merge_const_one(const_cache, &newconst) < 0) { - Py_DECREF(newconst); - return ERROR; - } - - Py_ssize_t index; - for (index = 0; index < PyList_GET_SIZE(consts); index++) { - if (PyList_GET_ITEM(consts, index) == newconst) { - break; - } - } - if (index == PyList_GET_SIZE(consts)) { - if ((size_t)index >= (size_t)INT_MAX - 1) { - Py_DECREF(newconst); - PyErr_SetString(PyExc_OverflowError, "too many constants"); - return ERROR; - } - if (PyList_Append(consts, newconst)) { - Py_DECREF(newconst); + /* Set up cells for any variable that escapes, to be put in a closure. */ + const int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + if (ncellvars) { + // umd->u_cellvars has the cells out of order so we sort them + // before adding the MAKE_CELL instructions. Note that we + // adjust for arg cells, which come first. + const int nvars = ncellvars + (int)PyDict_GET_SIZE(umd->u_varnames); + int *sorted = PyMem_RawCalloc(nvars, sizeof(int)); + if (sorted == NULL) { + PyErr_NoMemory(); return ERROR; } - } - Py_DECREF(newconst); - for (int i = 0; i < n; i++) { - INSTR_SET_OP0(&inst[i], NOP); - } - INSTR_SET_OP1(&inst[n], LOAD_CONST, (int)index); - return SUCCESS; -} - -#define VISITED (-1) - -// Replace an arbitrary run of SWAPs and NOPs with an optimal one that has the -// same effect. 
-static int -swaptimize(basicblock *block, int *ix) -{ - // NOTE: "./python -m test test_patma" serves as a good, quick stress test - // for this function. Make sure to blow away cached *.pyc files first! - assert(*ix < block->b_iused); - struct cfg_instr *instructions = &block->b_instr[*ix]; - // Find the length of the current sequence of SWAPs and NOPs, and record the - // maximum depth of the stack manipulations: - assert(instructions[0].i_opcode == SWAP); - int depth = instructions[0].i_oparg; - int len = 0; - int more = false; - int limit = block->b_iused - *ix; - while (++len < limit) { - int opcode = instructions[len].i_opcode; - if (opcode == SWAP) { - depth = Py_MAX(depth, instructions[len].i_oparg); - more = true; - } - else if (opcode != NOP) { - break; - } - } - // It's already optimal if there's only one SWAP: - if (!more) { - return SUCCESS; - } - // Create an array with elements {0, 1, 2, ..., depth - 1}: - int *stack = PyMem_Malloc(depth * sizeof(int)); - if (stack == NULL) { - PyErr_NoMemory(); - return ERROR; - } - for (int i = 0; i < depth; i++) { - stack[i] = i; - } - // Simulate the combined effect of these instructions by "running" them on - // our "stack": - for (int i = 0; i < len; i++) { - if (instructions[i].i_opcode == SWAP) { - int oparg = instructions[i].i_oparg; - int top = stack[0]; - // SWAPs are 1-indexed: - stack[0] = stack[oparg - 1]; - stack[oparg - 1] = top; - } - } - // Now we can begin! Our approach here is based on a solution to a closely - // related problem (https://cs.stackexchange.com/a/13938). It's easiest to - // think of this algorithm as determining the steps needed to efficiently - // "un-shuffle" our stack. By performing the moves in *reverse* order, - // though, we can efficiently *shuffle* it! For this reason, we will be - // replacing instructions starting from the *end* of the run. Since the - // solution is optimal, we don't need to worry about running out of space: - int current = len - 1; - for (int i = 0; i < depth; i++) { - // Skip items that have already been visited, or just happen to be in - // the correct location: - if (stack[i] == VISITED || stack[i] == i) { - continue; - } - // Okay, we've found an item that hasn't been visited. It forms a cycle - // with other items; traversing the cycle and swapping each item with - // the next will put them all in the correct place. 
The weird - // loop-and-a-half is necessary to insert 0 into every cycle, since we - // can only swap from that position: - int j = i; - while (true) { - // Skip the actual swap if our item is zero, since swapping the top - // item with itself is pointless: - if (j) { - assert(0 <= current); - // SWAPs are 1-indexed: - instructions[current].i_opcode = SWAP; - instructions[current--].i_oparg = j + 1; - } - if (stack[j] == VISITED) { - // Completed the cycle: - assert(j == i); - break; - } - int next_j = stack[j]; - stack[j] = VISITED; - j = next_j; - } - } - // NOP out any unused instructions: - while (0 <= current) { - INSTR_SET_OP0(&instructions[current--], NOP); - } - PyMem_Free(stack); - *ix += len - 1; - return SUCCESS; -} - -// This list is pretty small, since it's only okay to reorder opcodes that: -// - can't affect control flow (like jumping or raising exceptions) -// - can't invoke arbitrary code (besides finalizers) -// - only touch the TOS (and pop it when finished) -#define SWAPPABLE(opcode) \ - ((opcode) == STORE_FAST || \ - (opcode) == STORE_FAST_MAYBE_NULL || \ - (opcode) == POP_TOP) - -static int -next_swappable_instruction(basicblock *block, int i, int lineno) -{ - while (++i < block->b_iused) { - struct cfg_instr *instruction = &block->b_instr[i]; - if (0 <= lineno && instruction->i_loc.lineno != lineno) { - // Optimizing across this instruction could cause user-visible - // changes in the names bound between line tracing events! - return -1; - } - if (instruction->i_opcode == NOP) { - continue; - } - if (SWAPPABLE(instruction->i_opcode)) { - return i; - } - return -1; - } - return -1; -} - -// Attempt to apply SWAPs statically by swapping *instructions* rather than -// stack items. For example, we can replace SWAP(2), POP_TOP, STORE_FAST(42) -// with the more efficient NOP, STORE_FAST(42), POP_TOP. -static void -apply_static_swaps(basicblock *block, int i) -{ - // SWAPs are to our left, and potential swaperands are to our right: - for (; 0 <= i; i--) { - assert(i < block->b_iused); - struct cfg_instr *swap = &block->b_instr[i]; - if (swap->i_opcode != SWAP) { - if (swap->i_opcode == NOP || SWAPPABLE(swap->i_opcode)) { - // Nope, but we know how to handle these. Keep looking: - continue; - } - // We can't reason about what this instruction does. Bail: - return; - } - int j = next_swappable_instruction(block, i, -1); - if (j < 0) { - return; - } - int k = j; - int lineno = block->b_instr[j].i_loc.lineno; - for (int count = swap->i_oparg - 1; 0 < count; count--) { - k = next_swappable_instruction(block, k, lineno); - if (k < 0) { - return; - } - } - // Success! - INSTR_SET_OP0(swap, NOP); - struct cfg_instr temp = block->b_instr[j]; - block->b_instr[j] = block->b_instr[k]; - block->b_instr[k] = temp; - } -} - -// Attempt to eliminate jumps to jumps by updating inst to jump to -// target->i_target using the provided opcode. Return whether or not the -// optimization was successful. 
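A toy, self-contained sketch (invented structs, not the compiler's) of the jump-threading idea the next function implements: when a jump's target block itself begins with an unconditional jump, retarget the first jump straight at the final destination.

    #include <stdio.h>

    struct block;
    struct instr { int is_uncond_jump; struct block *target; };
    struct block { const char *name; struct instr first; };

    int
    main(void)
    {
        struct block c = { "C", { 0, NULL } };     /* the block that does real work  */
        struct block b = { "B", { 1, &c } };       /* B immediately jumps on to C    */
        struct instr jump_in_a = { 1, &b };        /* block A ends with a jump to B  */

        /* Thread the jump: if the target's first instruction is itself an
         * unconditional jump, point A's jump straight at its final destination. */
        if (jump_in_a.target->first.is_uncond_jump &&
            jump_in_a.target->first.target != jump_in_a.target) {
            jump_in_a.target = jump_in_a.target->first.target;
        }
        printf("A now jumps to %s\n", jump_in_a.target->name);   /* "A now jumps to C" */
        return 0;
    }

The real jump_thread() below additionally refuses to thread across differing line numbers and guards against retargeting a jump at its own target, which would loop forever.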
-static bool -jump_thread(struct cfg_instr *inst, struct cfg_instr *target, int opcode) -{ - assert(is_jump(inst)); - assert(is_jump(target)); - // bpo-45773: If inst->i_target == target->i_target, then nothing actually - // changes (and we fall into an infinite loop): - if ((inst->i_loc.lineno == target->i_loc.lineno || target->i_loc.lineno == -1) && - inst->i_target != target->i_target) - { - inst->i_target = target->i_target; - inst->i_opcode = opcode; - return true; - } - return false; -} - -/* Maximum size of basic block that should be copied in optimizer */ -#define MAX_COPY_SIZE 4 - -/* Optimization */ -static int -optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) -{ - assert(PyDict_CheckExact(const_cache)); - assert(PyList_CheckExact(consts)); - struct cfg_instr nop; - INSTR_SET_OP0(&nop, NOP); - struct cfg_instr *target; - for (int i = 0; i < bb->b_iused; i++) { - struct cfg_instr *inst = &bb->b_instr[i]; - int oparg = inst->i_oparg; - int nextop = i+1 < bb->b_iused ? bb->b_instr[i+1].i_opcode : 0; - if (HAS_TARGET(inst->i_opcode)) { - assert(inst->i_target->b_iused > 0); - target = &inst->i_target->b_instr[0]; - assert(!IS_ASSEMBLER_OPCODE(target->i_opcode)); - } - else { - target = &nop; - } - assert(!IS_ASSEMBLER_OPCODE(inst->i_opcode)); - switch (inst->i_opcode) { - /* Remove LOAD_CONST const; conditional jump */ - case LOAD_CONST: - { - PyObject* cnt; - int is_true; - int jump_if_true; - switch(nextop) { - case POP_JUMP_IF_FALSE: - case POP_JUMP_IF_TRUE: - cnt = get_const_value(inst->i_opcode, oparg, consts); - if (cnt == NULL) { - goto error; - } - is_true = PyObject_IsTrue(cnt); - Py_DECREF(cnt); - if (is_true == -1) { - goto error; - } - INSTR_SET_OP0(inst, NOP); - jump_if_true = nextop == POP_JUMP_IF_TRUE; - if (is_true == jump_if_true) { - bb->b_instr[i+1].i_opcode = JUMP; - } - else { - INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); - } - break; - case JUMP_IF_FALSE_OR_POP: - case JUMP_IF_TRUE_OR_POP: - cnt = get_const_value(inst->i_opcode, oparg, consts); - if (cnt == NULL) { - goto error; - } - is_true = PyObject_IsTrue(cnt); - Py_DECREF(cnt); - if (is_true == -1) { - goto error; - } - jump_if_true = nextop == JUMP_IF_TRUE_OR_POP; - if (is_true == jump_if_true) { - bb->b_instr[i+1].i_opcode = JUMP; - } - else { - INSTR_SET_OP0(inst, NOP); - INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); - } - break; - case IS_OP: - cnt = get_const_value(inst->i_opcode, oparg, consts); - if (cnt == NULL) { - goto error; - } - int jump_op = i+2 < bb->b_iused ? bb->b_instr[i+2].i_opcode : 0; - if (Py_IsNone(cnt) && (jump_op == POP_JUMP_IF_FALSE || jump_op == POP_JUMP_IF_TRUE)) { - unsigned char nextarg = bb->b_instr[i+1].i_oparg; - INSTR_SET_OP0(inst, NOP); - INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); - bb->b_instr[i+2].i_opcode = nextarg ^ (jump_op == POP_JUMP_IF_FALSE) ? - POP_JUMP_IF_NOT_NONE : POP_JUMP_IF_NONE; - } - Py_DECREF(cnt); - break; - case RETURN_VALUE: - INSTR_SET_OP0(inst, NOP); - INSTR_SET_OP1(&bb->b_instr[++i], RETURN_CONST, oparg); - break; - } - break; - } - - /* Try to fold tuples of constants. - Skip over BUILD_TUPLE(1) UNPACK_SEQUENCE(1). - Replace BUILD_TUPLE(2) UNPACK_SEQUENCE(2) with SWAP(2). - Replace BUILD_TUPLE(3) UNPACK_SEQUENCE(3) with SWAP(3). 
*/ - case BUILD_TUPLE: - if (nextop == UNPACK_SEQUENCE && oparg == bb->b_instr[i+1].i_oparg) { - switch(oparg) { - case 1: - INSTR_SET_OP0(inst, NOP); - INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); - continue; - case 2: - case 3: - INSTR_SET_OP0(inst, NOP); - bb->b_instr[i+1].i_opcode = SWAP; - continue; - } - } - if (i >= oparg) { - if (fold_tuple_on_constants(const_cache, inst-oparg, oparg, consts)) { - goto error; - } - } - break; - - /* Simplify conditional jump to conditional jump where the - result of the first test implies the success of a similar - test or the failure of the opposite test. - Arises in code like: - "a and b or c" - "(a and b) and c" - "(a or b) or c" - "(a or b) and c" - x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_FALSE_OR_POP z - --> x:JUMP_IF_FALSE_OR_POP z - x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_TRUE_OR_POP z - --> x:POP_JUMP_IF_FALSE y+1 - where y+1 is the instruction following the second test. - */ - case JUMP_IF_FALSE_OR_POP: - switch (target->i_opcode) { - case POP_JUMP_IF_FALSE: - i -= jump_thread(inst, target, POP_JUMP_IF_FALSE); - break; - case JUMP: - case JUMP_IF_FALSE_OR_POP: - i -= jump_thread(inst, target, JUMP_IF_FALSE_OR_POP); - break; - case JUMP_IF_TRUE_OR_POP: - case POP_JUMP_IF_TRUE: - if (inst->i_loc.lineno == target->i_loc.lineno) { - // We don't need to bother checking for loops here, - // since a block's b_next cannot point to itself: - assert(inst->i_target != inst->i_target->b_next); - inst->i_opcode = POP_JUMP_IF_FALSE; - inst->i_target = inst->i_target->b_next; - --i; - } - break; - } - break; - case JUMP_IF_TRUE_OR_POP: - switch (target->i_opcode) { - case POP_JUMP_IF_TRUE: - i -= jump_thread(inst, target, POP_JUMP_IF_TRUE); - break; - case JUMP: - case JUMP_IF_TRUE_OR_POP: - i -= jump_thread(inst, target, JUMP_IF_TRUE_OR_POP); - break; - case JUMP_IF_FALSE_OR_POP: - case POP_JUMP_IF_FALSE: - if (inst->i_loc.lineno == target->i_loc.lineno) { - // We don't need to bother checking for loops here, - // since a block's b_next cannot point to itself: - assert(inst->i_target != inst->i_target->b_next); - inst->i_opcode = POP_JUMP_IF_TRUE; - inst->i_target = inst->i_target->b_next; - --i; - } - break; - } - break; - case POP_JUMP_IF_NOT_NONE: - case POP_JUMP_IF_NONE: - switch (target->i_opcode) { - case JUMP: - i -= jump_thread(inst, target, inst->i_opcode); - } - break; - case POP_JUMP_IF_FALSE: - switch (target->i_opcode) { - case JUMP: - i -= jump_thread(inst, target, POP_JUMP_IF_FALSE); - } - break; - case POP_JUMP_IF_TRUE: - switch (target->i_opcode) { - case JUMP: - i -= jump_thread(inst, target, POP_JUMP_IF_TRUE); - } - break; - case JUMP: - switch (target->i_opcode) { - case JUMP: - i -= jump_thread(inst, target, JUMP); - } - break; - case FOR_ITER: - if (target->i_opcode == JUMP) { - /* This will not work now because the jump (at target) could - * be forward or backward and FOR_ITER only jumps forward. We - * can re-enable this if ever we implement a backward version - * of FOR_ITER. 
- */ - /* - i -= jump_thread(inst, target, FOR_ITER); - */ - } - break; - case SWAP: - if (oparg == 1) { - INSTR_SET_OP0(inst, NOP); - break; - } - if (swaptimize(bb, &i) < 0) { - goto error; - } - apply_static_swaps(bb, i); - break; - case KW_NAMES: - break; - case PUSH_NULL: - if (nextop == LOAD_GLOBAL && (inst[1].i_opcode & 1) == 0) { - INSTR_SET_OP0(inst, NOP); - inst[1].i_oparg |= 1; - } - break; - default: - /* All HAS_CONST opcodes should be handled with LOAD_CONST */ - assert (!HAS_CONST(inst->i_opcode)); + for (int i = 0; i < ncellvars; i++) { + sorted[fixed[i]] = i + 1; } - } - return SUCCESS; -error: - return ERROR; -} - -/* If this block ends with an unconditional jump to a small exit block, then - * remove the jump and extend this block with the target. - * Returns 1 if extended, 0 if no change, and -1 on error. - */ -static int -inline_small_exit_blocks(basicblock *bb) { - struct cfg_instr *last = basicblock_last_instr(bb); - if (last == NULL) { - return 0; - } - if (!IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { - return 0; - } - basicblock *target = last->i_target; - if (basicblock_exits_scope(target) && target->b_iused <= MAX_COPY_SIZE) { - INSTR_SET_OP0(last, NOP); - RETURN_IF_ERROR(basicblock_append_instructions(bb, target)); - return 1; - } - return 0; -} - -static int -remove_redundant_nops(basicblock *bb) { - /* Remove NOPs when legal to do so. */ - int dest = 0; - int prev_lineno = -1; - for (int src = 0; src < bb->b_iused; src++) { - int lineno = bb->b_instr[src].i_loc.lineno; - if (bb->b_instr[src].i_opcode == NOP) { - /* Eliminate no-op if it doesn't have a line number */ - if (lineno < 0) { - continue; - } - /* or, if the previous instruction had the same line number. */ - if (prev_lineno == lineno) { + for (int i = 0, ncellsused = 0; ncellsused < ncellvars; i++) { + int oldindex = sorted[i] - 1; + if (oldindex == -1) { continue; } - /* or, if the next instruction has same line number or no line number */ - if (src < bb->b_iused - 1) { - int next_lineno = bb->b_instr[src+1].i_loc.lineno; - if (next_lineno == lineno) { - continue; - } - if (next_lineno < 0) { - bb->b_instr[src+1].i_loc = bb->b_instr[src].i_loc; - continue; - } - } - else { - basicblock* next = bb->b_next; - while (next && next->b_iused == 0) { - next = next->b_next; - } - /* or if last instruction in BB and next BB has same line number */ - if (next) { - if (lineno == next->b_instr[0].i_loc.lineno) { - continue; - } - } - } - - } - if (dest != src) { - bb->b_instr[dest] = bb->b_instr[src]; - } - dest++; - prev_lineno = lineno; - } - assert(dest <= bb->b_iused); - int num_removed = bb->b_iused - dest; - bb->b_iused = dest; - return num_removed; -} - -static int -check_cfg(cfg_builder *g) { - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - /* Raise SystemError if jump or exit is not last instruction in the block. 
*/ - for (int i = 0; i < b->b_iused; i++) { - int opcode = b->b_instr[i].i_opcode; - assert(!IS_ASSEMBLER_OPCODE(opcode)); - if (IS_TERMINATOR_OPCODE(opcode)) { - if (i != b->b_iused - 1) { - PyErr_SetString(PyExc_SystemError, "malformed control flow graph."); - return ERROR; - } - } - } - } - return SUCCESS; -} - -static int -mark_reachable(basicblock *entryblock) { - basicblock **stack = make_cfg_traversal_stack(entryblock); - if (stack == NULL) { - return ERROR; - } - basicblock **sp = stack; - entryblock->b_predecessors = 1; - *sp++ = entryblock; - while (sp > stack) { - basicblock *b = *(--sp); - b->b_visited = 1; - if (b->b_next && BB_HAS_FALLTHROUGH(b)) { - if (!b->b_next->b_visited) { - assert(b->b_next->b_predecessors == 0); - *sp++ = b->b_next; - } - b->b_next->b_predecessors++; - } - for (int i = 0; i < b->b_iused; i++) { - basicblock *target; - struct cfg_instr *instr = &b->b_instr[i]; - if (is_jump(instr) || is_block_push(instr)) { - target = instr->i_target; - if (!target->b_visited) { - assert(target->b_predecessors == 0 || target == b->b_next); - *sp++ = target; - } - target->b_predecessors++; + cfg_instr make_cell = { + .i_opcode = MAKE_CELL, + // This will get fixed in offset_derefs(). + .i_oparg = oldindex, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + if (_PyBasicblock_InsertInstruction(entryblock, ncellsused, &make_cell) < 0) { + PyMem_RawFree(sorted); + return ERROR; } + ncellsused += 1; } + PyMem_RawFree(sorted); + } + + if (nfreevars) { + cfg_instr copy_frees = { + .i_opcode = COPY_FREE_VARS, + .i_oparg = nfreevars, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 0, ©_frees)); } - PyMem_Free(stack); + return SUCCESS; } -static void -eliminate_empty_basic_blocks(cfg_builder *g) { - /* Eliminate empty blocks */ - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - basicblock *next = b->b_next; - while (next && next->b_iused == 0) { - next = next->b_next; +static int +fix_cell_offsets(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, int *fixedmap) +{ + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); + int noffsets = ncellvars + nfreevars; + + // First deal with duplicates (arg cells). + int numdropped = 0; + for (int i = 0; i < noffsets ; i++) { + if (fixedmap[i] == i + nlocals) { + fixedmap[i] -= numdropped; } - b->b_next = next; - } - while(g->g_entryblock && g->g_entryblock->b_iused == 0) { - g->g_entryblock = g->g_entryblock->b_next; - } - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - assert(b->b_iused > 0); - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - if (HAS_TARGET(instr->i_opcode)) { - basicblock *target = instr->i_target; - while (target->b_iused == 0) { - target = target->b_next; - } - instr->i_target = target; - assert(instr->i_target && instr->i_target->b_iused > 0); - } + else { + // It was a duplicate (cell/arg). + numdropped += 1; } } -} - -/* If an instruction has no line number, but it's predecessor in the BB does, - * then copy the line number. If a successor block has no line number, and only - * one predecessor, then inherit the line number. - * This ensures that all exit blocks (with one predecessor) receive a line number. - * Also reduces the size of the line number table, - * but has no impact on the generated line number events. 
- */ -static void -propagate_line_numbers(basicblock *entryblock) { + // Then update offsets, either relative to locals or by cell2arg. for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - struct cfg_instr *last = basicblock_last_instr(b); - if (last == NULL) { - continue; - } - - location prev_location = NO_LOCATION; for (int i = 0; i < b->b_iused; i++) { - if (b->b_instr[i].i_loc.lineno < 0) { - b->b_instr[i].i_loc = prev_location; - } - else { - prev_location = b->b_instr[i].i_loc; - } - } - if (BB_HAS_FALLTHROUGH(b) && b->b_next->b_predecessors == 1) { - assert(b->b_next->b_iused); - if (b->b_next->b_instr[0].i_loc.lineno < 0) { - b->b_next->b_instr[0].i_loc = prev_location; - } - } - if (is_jump(last)) { - basicblock *target = last->i_target; - if (target->b_predecessors == 1) { - if (target->b_instr[0].i_loc.lineno < 0) { - target->b_instr[0].i_loc = prev_location; - } + cfg_instr *inst = &b->b_instr[i]; + // This is called before extended args are generated. + assert(inst->i_opcode != EXTENDED_ARG); + int oldoffset = inst->i_oparg; + switch(inst->i_opcode) { + case MAKE_CELL: + case LOAD_CLOSURE: + case LOAD_DEREF: + case STORE_DEREF: + case DELETE_DEREF: + case LOAD_CLASSDEREF: + assert(oldoffset >= 0); + assert(oldoffset < noffsets); + assert(fixedmap[oldoffset] >= 0); + inst->i_oparg = fixedmap[oldoffset]; } } } + + return numdropped; } -/* Calculate the actual jump target from the target_label */ static int -translate_jump_labels_to_targets(basicblock *entryblock) +prepare_localsplus(_PyCompile_CodeUnitMetadata *umd, cfg_builder *g, int code_flags) { - int max_label = -1; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - if (b->b_label.id > max_label) { - max_label = b->b_label.id; - } - } - size_t mapsize = sizeof(basicblock *) * (max_label + 1); - basicblock **label2block = (basicblock **)PyMem_Malloc(mapsize); - if (!label2block) { - PyErr_NoMemory(); + assert(PyDict_GET_SIZE(umd->u_varnames) < INT_MAX); + assert(PyDict_GET_SIZE(umd->u_cellvars) < INT_MAX); + assert(PyDict_GET_SIZE(umd->u_freevars) < INT_MAX); + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); + assert(INT_MAX - nlocals - ncellvars > 0); + assert(INT_MAX - nlocals - ncellvars - nfreevars > 0); + int nlocalsplus = nlocals + ncellvars + nfreevars; + int* cellfixedoffsets = build_cellfixedoffsets(umd); + if (cellfixedoffsets == NULL) { return ERROR; } - memset(label2block, 0, mapsize); - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - if (b->b_label.id >= 0) { - label2block[b->b_label.id] = b; - } - } - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; - assert(instr->i_target == NULL); - if (HAS_TARGET(instr->i_opcode)) { - int lbl = instr->i_oparg; - assert(lbl >= 0 && lbl <= max_label); - instr->i_target = label2block[lbl]; - assert(instr->i_target != NULL); - assert(instr->i_target->b_label.id == lbl); - } - } + + + // This must be called before fix_cell_offsets(). + if (insert_prefix_instructions(umd, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { + PyMem_Free(cellfixedoffsets); + return ERROR; } - PyMem_Free(label2block); - return SUCCESS; -} -/* Perform optimizations on a control flow graph. - The consts object should still be in list form to allow new constants - to be appended. 
+ int numdropped = fix_cell_offsets(umd, g->g_entryblock, cellfixedoffsets); + PyMem_Free(cellfixedoffsets); // At this point we're done with it. + cellfixedoffsets = NULL; + if (numdropped < 0) { + return ERROR; + } - Code trasnformations that reduce code size initially fill the gaps with - NOPs. Later those NOPs are removed. -*/ + nlocalsplus -= numdropped; + return nlocalsplus; +} static int -optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache) +add_return_at_end(struct compiler *c, int addNone) { - assert(PyDict_CheckExact(const_cache)); - RETURN_IF_ERROR(check_cfg(g)); - eliminate_empty_basic_blocks(g); - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - RETURN_IF_ERROR(inline_small_exit_blocks(b)); - } - assert(no_empty_basic_blocks(g)); - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - RETURN_IF_ERROR(optimize_basic_block(const_cache, b, consts)); - remove_redundant_nops(b); - assert(b->b_predecessors == 0); - } - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - RETURN_IF_ERROR(inline_small_exit_blocks(b)); - } - RETURN_IF_ERROR(mark_reachable(g->g_entryblock)); - - /* Delete unreachable instructions */ - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - if (b->b_predecessors == 0) { - b->b_iused = 0; - } - } - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - remove_redundant_nops(b); + /* Make sure every instruction stream that falls off the end returns None. + * This also ensures that no jump target offsets are out of bounds. + */ + if (addNone) { + ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); } - eliminate_empty_basic_blocks(g); - assert(no_redundant_nops(g)); - RETURN_IF_ERROR(remove_redundant_jumps(g)); + ADDOP(c, NO_LOCATION, RETURN_VALUE); return SUCCESS; } +static int cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq); -static int -remove_unused_consts(basicblock *entryblock, PyObject *consts) +static PyCodeObject * +optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, + int code_flags, PyObject *filename) { - assert(PyList_CheckExact(consts)); - Py_ssize_t nconsts = PyList_GET_SIZE(consts); - if (nconsts == 0) { - return SUCCESS; /* nothing to do */ - } + instr_sequence optimized_instrs; + memset(&optimized_instrs, 0, sizeof(instr_sequence)); - Py_ssize_t *index_map = NULL; - Py_ssize_t *reverse_index_map = NULL; - int err = ERROR; - - index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); - if (index_map == NULL) { - goto end; + PyCodeObject *co = NULL; + PyObject *consts = consts_dict_keys_inorder(u->u_metadata.u_consts); + if (consts == NULL) { + goto error; } - for (Py_ssize_t i = 1; i < nconsts; i++) { - index_map[i] = -1; + cfg_builder g; + if (instr_sequence_to_cfg(&u->u_instr_sequence, &g) < 0) { + goto error; } - // The first constant may be docstring; keep it always. 
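The constant-pruning pass in progress here boils down to two arrays: a map that condenses the used constant indices to the front of the list, and a reverse map used to rewrite every oparg in the bytecode. A small standalone sketch with made-up data (index 0 stands in for the possible docstring and is always kept):

    #include <stdio.h>

    int
    main(void)
    {
        /* Consts table with 5 entries; only indices 0, 2 and 4 are referenced. */
        int used[5]  = {1, 0, 1, 0, 1};
        int remap[5] = {-1, -1, -1, -1, -1};
        int oparg[3] = {4, 2, 0};           /* const indices appearing in the bytecode */

        /* Condense: give each used const the next free slot, in order. */
        int n_used = 0;
        for (int i = 0; i < 5; i++) {
            if (used[i]) {
                remap[i] = n_used++;
            }
        }
        /* Rewrite the opargs through the remapping. */
        for (int i = 0; i < 3; i++) {
            oparg[i] = remap[oparg[i]];
        }
        printf("new size %d, opargs %d %d %d\n", n_used, oparg[0], oparg[1], oparg[2]);
        /* prints: new size 3, opargs 2 1 0 */
        return 0;
    }

The code below does the same over the real consts list, truncating it with PyList_SetSlice() once the used entries have been moved to the front.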
- index_map[0] = 0; - - /* mark used consts */ - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - if (HAS_CONST(b->b_instr[i].i_opcode)) { - int index = b->b_instr[i].i_oparg; - index_map[index] = index; - } - } + int nparams = (int)PyList_GET_SIZE(u->u_ste->ste_varnames); + int nlocals = (int)PyDict_GET_SIZE(u->u_metadata.u_varnames); + assert(u->u_metadata.u_firstlineno); + if (_PyCfg_OptimizeCodeUnit(&g, consts, const_cache, code_flags, nlocals, + nparams, u->u_metadata.u_firstlineno) < 0) { + goto error; } - /* now index_map[i] == i if consts[i] is used, -1 otherwise */ - /* condense consts */ - Py_ssize_t n_used_consts = 0; - for (int i = 0; i < nconsts; i++) { - if (index_map[i] != -1) { - assert(index_map[i] == i); - index_map[n_used_consts++] = index_map[i]; - } - } - if (n_used_consts == nconsts) { - /* nothing to do */ - err = SUCCESS; - goto end; + /** Assembly **/ + int nlocalsplus = prepare_localsplus(&u->u_metadata, &g, code_flags); + if (nlocalsplus < 0) { + goto error; } - /* move all used consts to the beginning of the consts list */ - assert(n_used_consts < nconsts); - for (Py_ssize_t i = 0; i < n_used_consts; i++) { - Py_ssize_t old_index = index_map[i]; - assert(i <= old_index && old_index < nconsts); - if (i != old_index) { - PyObject *value = PyList_GET_ITEM(consts, index_map[i]); - assert(value != NULL); - PyList_SetItem(consts, i, Py_NewRef(value)); - } + int maxdepth = _PyCfg_Stackdepth(g.g_entryblock, code_flags); + if (maxdepth < 0) { + goto error; } - /* truncate the consts list at its new size */ - if (PyList_SetSlice(consts, n_used_consts, nconsts, NULL) < 0) { - goto end; - } + _PyCfg_ConvertPseudoOps(g.g_entryblock); - /* adjust const indices in the bytecode */ - reverse_index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); - if (reverse_index_map == NULL) { - goto end; - } - for (Py_ssize_t i = 0; i < nconsts; i++) { - reverse_index_map[i] = -1; - } - for (Py_ssize_t i = 0; i < n_used_consts; i++) { - assert(index_map[i] != -1); - assert(reverse_index_map[index_map[i]] == -1); - reverse_index_map[index_map[i]] = i; + /* Order of basic blocks must have been determined by now */ + + if (_PyCfg_ResolveJumps(&g) < 0) { + goto error; } - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - if (HAS_CONST(b->b_instr[i].i_opcode)) { - int index = b->b_instr[i].i_oparg; - assert(reverse_index_map[index] >= 0); - assert(reverse_index_map[index] < n_used_consts); - b->b_instr[i].i_oparg = (int)reverse_index_map[index]; - } - } + /* Can't modify the bytecode after computing jump offsets. 
*/ + + if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) { + goto error; } - err = SUCCESS; -end: - PyMem_Free(index_map); - PyMem_Free(reverse_index_map); - return err; + co = _PyAssemble_MakeCodeObject(&u->u_metadata, const_cache, consts, + maxdepth, &optimized_instrs, nlocalsplus, + code_flags, filename); + +error: + Py_XDECREF(consts); + instr_sequence_fini(&optimized_instrs); + _PyCfgBuilder_Fini(&g); + return co; } -static inline bool -is_exit_without_lineno(basicblock *b) { - if (!basicblock_exits_scope(b)) { - return false; +static PyCodeObject * +optimize_and_assemble(struct compiler *c, int addNone) +{ + struct compiler_unit *u = c->u; + PyObject *const_cache = c->c_const_cache; + PyObject *filename = c->c_filename; + + int code_flags = compute_code_flags(c); + if (code_flags < 0) { + return NULL; } - for (int i = 0; i < b->b_iused; i++) { - if (b->b_instr[i].i_loc.lineno >= 0) { - return false; - } + + if (add_return_at_end(c, addNone) < 0) { + return NULL; } - return true; + + return optimize_and_assemble_code_unit(u, const_cache, code_flags, filename); } -/* PEP 626 mandates that the f_lineno of a frame is correct - * after a frame terminates. It would be prohibitively expensive - * to continuously update the f_lineno field at runtime, - * so we make sure that all exiting instruction (raises and returns) - * have a valid line number, allowing us to compute f_lineno lazily. - * We can do this by duplicating the exit blocks without line number - * so that none have more than one predecessor. We can then safely - * copy the line number from the sole predecessor block. - */ static int -duplicate_exits_without_lineno(cfg_builder *g) +cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq) { - assert(no_empty_basic_blocks(g)); - /* Copy all exit blocks without line number that are targets of a jump. 
- */ - basicblock *entryblock = g->g_entryblock; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - struct cfg_instr *last = basicblock_last_instr(b); - assert(last != NULL); - if (is_jump(last)) { - basicblock *target = last->i_target; - if (is_exit_without_lineno(target) && target->b_predecessors > 1) { - basicblock *new_target = copy_basicblock(g, target); - if (new_target == NULL) { - return ERROR; - } - new_target->b_instr[0].i_loc = last->i_loc; - last->i_target = new_target; - target->b_predecessors--; - new_target->b_predecessors = 1; - new_target->b_next = target->b_next; - target->b_next = new_target; - } - } + int lbl = 0; + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + b->b_label = (jump_target_label){lbl}; + lbl += b->b_iused; } + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + RETURN_IF_ERROR(instr_sequence_use_label(seq, b->b_label.id)); + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + RETURN_IF_ERROR( + instr_sequence_addop(seq, instr->i_opcode, instr->i_oparg, instr->i_loc)); - /* Any remaining reachable exit blocks without line number can only be reached by - * fall through, and thus can only have a single predecessor */ - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - if (BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_iused > 0) { - if (is_exit_without_lineno(b->b_next)) { - struct cfg_instr *last = basicblock_last_instr(b); - assert(last != NULL); - b->b_next->b_instr[0].i_loc = last->i_loc; + _PyCompile_ExceptHandlerInfo *hi = &seq->s_instrs[seq->s_used-1].i_except_handler_info; + if (instr->i_except != NULL) { + hi->h_offset = instr->i_except->b_offset; + hi->h_startdepth = instr->i_except->b_startdepth; + hi->h_preserve_lasti = instr->i_except->b_preserve_lasti; + } + else { + hi->h_offset = -1; } } } @@ -10005,7 +7270,7 @@ duplicate_exits_without_lineno(cfg_builder *g) */ static int -instructions_to_cfg(PyObject *instructions, cfg_builder *g) +instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq) { assert(PyList_Check(instructions)); @@ -10039,8 +7304,9 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g) for (int i = 0; i < num_insts; i++) { if (is_target[i]) { - jump_target_label lbl = {i}; - RETURN_IF_ERROR(cfg_builder_use_label(g, lbl)); + if (instr_sequence_use_label(seq, i) < 0) { + goto error; + } } PyObject *item = PyList_GET_ITEM(instructions, i); if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) { @@ -10078,11 +7344,9 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g) if (PyErr_Occurred()) { goto error; } - RETURN_IF_ERROR(cfg_builder_addop(g, opcode, oparg, loc)); - } - struct cfg_instr *last = basicblock_last_instr(g->g_curblock); - if (last && !IS_TERMINATOR_OPCODE(last->i_opcode)) { - RETURN_IF_ERROR(cfg_builder_addop(g, RETURN_VALUE, 0, NO_LOCATION)); + if (instr_sequence_addop(seq, opcode, oparg, loc) < 0) { + goto error; + } } PyMem_Free(is_target); return SUCCESS; @@ -10091,8 +7355,28 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g) return ERROR; } +static int +instructions_to_cfg(PyObject *instructions, cfg_builder *g) +{ + instr_sequence seq; + memset(&seq, 0, sizeof(instr_sequence)); + + if (instructions_to_instr_sequence(instructions, &seq) < 0) { + goto error; + } + if (instr_sequence_to_cfg(&seq, g) < 0) { + goto error; + } + instr_sequence_fini(&seq); + return SUCCESS; +error: + instr_sequence_fini(&seq); + return ERROR; +} + static PyObject * 
-instr_sequence_to_instructions(instr_sequence *seq) { +instr_sequence_to_instructions(instr_sequence *seq) +{ PyObject *instructions = PyList_New(0); if (instructions == NULL) { return NULL; @@ -10137,7 +7421,7 @@ cfg_to_instructions(cfg_builder *g) } for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { for (int i = 0; i < b->b_iused; i++) { - struct cfg_instr *instr = &b->b_instr[i]; + cfg_instr *instr = &b->b_instr[i]; location loc = instr->i_loc; int arg = HAS_TARGET(instr->i_opcode) ? instr->i_target->b_label.id : instr->i_oparg; @@ -10215,27 +7499,85 @@ _PyCompile_OptimizeCfg(PyObject *instructions, PyObject *consts) } cfg_builder g; - memset(&g, 0, sizeof(cfg_builder)); - if (cfg_builder_init(&g) < 0) { - goto error; - } if (instructions_to_cfg(instructions, &g) < 0) { goto error; } - int code_flags = 0, nlocals = 0, nparams = 0; - if (optimize_code_unit(&g, consts, const_cache, code_flags, nlocals, nparams) < 0) { + int code_flags = 0, nlocals = 0, nparams = 0, firstlineno = 1; + if (_PyCfg_OptimizeCodeUnit(&g, consts, const_cache, code_flags, nlocals, + nparams, firstlineno) < 0) { goto error; } res = cfg_to_instructions(&g); error: Py_DECREF(const_cache); - cfg_builder_fini(&g); + _PyCfgBuilder_Fini(&g); return res; } +int _PyCfg_JumpLabelsToTargets(basicblock *entryblock); + +PyCodeObject * +_PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename, + PyObject *instructions) +{ + PyCodeObject *co = NULL; + instr_sequence optimized_instrs; + memset(&optimized_instrs, 0, sizeof(instr_sequence)); + + PyObject *const_cache = PyDict_New(); + if (const_cache == NULL) { + return NULL; + } + + cfg_builder g; + if (instructions_to_cfg(instructions, &g) < 0) { + goto error; + } + + if (_PyCfg_JumpLabelsToTargets(g.g_entryblock) < 0) { + goto error; + } + + int code_flags = 0; + int nlocalsplus = prepare_localsplus(umd, &g, code_flags); + if (nlocalsplus < 0) { + goto error; + } + + int maxdepth = _PyCfg_Stackdepth(g.g_entryblock, code_flags); + if (maxdepth < 0) { + goto error; + } + + _PyCfg_ConvertPseudoOps(g.g_entryblock); + + /* Order of basic blocks must have been determined by now */ + + if (_PyCfg_ResolveJumps(&g) < 0) { + goto error; + } + + /* Can't modify the bytecode after computing jump offsets. */ + + if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) { + goto error; + } + + PyObject *consts = umd->u_consts; + co = _PyAssemble_MakeCodeObject(umd, const_cache, + consts, maxdepth, &optimized_instrs, + nlocalsplus, code_flags, filename); + +error: + Py_DECREF(const_cache); + _PyCfgBuilder_Fini(&g); + instr_sequence_fini(&optimized_instrs); + return co; +} + /* Retained for API compatibility. 
- * Optimization is now done in optimize_cfg */ + * Optimization is now done in _PyCfg_OptimizeCodeUnit */ PyObject * PyCode_Optimize(PyObject *code, PyObject* Py_UNUSED(consts), diff --git a/Python/errors.c b/Python/errors.c index bdcbac317eb9ee..ce72049b92de27 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -6,7 +6,7 @@ #include "pycore_initconfig.h" // _PyStatus_ERR() #include "pycore_pyerrors.h" // _PyErr_Format() #include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() #include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_traceback.h" // _PyTraceBack_FromFrame() @@ -666,17 +666,15 @@ _PyErr_ChainExceptions(PyObject *typ, PyObject *val, PyObject *tb) } if (_PyErr_Occurred(tstate)) { - PyObject *typ2, *val2, *tb2; - _PyErr_Fetch(tstate, &typ2, &val2, &tb2); _PyErr_NormalizeException(tstate, &typ, &val, &tb); if (tb != NULL) { PyException_SetTraceback(val, tb); Py_DECREF(tb); } Py_DECREF(typ); - _PyErr_NormalizeException(tstate, &typ2, &val2, &tb2); - PyException_SetContext(val2, val); - _PyErr_Restore(tstate, typ2, val2, tb2); + PyObject *exc2 = _PyErr_GetRaisedException(tstate); + PyException_SetContext(exc2, val); + _PyErr_SetRaisedException(tstate, exc2); } else { _PyErr_Restore(tstate, typ, val, tb); @@ -757,27 +755,15 @@ static PyObject * _PyErr_FormatVFromCause(PyThreadState *tstate, PyObject *exception, const char *format, va_list vargs) { - PyObject *exc, *val, *val2, *tb; - assert(_PyErr_Occurred(tstate)); - _PyErr_Fetch(tstate, &exc, &val, &tb); - _PyErr_NormalizeException(tstate, &exc, &val, &tb); - if (tb != NULL) { - PyException_SetTraceback(val, tb); - Py_DECREF(tb); - } - Py_DECREF(exc); + PyObject *exc = _PyErr_GetRaisedException(tstate); assert(!_PyErr_Occurred(tstate)); - _PyErr_FormatV(tstate, exception, format, vargs); - - _PyErr_Fetch(tstate, &exc, &val2, &tb); - _PyErr_NormalizeException(tstate, &exc, &val2, &tb); - PyException_SetCause(val2, Py_NewRef(val)); - PyException_SetContext(val2, Py_NewRef(val)); - Py_DECREF(val); - _PyErr_Restore(tstate, exc, val2, tb); - + PyObject *exc2 = _PyErr_GetRaisedException(tstate); + PyException_SetCause(exc2, Py_NewRef(exc)); + PyException_SetContext(exc2, Py_NewRef(exc)); + Py_DECREF(exc); + _PyErr_SetRaisedException(tstate, exc2); return NULL; } @@ -1214,6 +1200,33 @@ PyErr_Format(PyObject *exception, const char *format, ...) } +/* Adds a note to the current exception (if any) */ +void +_PyErr_FormatNote(const char *format, ...) 
+{ + PyObject *exc = PyErr_GetRaisedException(); + if (exc == NULL) { + return; + } + va_list vargs; + va_start(vargs, format); + PyObject *note = PyUnicode_FromFormatV(format, vargs); + va_end(vargs); + if (note == NULL) { + goto error; + } + int res = _PyException_AddNote(exc, note); + Py_DECREF(note); + if (res < 0) { + goto error; + } + PyErr_SetRaisedException(exc); + return; +error: + _PyErr_ChainExceptions1(exc); +} + + PyObject * PyErr_NewException(const char *name, PyObject *base, PyObject *dict) { @@ -1329,15 +1342,9 @@ static PyStructSequence_Desc UnraisableHookArgs_desc = { PyStatus _PyErr_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (UnraisableHookArgsType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin(&UnraisableHookArgsType, - &UnraisableHookArgs_desc) < 0) { - return _PyStatus_ERR("failed to initialize UnraisableHookArgs type"); - } + if (_PyStructSequence_InitBuiltin(&UnraisableHookArgsType, + &UnraisableHookArgs_desc) < 0) { + return _PyStatus_ERR("failed to initialize UnraisableHookArgs type"); } return _PyStatus_OK(); } @@ -1350,7 +1357,7 @@ _PyErr_FiniTypes(PyInterpreterState *interp) return; } - _PyStructSequence_FiniType(&UnraisableHookArgsType); + _PyStructSequence_FiniBuiltin(&UnraisableHookArgsType); } @@ -1698,19 +1705,18 @@ static void PyErr_SyntaxLocationObjectEx(PyObject *filename, int lineno, int col_offset, int end_lineno, int end_col_offset) { - PyObject *exc, *v, *tb, *tmp; PyThreadState *tstate = _PyThreadState_GET(); /* add attributes for the line number and filename for the error */ - _PyErr_Fetch(tstate, &exc, &v, &tb); - _PyErr_NormalizeException(tstate, &exc, &v, &tb); + PyObject *exc = _PyErr_GetRaisedException(tstate); /* XXX check that it is, indeed, a syntax error. It might not * be, though. */ - tmp = PyLong_FromLong(lineno); - if (tmp == NULL) + PyObject *tmp = PyLong_FromLong(lineno); + if (tmp == NULL) { _PyErr_Clear(tstate); + } else { - if (PyObject_SetAttr(v, &_Py_ID(lineno), tmp)) { + if (PyObject_SetAttr(exc, &_Py_ID(lineno), tmp)) { _PyErr_Clear(tstate); } Py_DECREF(tmp); @@ -1722,7 +1728,7 @@ PyErr_SyntaxLocationObjectEx(PyObject *filename, int lineno, int col_offset, _PyErr_Clear(tstate); } } - if (PyObject_SetAttr(v, &_Py_ID(offset), tmp ? tmp : Py_None)) { + if (PyObject_SetAttr(exc, &_Py_ID(offset), tmp ? tmp : Py_None)) { _PyErr_Clear(tstate); } Py_XDECREF(tmp); @@ -1734,7 +1740,7 @@ PyErr_SyntaxLocationObjectEx(PyObject *filename, int lineno, int col_offset, _PyErr_Clear(tstate); } } - if (PyObject_SetAttr(v, &_Py_ID(end_lineno), tmp ? tmp : Py_None)) { + if (PyObject_SetAttr(exc, &_Py_ID(end_lineno), tmp ? tmp : Py_None)) { _PyErr_Clear(tstate); } Py_XDECREF(tmp); @@ -1746,20 +1752,20 @@ PyErr_SyntaxLocationObjectEx(PyObject *filename, int lineno, int col_offset, _PyErr_Clear(tstate); } } - if (PyObject_SetAttr(v, &_Py_ID(end_offset), tmp ? tmp : Py_None)) { + if (PyObject_SetAttr(exc, &_Py_ID(end_offset), tmp ? 
tmp : Py_None)) { _PyErr_Clear(tstate); } Py_XDECREF(tmp); tmp = NULL; if (filename != NULL) { - if (PyObject_SetAttr(v, &_Py_ID(filename), filename)) { + if (PyObject_SetAttr(exc, &_Py_ID(filename), filename)) { _PyErr_Clear(tstate); } tmp = PyErr_ProgramTextObject(filename, lineno); if (tmp) { - if (PyObject_SetAttr(v, &_Py_ID(text), tmp)) { + if (PyObject_SetAttr(exc, &_Py_ID(text), tmp)) { _PyErr_Clear(tstate); } Py_DECREF(tmp); @@ -1768,17 +1774,17 @@ PyErr_SyntaxLocationObjectEx(PyObject *filename, int lineno, int col_offset, _PyErr_Clear(tstate); } } - if (exc != PyExc_SyntaxError) { - if (_PyObject_LookupAttr(v, &_Py_ID(msg), &tmp) < 0) { + if ((PyObject *)Py_TYPE(exc) != PyExc_SyntaxError) { + if (_PyObject_LookupAttr(exc, &_Py_ID(msg), &tmp) < 0) { _PyErr_Clear(tstate); } else if (tmp) { Py_DECREF(tmp); } else { - tmp = PyObject_Str(v); + tmp = PyObject_Str(exc); if (tmp) { - if (PyObject_SetAttr(v, &_Py_ID(msg), tmp)) { + if (PyObject_SetAttr(exc, &_Py_ID(msg), tmp)) { _PyErr_Clear(tstate); } Py_DECREF(tmp); @@ -1788,19 +1794,19 @@ PyErr_SyntaxLocationObjectEx(PyObject *filename, int lineno, int col_offset, } } - if (_PyObject_LookupAttr(v, &_Py_ID(print_file_and_line), &tmp) < 0) { + if (_PyObject_LookupAttr(exc, &_Py_ID(print_file_and_line), &tmp) < 0) { _PyErr_Clear(tstate); } else if (tmp) { Py_DECREF(tmp); } else { - if (PyObject_SetAttr(v, &_Py_ID(print_file_and_line), Py_None)) { + if (PyObject_SetAttr(exc, &_Py_ID(print_file_and_line), Py_None)) { _PyErr_Clear(tstate); } } } - _PyErr_Restore(tstate, exc, v, tb); + _PyErr_SetRaisedException(tstate, exc); } void diff --git a/Python/flowgraph.c b/Python/flowgraph.c new file mode 100644 index 00000000000000..fbaceef70edb3a --- /dev/null +++ b/Python/flowgraph.c @@ -0,0 +1,2164 @@ + +#include <stdbool.h> + +#include "Python.h" +#include "pycore_flowgraph.h" +#include "pycore_compile.h" +#include "pycore_pymem.h" // _PyMem_IsPtrFreed() + +#include "pycore_opcode_utils.h" +#define NEED_OPCODE_METADATA +#include "opcode_metadata.h" // _PyOpcode_opcode_metadata, _PyOpcode_num_popped/pushed +#undef NEED_OPCODE_METADATA + + +#undef SUCCESS +#undef ERROR +#define SUCCESS 0 +#define ERROR -1 + +#define RETURN_IF_ERROR(X) \ + if ((X) == -1) { \ + return ERROR; \ + } + +#define DEFAULT_BLOCK_SIZE 16 + +typedef _PyCompilerSrcLocation location; +typedef _PyCfgJumpTargetLabel jump_target_label; +typedef _PyCfgBasicblock basicblock; +typedef _PyCfgBuilder cfg_builder; +typedef _PyCfgInstruction cfg_instr; + +static const jump_target_label NO_LABEL = {-1}; + +#define SAME_LABEL(L1, L2) ((L1).id == (L2).id) +#define IS_LABEL(L) (!SAME_LABEL((L), (NO_LABEL))) + + +static inline int +is_block_push(cfg_instr *i) +{ + return IS_BLOCK_PUSH_OPCODE(i->i_opcode); +} + +static inline int +is_jump(cfg_instr *i) +{ + return IS_JUMP_OPCODE(i->i_opcode); +} + +/* One arg*/ +#define INSTR_SET_OP1(I, OP, ARG) \ + do { \ + assert(HAS_ARG(OP)); \ + _PyCfgInstruction *_instr__ptr_ = (I); \ + _instr__ptr_->i_opcode = (OP); \ + _instr__ptr_->i_oparg = (ARG); \ + } while (0); + +/* No args*/ +#define INSTR_SET_OP0(I, OP) \ + do { \ + assert(!HAS_ARG(OP)); \ + _PyCfgInstruction *_instr__ptr_ = (I); \ + _instr__ptr_->i_opcode = (OP); \ + _instr__ptr_->i_oparg = 0; \ + } while (0); + +/***** Blocks *****/ + +/* Returns the offset of the next instruction in the current block's + b_instr array. Resizes the b_instr as necessary. + Returns -1 on failure. 
+*/ +static int +basicblock_next_instr(basicblock *b) +{ + assert(b != NULL); + RETURN_IF_ERROR( + _PyCompile_EnsureArrayLargeEnough( + b->b_iused + 1, + (void**)&b->b_instr, + &b->b_ialloc, + DEFAULT_BLOCK_SIZE, + sizeof(cfg_instr))); + return b->b_iused++; +} + +/* Allocate a new block and return a pointer to it. + Returns NULL on error. +*/ + +static basicblock * +cfg_builder_new_block(cfg_builder *g) +{ + basicblock *b = (basicblock *)PyObject_Calloc(1, sizeof(basicblock)); + if (b == NULL) { + PyErr_NoMemory(); + return NULL; + } + /* Extend the singly linked list of blocks with new block. */ + b->b_list = g->g_block_list; + g->g_block_list = b; + b->b_label = NO_LABEL; + return b; +} + +static int +basicblock_addop(basicblock *b, int opcode, int oparg, location loc) +{ + assert(IS_WITHIN_OPCODE_RANGE(opcode)); + assert(!IS_ASSEMBLER_OPCODE(opcode)); + assert(HAS_ARG(opcode) || HAS_TARGET(opcode) || oparg == 0); + assert(0 <= oparg && oparg < (1 << 30)); + + int off = basicblock_next_instr(b); + if (off < 0) { + return ERROR; + } + cfg_instr *i = &b->b_instr[off]; + i->i_opcode = opcode; + i->i_oparg = oparg; + i->i_target = NULL; + i->i_loc = loc; + + return SUCCESS; +} + +static inline int +basicblock_append_instructions(basicblock *target, basicblock *source) +{ + for (int i = 0; i < source->b_iused; i++) { + int n = basicblock_next_instr(target); + if (n < 0) { + return ERROR; + } + target->b_instr[n] = source->b_instr[i]; + } + return SUCCESS; +} + +static basicblock * +copy_basicblock(cfg_builder *g, basicblock *block) +{ + /* Cannot copy a block if it has a fallthrough, since + * a block can only have one fallthrough predecessor. + */ + assert(BB_NO_FALLTHROUGH(block)); + basicblock *result = cfg_builder_new_block(g); + if (result == NULL) { + return NULL; + } + if (basicblock_append_instructions(result, block) < 0) { + return NULL; + } + return result; +} + +int +_PyBasicblock_InsertInstruction(basicblock *block, int pos, cfg_instr *instr) { + RETURN_IF_ERROR(basicblock_next_instr(block)); + for (int i = block->b_iused - 1; i > pos; i--) { + block->b_instr[i] = block->b_instr[i-1]; + } + block->b_instr[pos] = *instr; + return SUCCESS; +} + +static int +instr_size(cfg_instr *instruction) +{ + return _PyCompile_InstrSize(instruction->i_opcode, instruction->i_oparg); +} + +static int +blocksize(basicblock *b) +{ + int size = 0; + for (int i = 0; i < b->b_iused; i++) { + size += instr_size(&b->b_instr[i]); + } + return size; +} + +/* For debugging purposes only */ +#if 0 +static void +dump_instr(cfg_instr *i) +{ + const char *jump = is_jump(i) ? "jump " : ""; + + char arg[128]; + + *arg = '\0'; + if (HAS_ARG(i->i_opcode)) { + sprintf(arg, "arg: %d ", i->i_oparg); + } + if (HAS_TARGET(i->i_opcode)) { + sprintf(arg, "target: %p [%d] ", i->i_target, i->i_oparg); + } + fprintf(stderr, "line: %d, opcode: %d %s%s\n", + i->i_loc.lineno, i->i_opcode, arg, jump); +} + +static inline int +basicblock_returns(const basicblock *b) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + return last && (last->i_opcode == RETURN_VALUE || last->i_opcode == RETURN_CONST); +} + +static void +dump_basicblock(const basicblock *b) +{ + const char *b_return = basicblock_returns(b) ? 
"return " : ""; + fprintf(stderr, "%d: [EH=%d CLD=%d WRM=%d NO_FT=%d %p] used: %d, depth: %d, offset: %d %s\n", + b->b_label.id, b->b_except_handler, b->b_cold, b->b_warm, BB_NO_FALLTHROUGH(b), b, b->b_iused, + b->b_startdepth, b->b_offset, b_return); + if (b->b_instr) { + int i; + for (i = 0; i < b->b_iused; i++) { + fprintf(stderr, " [%02d] ", i); + dump_instr(b->b_instr + i); + } + } +} + +void +_PyCfgBuilder_DumpGraph(const basicblock *entryblock) +{ + for (const basicblock *b = entryblock; b != NULL; b = b->b_next) { + dump_basicblock(b); + } +} + +#endif + + +/***** CFG construction and modification *****/ + +static basicblock * +cfg_builder_use_next_block(cfg_builder *g, basicblock *block) +{ + assert(block != NULL); + g->g_curblock->b_next = block; + g->g_curblock = block; + return block; +} + +cfg_instr * +_PyCfg_BasicblockLastInstr(const basicblock *b) { + assert(b->b_iused >= 0); + if (b->b_iused > 0) { + assert(b->b_instr != NULL); + return &b->b_instr[b->b_iused - 1]; + } + return NULL; +} + +static inline int +basicblock_exits_scope(const basicblock *b) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + return last && IS_SCOPE_EXIT_OPCODE(last->i_opcode); +} + +static bool +cfg_builder_current_block_is_terminated(cfg_builder *g) +{ + cfg_instr *last = _PyCfg_BasicblockLastInstr(g->g_curblock); + if (last && IS_TERMINATOR_OPCODE(last->i_opcode)) { + return true; + } + if (IS_LABEL(g->g_current_label)) { + if (last || IS_LABEL(g->g_curblock->b_label)) { + return true; + } + else { + /* current block is empty, label it */ + g->g_curblock->b_label = g->g_current_label; + g->g_current_label = NO_LABEL; + } + } + return false; +} + +static int +cfg_builder_maybe_start_new_block(cfg_builder *g) +{ + if (cfg_builder_current_block_is_terminated(g)) { + basicblock *b = cfg_builder_new_block(g); + if (b == NULL) { + return ERROR; + } + b->b_label = g->g_current_label; + g->g_current_label = NO_LABEL; + cfg_builder_use_next_block(g, b); + } + return SUCCESS; +} + +#ifndef NDEBUG +static bool +cfg_builder_check(cfg_builder *g) +{ + assert(g->g_entryblock->b_iused > 0); + for (basicblock *block = g->g_block_list; block != NULL; block = block->b_list) { + assert(!_PyMem_IsPtrFreed(block)); + if (block->b_instr != NULL) { + assert(block->b_ialloc > 0); + assert(block->b_iused >= 0); + assert(block->b_ialloc >= block->b_iused); + } + else { + assert (block->b_iused == 0); + assert (block->b_ialloc == 0); + } + } + return true; +} +#endif + +int +_PyCfgBuilder_Init(cfg_builder *g) +{ + g->g_block_list = NULL; + basicblock *block = cfg_builder_new_block(g); + if (block == NULL) { + return ERROR; + } + g->g_curblock = g->g_entryblock = block; + g->g_current_label = NO_LABEL; + return SUCCESS; +} + +void +_PyCfgBuilder_Fini(cfg_builder* g) +{ + assert(cfg_builder_check(g)); + basicblock *b = g->g_block_list; + while (b != NULL) { + if (b->b_instr) { + PyObject_Free((void *)b->b_instr); + } + basicblock *next = b->b_list; + PyObject_Free((void *)b); + b = next; + } +} + +int +_PyCfgBuilder_UseLabel(cfg_builder *g, jump_target_label lbl) +{ + g->g_current_label = lbl; + return cfg_builder_maybe_start_new_block(g); +} + +int +_PyCfgBuilder_Addop(cfg_builder *g, int opcode, int oparg, location loc) +{ + RETURN_IF_ERROR(cfg_builder_maybe_start_new_block(g)); + return basicblock_addop(g->g_curblock, opcode, oparg, loc); +} + + +/***** debugging helpers *****/ + +#ifndef NDEBUG +static int remove_redundant_nops(basicblock *bb); + +static bool +no_redundant_nops(cfg_builder *g) { + for (basicblock *b 
= g->g_entryblock; b != NULL; b = b->b_next) { + if (remove_redundant_nops(b) != 0) { + return false; + } + } + return true; +} + +static bool +no_empty_basic_blocks(cfg_builder *g) { + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + if (b->b_iused == 0) { + return false; + } + } + return true; +} + +static bool +no_redundant_jumps(cfg_builder *g) { + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + if (last != NULL) { + if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { + assert(last->i_target != b->b_next); + if (last->i_target == b->b_next) { + return false; + } + } + } + } + return true; +} + +#endif + +/***** CFG preprocessing (jump targets and exceptions) *****/ + +static int +normalize_jumps_in_block(cfg_builder *g, basicblock *b) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + if (last == NULL || !is_jump(last)) { + return SUCCESS; + } + assert(!IS_ASSEMBLER_OPCODE(last->i_opcode)); + bool is_forward = last->i_target->b_visited == 0; + switch(last->i_opcode) { + case JUMP: + last->i_opcode = is_forward ? JUMP_FORWARD : JUMP_BACKWARD; + return SUCCESS; + case JUMP_NO_INTERRUPT: + last->i_opcode = is_forward ? + JUMP_FORWARD : JUMP_BACKWARD_NO_INTERRUPT; + return SUCCESS; + } + int reversed_opcode = 0; + switch(last->i_opcode) { + case POP_JUMP_IF_NOT_NONE: + reversed_opcode = POP_JUMP_IF_NONE; + break; + case POP_JUMP_IF_NONE: + reversed_opcode = POP_JUMP_IF_NOT_NONE; + break; + case POP_JUMP_IF_FALSE: + reversed_opcode = POP_JUMP_IF_TRUE; + break; + case POP_JUMP_IF_TRUE: + reversed_opcode = POP_JUMP_IF_FALSE; + break; + } + if (is_forward) { + return SUCCESS; + } + /* transform 'conditional jump T' to + * 'reversed_jump b_next' followed by 'jump_backwards T' + */ + + basicblock *target = last->i_target; + basicblock *backwards_jump = cfg_builder_new_block(g); + if (backwards_jump == NULL) { + return ERROR; + } + basicblock_addop(backwards_jump, JUMP, target->b_label.id, NO_LOCATION); + backwards_jump->b_instr[0].i_target = target; + last->i_opcode = reversed_opcode; + last->i_target = b->b_next; + + backwards_jump->b_cold = b->b_cold; + backwards_jump->b_next = b->b_next; + b->b_next = backwards_jump; + return SUCCESS; +} + + +static int +normalize_jumps(_PyCfgBuilder *g) +{ + basicblock *entryblock = g->g_entryblock; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + b->b_visited = 0; + } + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + b->b_visited = 1; + RETURN_IF_ERROR(normalize_jumps_in_block(g, b)); + } + return SUCCESS; +} + +static void +resolve_jump_offsets(basicblock *entryblock) +{ + int bsize, totsize, extended_arg_recompile; + + /* Compute the size of each block and fixup jump args. + Replace block pointer with position in bytecode. */ + do { + totsize = 0; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + bsize = blocksize(b); + b->b_offset = totsize; + totsize += bsize; + } + extended_arg_recompile = 0; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + bsize = b->b_offset; + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + int isize = instr_size(instr); + /* jump offsets are computed relative to + * the instruction pointer after fetching + * the jump instruction. 
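/* A standalone sketch (not part of the patch) of the rewrite described in
 * normalize_jumps_in_block() above: a backward conditional jump becomes a
 * reversed forward conditional jump over a JUMP_BACKWARD trampoline.  The
 * flat toy_instr array and opcodes are invented for illustration; fixing up
 * other jump targets after the insertion is omitted here. */
#include <stdio.h>
#include <string.h>

enum { NOP, POP_JUMP_IF_TRUE, POP_JUMP_IF_FALSE, JUMP_BACKWARD, RET };
typedef struct { int opcode; int target; /* -1 = not a jump */ } toy_instr;

/* If code[i] is a conditional jump whose target lies behind it, reverse
 * its sense and insert a JUMP_BACKWARD trampoline right after it. */
static int
normalize_backward_jump(toy_instr *code, int n, int cap, int i)
{
    if (code[i].opcode != POP_JUMP_IF_TRUE && code[i].opcode != POP_JUMP_IF_FALSE) {
        return n;
    }
    if (code[i].target < 0 || code[i].target >= i || n >= cap) {
        return n;                       /* forward jump or no room */
    }
    memmove(&code[i + 2], &code[i + 1], (size_t)(n - i - 1) * sizeof(toy_instr));
    code[i + 1].opcode = JUMP_BACKWARD;
    code[i + 1].target = code[i].target;
    code[i].opcode = (code[i].opcode == POP_JUMP_IF_TRUE)
                         ? POP_JUMP_IF_FALSE : POP_JUMP_IF_TRUE;
    code[i].target = i + 2;             /* skip over the trampoline */
    return n + 1;
}

int main(void)
{
    toy_instr code[8] = {
        {NOP, -1},
        {POP_JUMP_IF_TRUE, 0},          /* conditional jump to earlier code */
        {RET, -1},
    };
    int n = normalize_backward_jump(code, 3, 8, 1);
    for (int i = 0; i < n; i++) {
        printf("%d: opcode %d, target %d\n", i, code[i].opcode, code[i].target);
    }
    /* instruction 1 becomes POP_JUMP_IF_FALSE -> 3; instruction 2 is JUMP_BACKWARD -> 0 */
    return 0;
}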
+ */ + bsize += isize; + if (is_jump(instr)) { + instr->i_oparg = instr->i_target->b_offset; + if (instr->i_oparg < bsize) { + assert(IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode)); + instr->i_oparg = bsize - instr->i_oparg; + } + else { + assert(!IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode)); + instr->i_oparg -= bsize; + } + if (instr_size(instr) != isize) { + extended_arg_recompile = 1; + } + } + } + } + + /* XXX: This is an awful hack that could hurt performance, but + on the bright side it should work until we come up + with a better solution. + + The issue is that in the first loop blocksize() is called + which calls instr_size() which requires i_oparg be set + appropriately. There is a bootstrap problem because + i_oparg is calculated in the second loop above. + + So we loop until we stop seeing new EXTENDED_ARGs. + The only EXTENDED_ARGs that could be popping up are + ones in jump instructions. So this should converge + fairly quickly. + */ + } while (extended_arg_recompile); +} + +int +_PyCfg_ResolveJumps(_PyCfgBuilder *g) +{ + RETURN_IF_ERROR(normalize_jumps(g)); + assert(no_redundant_jumps(g)); + resolve_jump_offsets(g->g_entryblock); + return SUCCESS; +} + +static int +check_cfg(cfg_builder *g) { + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + /* Raise SystemError if jump or exit is not last instruction in the block. */ + for (int i = 0; i < b->b_iused; i++) { + int opcode = b->b_instr[i].i_opcode; + assert(!IS_ASSEMBLER_OPCODE(opcode)); + if (IS_TERMINATOR_OPCODE(opcode)) { + if (i != b->b_iused - 1) { + PyErr_SetString(PyExc_SystemError, "malformed control flow graph."); + return ERROR; + } + } + } + } + return SUCCESS; +} + +/* Calculate the actual jump target from the target_label */ +static int +translate_jump_labels_to_targets(basicblock *entryblock) +{ + int max_label = -1; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + if (b->b_label.id > max_label) { + max_label = b->b_label.id; + } + } + size_t mapsize = sizeof(basicblock *) * (max_label + 1); + basicblock **label2block = (basicblock **)PyMem_Malloc(mapsize); + if (!label2block) { + PyErr_NoMemory(); + return ERROR; + } + memset(label2block, 0, mapsize); + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + if (b->b_label.id >= 0) { + label2block[b->b_label.id] = b; + } + } + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + assert(instr->i_target == NULL); + if (HAS_TARGET(instr->i_opcode)) { + int lbl = instr->i_oparg; + assert(lbl >= 0 && lbl <= max_label); + instr->i_target = label2block[lbl]; + assert(instr->i_target != NULL); + assert(instr->i_target->b_label.id == lbl); + } + } + } + PyMem_Free(label2block); + return SUCCESS; +} + +int +_PyCfg_JumpLabelsToTargets(basicblock *entryblock) +{ + return translate_jump_labels_to_targets(entryblock); +} + +static int +mark_except_handlers(basicblock *entryblock) { +#ifndef NDEBUG + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + assert(!b->b_except_handler); + } +#endif + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i=0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (is_block_push(instr)) { + instr->i_target->b_except_handler = 1; + } + } + } + return SUCCESS; +} + + +typedef _PyCfgExceptStack ExceptStack; + +static basicblock * +push_except_block(ExceptStack *stack, cfg_instr *setup) { + assert(is_block_push(setup)); + int opcode = setup->i_opcode; + basicblock * target = 
setup->i_target; + if (opcode == SETUP_WITH || opcode == SETUP_CLEANUP) { + target->b_preserve_lasti = 1; + } + stack->handlers[++stack->depth] = target; + return target; +} + +static basicblock * +pop_except_block(ExceptStack *stack) { + assert(stack->depth > 0); + return stack->handlers[--stack->depth]; +} + +static basicblock * +except_stack_top(ExceptStack *stack) { + return stack->handlers[stack->depth]; +} + +static ExceptStack * +make_except_stack(void) { + ExceptStack *new = PyMem_Malloc(sizeof(ExceptStack)); + if (new == NULL) { + PyErr_NoMemory(); + return NULL; + } + new->depth = 0; + new->handlers[0] = NULL; + return new; +} + +static ExceptStack * +copy_except_stack(ExceptStack *stack) { + ExceptStack *copy = PyMem_Malloc(sizeof(ExceptStack)); + if (copy == NULL) { + PyErr_NoMemory(); + return NULL; + } + memcpy(copy, stack, sizeof(ExceptStack)); + return copy; +} + +static basicblock** +make_cfg_traversal_stack(basicblock *entryblock) { + int nblocks = 0; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + b->b_visited = 0; + nblocks++; + } + basicblock **stack = (basicblock **)PyMem_Malloc(sizeof(basicblock *) * nblocks); + if (!stack) { + PyErr_NoMemory(); + } + return stack; +} + +Py_LOCAL_INLINE(void) +stackdepth_push(basicblock ***sp, basicblock *b, int depth) +{ + assert(b->b_startdepth < 0 || b->b_startdepth == depth); + if (b->b_startdepth < depth && b->b_startdepth < 100) { + assert(b->b_startdepth < 0); + b->b_startdepth = depth; + *(*sp)++ = b; + } +} + +/* Find the flow path that needs the largest stack. We assume that + * cycles in the flow graph have no net effect on the stack depth. + */ +int +_PyCfg_Stackdepth(basicblock *entryblock, int code_flags) +{ + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + b->b_startdepth = INT_MIN; + } + basicblock **stack = make_cfg_traversal_stack(entryblock); + if (!stack) { + return ERROR; + } + + int maxdepth = 0; + basicblock **sp = stack; + if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) { + stackdepth_push(&sp, entryblock, 1); + } else { + stackdepth_push(&sp, entryblock, 0); + } + + while (sp != stack) { + basicblock *b = *--sp; + int depth = b->b_startdepth; + assert(depth >= 0); + basicblock *next = b->b_next; + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + int effect = PyCompile_OpcodeStackEffectWithJump(instr->i_opcode, instr->i_oparg, 0); + if (effect == PY_INVALID_STACK_EFFECT) { + PyErr_Format(PyExc_SystemError, + "compiler PyCompile_OpcodeStackEffectWithJump(opcode=%d, arg=%i) failed", + instr->i_opcode, instr->i_oparg); + return ERROR; + } + int new_depth = depth + effect; + assert(new_depth >= 0); /* invalid code or bug in stackdepth() */ + if (new_depth > maxdepth) { + maxdepth = new_depth; + } + if (HAS_TARGET(instr->i_opcode)) { + effect = PyCompile_OpcodeStackEffectWithJump(instr->i_opcode, instr->i_oparg, 1); + assert(effect != PY_INVALID_STACK_EFFECT); + int target_depth = depth + effect; + assert(target_depth >= 0); /* invalid code or bug in stackdepth() */ + if (target_depth > maxdepth) { + maxdepth = target_depth; + } + stackdepth_push(&sp, instr->i_target, target_depth); + } + depth = new_depth; + assert(!IS_ASSEMBLER_OPCODE(instr->i_opcode)); + if (IS_UNCONDITIONAL_JUMP_OPCODE(instr->i_opcode) || + IS_SCOPE_EXIT_OPCODE(instr->i_opcode)) + { + /* remaining code is dead */ + next = NULL; + break; + } + } + if (next != NULL) { + assert(BB_HAS_FALLTHROUGH(b)); + stackdepth_push(&sp, next, depth); + } + } + PyMem_Free(stack); 
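/* A standalone sketch (not part of the patch) of the worklist scheme used by
 * _PyCfg_Stackdepth() above: record the stack depth at each block entry, push
 * successors with their computed entry depth, and track the maximum seen.
 * The toy_block/toy_instr types and the (acyclic) example graph are invented
 * for illustration only. */
#include <stdio.h>

#define NBLOCKS 3

typedef struct { int effect; int jump_to; /* -1 = no jump */ } toy_instr;
typedef struct { toy_instr code[4]; int ninstr; int next; /* fallthrough, -1 = none */ } toy_block;

int main(void)
{
    /* block 0: push, push + maybe jump to block 2; block 1: push; block 2: pop */
    toy_block blocks[NBLOCKS] = {
        {{{+1, -1}, {+1, 2}}, 2, 1},
        {{{+1, -1}}, 1, 2},
        {{{-1, -1}}, 1, -1},
    };
    int startdepth[NBLOCKS] = {-1, -1, -1};
    int stack[8], sp = 0, maxdepth = 0;

    startdepth[0] = 0;
    stack[sp++] = 0;
    while (sp > 0) {
        int b = stack[--sp];
        int depth = startdepth[b];
        for (int i = 0; i < blocks[b].ninstr; i++) {
            depth += blocks[b].code[i].effect;
            if (depth > maxdepth) {
                maxdepth = depth;
            }
            int t = blocks[b].code[i].jump_to;
            if (t >= 0 && startdepth[t] < depth) {
                startdepth[t] = depth;      /* push jump target with its entry depth */
                stack[sp++] = t;
            }
        }
        int next = blocks[b].next;
        if (next >= 0 && startdepth[next] < depth) {
            startdepth[next] = depth;       /* push fallthrough successor */
            stack[sp++] = next;
        }
    }
    printf("max stack depth: %d\n", maxdepth);   /* prints 3 */
    return 0;
}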
+ return maxdepth; +} + +static int +label_exception_targets(basicblock *entryblock) { + basicblock **todo_stack = make_cfg_traversal_stack(entryblock); + if (todo_stack == NULL) { + return ERROR; + } + ExceptStack *except_stack = make_except_stack(); + if (except_stack == NULL) { + PyMem_Free(todo_stack); + PyErr_NoMemory(); + return ERROR; + } + except_stack->depth = 0; + todo_stack[0] = entryblock; + entryblock->b_visited = 1; + entryblock->b_exceptstack = except_stack; + basicblock **todo = &todo_stack[1]; + basicblock *handler = NULL; + while (todo > todo_stack) { + todo--; + basicblock *b = todo[0]; + assert(b->b_visited == 1); + except_stack = b->b_exceptstack; + assert(except_stack != NULL); + b->b_exceptstack = NULL; + handler = except_stack_top(except_stack); + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (is_block_push(instr)) { + if (!instr->i_target->b_visited) { + ExceptStack *copy = copy_except_stack(except_stack); + if (copy == NULL) { + goto error; + } + instr->i_target->b_exceptstack = copy; + todo[0] = instr->i_target; + instr->i_target->b_visited = 1; + todo++; + } + handler = push_except_block(except_stack, instr); + } + else if (instr->i_opcode == POP_BLOCK) { + handler = pop_except_block(except_stack); + } + else if (is_jump(instr)) { + instr->i_except = handler; + assert(i == b->b_iused -1); + if (!instr->i_target->b_visited) { + if (BB_HAS_FALLTHROUGH(b)) { + ExceptStack *copy = copy_except_stack(except_stack); + if (copy == NULL) { + goto error; + } + instr->i_target->b_exceptstack = copy; + } + else { + instr->i_target->b_exceptstack = except_stack; + except_stack = NULL; + } + todo[0] = instr->i_target; + instr->i_target->b_visited = 1; + todo++; + } + } + else { + if (instr->i_opcode == YIELD_VALUE) { + instr->i_oparg = except_stack->depth; + } + instr->i_except = handler; + } + } + if (BB_HAS_FALLTHROUGH(b) && !b->b_next->b_visited) { + assert(except_stack != NULL); + b->b_next->b_exceptstack = except_stack; + todo[0] = b->b_next; + b->b_next->b_visited = 1; + todo++; + } + else if (except_stack != NULL) { + PyMem_Free(except_stack); + } + } +#ifdef Py_DEBUG + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + assert(b->b_exceptstack == NULL); + } +#endif + PyMem_Free(todo_stack); + return SUCCESS; +error: + PyMem_Free(todo_stack); + PyMem_Free(except_stack); + return ERROR; +} + +/***** CFG optimizations *****/ + +static int +mark_reachable(basicblock *entryblock) { + basicblock **stack = make_cfg_traversal_stack(entryblock); + if (stack == NULL) { + return ERROR; + } + basicblock **sp = stack; + entryblock->b_predecessors = 1; + *sp++ = entryblock; + while (sp > stack) { + basicblock *b = *(--sp); + b->b_visited = 1; + if (b->b_next && BB_HAS_FALLTHROUGH(b)) { + if (!b->b_next->b_visited) { + assert(b->b_next->b_predecessors == 0); + *sp++ = b->b_next; + } + b->b_next->b_predecessors++; + } + for (int i = 0; i < b->b_iused; i++) { + basicblock *target; + cfg_instr *instr = &b->b_instr[i]; + if (is_jump(instr) || is_block_push(instr)) { + target = instr->i_target; + if (!target->b_visited) { + assert(target->b_predecessors == 0 || target == b->b_next); + *sp++ = target; + } + target->b_predecessors++; + } + } + } + PyMem_Free(stack); + return SUCCESS; +} + +static void +eliminate_empty_basic_blocks(cfg_builder *g) { + /* Eliminate empty blocks */ + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + basicblock *next = b->b_next; + while (next && next->b_iused == 0) { + next = next->b_next; + } + 
b->b_next = next; + } + while(g->g_entryblock && g->g_entryblock->b_iused == 0) { + g->g_entryblock = g->g_entryblock->b_next; + } + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + assert(b->b_iused > 0); + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (HAS_TARGET(instr->i_opcode)) { + basicblock *target = instr->i_target; + while (target->b_iused == 0) { + target = target->b_next; + } + instr->i_target = target; + assert(instr->i_target && instr->i_target->b_iused > 0); + } + } + } +} + +static int +remove_redundant_nops(basicblock *bb) { + /* Remove NOPs when legal to do so. */ + int dest = 0; + int prev_lineno = -1; + for (int src = 0; src < bb->b_iused; src++) { + int lineno = bb->b_instr[src].i_loc.lineno; + if (bb->b_instr[src].i_opcode == NOP) { + /* Eliminate no-op if it doesn't have a line number */ + if (lineno < 0) { + continue; + } + /* or, if the previous instruction had the same line number. */ + if (prev_lineno == lineno) { + continue; + } + /* or, if the next instruction has same line number or no line number */ + if (src < bb->b_iused - 1) { + int next_lineno = bb->b_instr[src+1].i_loc.lineno; + if (next_lineno == lineno) { + continue; + } + if (next_lineno < 0) { + bb->b_instr[src+1].i_loc = bb->b_instr[src].i_loc; + continue; + } + } + else { + basicblock* next = bb->b_next; + while (next && next->b_iused == 0) { + next = next->b_next; + } + /* or if last instruction in BB and next BB has same line number */ + if (next) { + if (lineno == next->b_instr[0].i_loc.lineno) { + continue; + } + } + } + + } + if (dest != src) { + bb->b_instr[dest] = bb->b_instr[src]; + } + dest++; + prev_lineno = lineno; + } + assert(dest <= bb->b_iused); + int num_removed = bb->b_iused - dest; + bb->b_iused = dest; + return num_removed; +} + +static int +remove_redundant_nops_and_pairs(basicblock *entryblock) +{ + bool done = false; + + while (! done) { + done = true; + cfg_instr *prev_instr = NULL; + cfg_instr *instr = NULL; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + remove_redundant_nops(b); + if (IS_LABEL(b->b_label)) { + /* this block is a jump target, forget instr */ + instr = NULL; + } + for (int i = 0; i < b->b_iused; i++) { + prev_instr = instr; + instr = &b->b_instr[i]; + int prev_opcode = prev_instr ? prev_instr->i_opcode : 0; + int prev_oparg = prev_instr ? prev_instr->i_oparg : 0; + int opcode = instr->i_opcode; + bool is_redundant_pair = false; + if (opcode == POP_TOP) { + if (prev_opcode == LOAD_CONST) { + is_redundant_pair = true; + } + else if (prev_opcode == COPY && prev_oparg == 1) { + is_redundant_pair = true; + } + } + if (is_redundant_pair) { + INSTR_SET_OP0(prev_instr, NOP); + INSTR_SET_OP0(instr, NOP); + done = false; + } + } + if ((instr && is_jump(instr)) || !BB_HAS_FALLTHROUGH(b)) { + instr = NULL; + } + } + } + return SUCCESS; +} + +static int +remove_redundant_jumps(cfg_builder *g) { + /* If a non-empty block ends with a jump instruction, check if the next + * non-empty block reached through normal flow control is the target + * of that jump. If it is, then the jump instruction is redundant and + * can be deleted. 
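/* A standalone sketch (not part of the patch) of the NOP-compaction rules in
 * remove_redundant_nops() above: drop a NOP if it carries no line number,
 * repeats the previous line number, or its line number also appears on the
 * next instruction.  The toy encoding is invented; location propagation and
 * the cross-block case are omitted. */
#include <stdio.h>

enum { NOP = 0, OP = 1 };
typedef struct { int opcode; int lineno; } toy_instr;

static int
compact_nops(toy_instr *code, int n)
{
    int dest = 0;
    int prev_lineno = -1;
    for (int src = 0; src < n; src++) {
        int lineno = code[src].lineno;
        if (code[src].opcode == NOP) {
            if (lineno < 0 || lineno == prev_lineno ||
                (src + 1 < n && code[src + 1].lineno == lineno)) {
                continue;               /* drop this NOP */
            }
        }
        code[dest++] = code[src];
        prev_lineno = lineno;
    }
    return dest;                        /* new length */
}

int main(void)
{
    toy_instr code[] = {
        {OP, 1}, {NOP, 1},              /* dropped: same line as previous */
        {NOP, -1},                      /* dropped: no line number */
        {NOP, 2}, {OP, 2},              /* dropped: next has the same line */
        {NOP, 3}, {OP, 4},              /* kept: line 3 would be lost otherwise */
    };
    int n = compact_nops(code, (int)(sizeof(code) / sizeof(code[0])));
    printf("%d instructions kept\n", n);    /* prints 4 */
    return 0;
}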
+ */ + assert(no_empty_basic_blocks(g)); + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + assert(last != NULL); + assert(!IS_ASSEMBLER_OPCODE(last->i_opcode)); + if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { + if (last->i_target == NULL) { + PyErr_SetString(PyExc_SystemError, "jump with NULL target"); + return ERROR; + } + if (last->i_target == b->b_next) { + assert(b->b_next->b_iused); + INSTR_SET_OP0(last, NOP); + } + } + } + return SUCCESS; +} + +/* Maximum size of basic block that should be copied in optimizer */ +#define MAX_COPY_SIZE 4 + +/* If this block ends with an unconditional jump to a small exit block, then + * remove the jump and extend this block with the target. + * Returns 1 if extended, 0 if no change, and -1 on error. + */ +static int +inline_small_exit_blocks(basicblock *bb) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(bb); + if (last == NULL) { + return 0; + } + if (!IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { + return 0; + } + basicblock *target = last->i_target; + if (basicblock_exits_scope(target) && target->b_iused <= MAX_COPY_SIZE) { + INSTR_SET_OP0(last, NOP); + RETURN_IF_ERROR(basicblock_append_instructions(bb, target)); + return 1; + } + return 0; +} + +// Attempt to eliminate jumps to jumps by updating inst to jump to +// target->i_target using the provided opcode. Return whether or not the +// optimization was successful. +static bool +jump_thread(cfg_instr *inst, cfg_instr *target, int opcode) +{ + assert(is_jump(inst)); + assert(is_jump(target)); + // bpo-45773: If inst->i_target == target->i_target, then nothing actually + // changes (and we fall into an infinite loop): + if ((inst->i_loc.lineno == target->i_loc.lineno || target->i_loc.lineno == -1) && + inst->i_target != target->i_target) + { + inst->i_target = target->i_target; + inst->i_opcode = opcode; + return true; + } + return false; +} + +static PyObject* +get_const_value(int opcode, int oparg, PyObject *co_consts) +{ + PyObject *constant = NULL; + assert(HAS_CONST(opcode)); + if (opcode == LOAD_CONST) { + constant = PyList_GET_ITEM(co_consts, oparg); + } + + if (constant == NULL) { + PyErr_SetString(PyExc_SystemError, + "Internal error: failed to get value of a constant"); + return NULL; + } + return Py_NewRef(constant); +} + +/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n + with LOAD_CONST (c1, c2, ... cn). + The consts table must still be in list form so that the + new constant (c1, c2, ... cn) can be appended. + Called with codestr pointing to the first LOAD_CONST. 
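/* A standalone sketch (not part of the patch) of jump threading as in
 * jump_thread() above: a jump whose target is itself an unconditional jump is
 * retargeted to the final destination, with a guard against retargeting that
 * makes no progress.  The toy opcodes and flat instruction array are invented;
 * the real code also compares line numbers before threading. */
#include <stdio.h>

enum { LOAD, JUMP, RET };
typedef struct { int opcode; int target; /* -1 = not a jump */ } toy_instr;

static int
jump_thread(toy_instr *inst, toy_instr *target)
{
    /* Guard (cf. bpo-45773): if both jumps already share a destination,
     * retargeting would make no progress. */
    if (inst->target != target->target) {
        inst->target = target->target;
        return 1;
    }
    return 0;
}

static void
thread_jumps(toy_instr *code, int n)
{
    for (int i = 0; i < n; i++) {
        while (code[i].opcode == JUMP && code[code[i].target].opcode == JUMP) {
            if (!jump_thread(&code[i], &code[code[i].target])) {
                break;
            }
        }
    }
}

int main(void)
{
    toy_instr code[] = {
        {LOAD, -1},
        {JUMP, 3},      /* a jump to another jump ... */
        {LOAD, -1},
        {JUMP, 5},      /* ... which jumps straight to the return */
        {LOAD, -1},
        {RET, -1},
    };
    thread_jumps(code, 6);
    printf("instruction 1 now jumps to %d\n", code[1].target);  /* prints 5 */
    return 0;
}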
+*/ +static int +fold_tuple_on_constants(PyObject *const_cache, + cfg_instr *inst, + int n, PyObject *consts) +{ + /* Pre-conditions */ + assert(PyDict_CheckExact(const_cache)); + assert(PyList_CheckExact(consts)); + assert(inst[n].i_opcode == BUILD_TUPLE); + assert(inst[n].i_oparg == n); + + for (int i = 0; i < n; i++) { + if (!HAS_CONST(inst[i].i_opcode)) { + return SUCCESS; + } + } + + /* Buildup new tuple of constants */ + PyObject *newconst = PyTuple_New(n); + if (newconst == NULL) { + return ERROR; + } + for (int i = 0; i < n; i++) { + int op = inst[i].i_opcode; + int arg = inst[i].i_oparg; + PyObject *constant = get_const_value(op, arg, consts); + if (constant == NULL) { + return ERROR; + } + PyTuple_SET_ITEM(newconst, i, constant); + } + if (_PyCompile_ConstCacheMergeOne(const_cache, &newconst) < 0) { + Py_DECREF(newconst); + return ERROR; + } + + Py_ssize_t index; + for (index = 0; index < PyList_GET_SIZE(consts); index++) { + if (PyList_GET_ITEM(consts, index) == newconst) { + break; + } + } + if (index == PyList_GET_SIZE(consts)) { + if ((size_t)index >= (size_t)INT_MAX - 1) { + Py_DECREF(newconst); + PyErr_SetString(PyExc_OverflowError, "too many constants"); + return ERROR; + } + if (PyList_Append(consts, newconst)) { + Py_DECREF(newconst); + return ERROR; + } + } + Py_DECREF(newconst); + for (int i = 0; i < n; i++) { + INSTR_SET_OP0(&inst[i], NOP); + } + INSTR_SET_OP1(&inst[n], LOAD_CONST, (int)index); + return SUCCESS; +} + +#define VISITED (-1) + +// Replace an arbitrary run of SWAPs and NOPs with an optimal one that has the +// same effect. +static int +swaptimize(basicblock *block, int *ix) +{ + // NOTE: "./python -m test test_patma" serves as a good, quick stress test + // for this function. Make sure to blow away cached *.pyc files first! + assert(*ix < block->b_iused); + cfg_instr *instructions = &block->b_instr[*ix]; + // Find the length of the current sequence of SWAPs and NOPs, and record the + // maximum depth of the stack manipulations: + assert(instructions[0].i_opcode == SWAP); + int depth = instructions[0].i_oparg; + int len = 0; + int more = false; + int limit = block->b_iused - *ix; + while (++len < limit) { + int opcode = instructions[len].i_opcode; + if (opcode == SWAP) { + depth = Py_MAX(depth, instructions[len].i_oparg); + more = true; + } + else if (opcode != NOP) { + break; + } + } + // It's already optimal if there's only one SWAP: + if (!more) { + return SUCCESS; + } + // Create an array with elements {0, 1, 2, ..., depth - 1}: + int *stack = PyMem_Malloc(depth * sizeof(int)); + if (stack == NULL) { + PyErr_NoMemory(); + return ERROR; + } + for (int i = 0; i < depth; i++) { + stack[i] = i; + } + // Simulate the combined effect of these instructions by "running" them on + // our "stack": + for (int i = 0; i < len; i++) { + if (instructions[i].i_opcode == SWAP) { + int oparg = instructions[i].i_oparg; + int top = stack[0]; + // SWAPs are 1-indexed: + stack[0] = stack[oparg - 1]; + stack[oparg - 1] = top; + } + } + // Now we can begin! Our approach here is based on a solution to a closely + // related problem (https://cs.stackexchange.com/a/13938). It's easiest to + // think of this algorithm as determining the steps needed to efficiently + // "un-shuffle" our stack. By performing the moves in *reverse* order, + // though, we can efficiently *shuffle* it! For this reason, we will be + // replacing instructions starting from the *end* of the run. 
Since the + // solution is optimal, we don't need to worry about running out of space: + int current = len - 1; + for (int i = 0; i < depth; i++) { + // Skip items that have already been visited, or just happen to be in + // the correct location: + if (stack[i] == VISITED || stack[i] == i) { + continue; + } + // Okay, we've found an item that hasn't been visited. It forms a cycle + // with other items; traversing the cycle and swapping each item with + // the next will put them all in the correct place. The weird + // loop-and-a-half is necessary to insert 0 into every cycle, since we + // can only swap from that position: + int j = i; + while (true) { + // Skip the actual swap if our item is zero, since swapping the top + // item with itself is pointless: + if (j) { + assert(0 <= current); + // SWAPs are 1-indexed: + instructions[current].i_opcode = SWAP; + instructions[current--].i_oparg = j + 1; + } + if (stack[j] == VISITED) { + // Completed the cycle: + assert(j == i); + break; + } + int next_j = stack[j]; + stack[j] = VISITED; + j = next_j; + } + } + // NOP out any unused instructions: + while (0 <= current) { + INSTR_SET_OP0(&instructions[current--], NOP); + } + PyMem_Free(stack); + *ix += len - 1; + return SUCCESS; +} + + +// This list is pretty small, since it's only okay to reorder opcodes that: +// - can't affect control flow (like jumping or raising exceptions) +// - can't invoke arbitrary code (besides finalizers) +// - only touch the TOS (and pop it when finished) +#define SWAPPABLE(opcode) \ + ((opcode) == STORE_FAST || \ + (opcode) == STORE_FAST_MAYBE_NULL || \ + (opcode) == POP_TOP) + +static int +next_swappable_instruction(basicblock *block, int i, int lineno) +{ + while (++i < block->b_iused) { + cfg_instr *instruction = &block->b_instr[i]; + if (0 <= lineno && instruction->i_loc.lineno != lineno) { + // Optimizing across this instruction could cause user-visible + // changes in the names bound between line tracing events! + return -1; + } + if (instruction->i_opcode == NOP) { + continue; + } + if (SWAPPABLE(instruction->i_opcode)) { + return i; + } + return -1; + } + return -1; +} + +// Attempt to apply SWAPs statically by swapping *instructions* rather than +// stack items. For example, we can replace SWAP(2), POP_TOP, STORE_FAST(42) +// with the more efficient NOP, STORE_FAST(42), POP_TOP. +static void +apply_static_swaps(basicblock *block, int i) +{ + // SWAPs are to our left, and potential swaperands are to our right: + for (; 0 <= i; i--) { + assert(i < block->b_iused); + cfg_instr *swap = &block->b_instr[i]; + if (swap->i_opcode != SWAP) { + if (swap->i_opcode == NOP || SWAPPABLE(swap->i_opcode)) { + // Nope, but we know how to handle these. Keep looking: + continue; + } + // We can't reason about what this instruction does. Bail: + return; + } + int j = next_swappable_instruction(block, i, -1); + if (j < 0) { + return; + } + int k = j; + int lineno = block->b_instr[j].i_loc.lineno; + for (int count = swap->i_oparg - 1; 0 < count; count--) { + k = next_swappable_instruction(block, k, lineno); + if (k < 0) { + return; + } + } + // Success! 
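/* A standalone sketch (not part of the patch) of the cycle-decomposition idea
 * behind swaptimize() above: given the net permutation left by a run of SWAPs,
 * emit one SWAP per non-trivial cycle step, filling the output back to front
 * so that forward execution reproduces the permutation.  All names and the
 * example permutation are invented for illustration. */
#include <assert.h>
#include <stdio.h>

#define DEPTH 4
#define VISITED (-1)

int main(void)
{
    /* Desired net effect: position i ends up holding original item perm[i]. */
    int perm[DEPTH] = {2, 0, 1, 3};
    int work[DEPTH], ops[DEPTH];

    for (int i = 0; i < DEPTH; i++) {
        work[i] = perm[i];
    }
    /* Generate SWAPs cycle by cycle, writing them back to front. */
    int current = DEPTH - 1;
    for (int i = 0; i < DEPTH; i++) {
        if (work[i] == VISITED || work[i] == i) {
            continue;
        }
        int j = i;
        while (1) {
            if (j != 0) {
                ops[current--] = j + 1;     /* SWAPs are 1-indexed */
            }
            if (work[j] == VISITED) {
                break;                      /* cycle closed */
            }
            int next_j = work[j];
            work[j] = VISITED;
            j = next_j;
        }
    }
    int nops = DEPTH - 1 - current;

    /* Check: applying the SWAPs in forward order to an identity stack
     * reproduces the requested permutation. */
    int stack[DEPTH] = {0, 1, 2, 3};
    for (int k = current + 1; k <= DEPTH - 1; k++) {
        int oparg = ops[k];
        int top = stack[0];
        stack[0] = stack[oparg - 1];
        stack[oparg - 1] = top;
        printf("SWAP(%d)\n", oparg);
    }
    for (int i = 0; i < DEPTH; i++) {
        assert(stack[i] == perm[i]);
    }
    printf("%d swaps suffice\n", nops);     /* prints 2 */
    return 0;
}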
+ INSTR_SET_OP0(swap, NOP); + cfg_instr temp = block->b_instr[j]; + block->b_instr[j] = block->b_instr[k]; + block->b_instr[k] = temp; + } +} + +static int +optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) +{ + assert(PyDict_CheckExact(const_cache)); + assert(PyList_CheckExact(consts)); + cfg_instr nop; + INSTR_SET_OP0(&nop, NOP); + cfg_instr *target = &nop; + int opcode = 0; + int oparg = 0; + int nextop = 0; + for (int i = 0; i < bb->b_iused; i++) { + cfg_instr *inst = &bb->b_instr[i]; + bool is_copy_of_load_const = (opcode == LOAD_CONST && + inst->i_opcode == COPY && + inst->i_oparg == 1); + if (! is_copy_of_load_const) { + opcode = inst->i_opcode; + oparg = inst->i_oparg; + if (HAS_TARGET(opcode)) { + assert(inst->i_target->b_iused > 0); + target = &inst->i_target->b_instr[0]; + assert(!IS_ASSEMBLER_OPCODE(target->i_opcode)); + } + else { + target = &nop; + } + } + nextop = i+1 < bb->b_iused ? bb->b_instr[i+1].i_opcode : 0; + assert(!IS_ASSEMBLER_OPCODE(opcode)); + switch (opcode) { + /* Remove LOAD_CONST const; conditional jump */ + case LOAD_CONST: + { + PyObject* cnt; + int is_true; + int jump_if_true; + switch(nextop) { + case POP_JUMP_IF_FALSE: + case POP_JUMP_IF_TRUE: + cnt = get_const_value(opcode, oparg, consts); + if (cnt == NULL) { + goto error; + } + is_true = PyObject_IsTrue(cnt); + Py_DECREF(cnt); + if (is_true == -1) { + goto error; + } + INSTR_SET_OP0(inst, NOP); + jump_if_true = nextop == POP_JUMP_IF_TRUE; + if (is_true == jump_if_true) { + bb->b_instr[i+1].i_opcode = JUMP; + } + else { + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); + } + break; + case IS_OP: + cnt = get_const_value(opcode, oparg, consts); + if (cnt == NULL) { + goto error; + } + int jump_op = i+2 < bb->b_iused ? bb->b_instr[i+2].i_opcode : 0; + if (Py_IsNone(cnt) && (jump_op == POP_JUMP_IF_FALSE || jump_op == POP_JUMP_IF_TRUE)) { + unsigned char nextarg = bb->b_instr[i+1].i_oparg; + INSTR_SET_OP0(inst, NOP); + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); + bb->b_instr[i+2].i_opcode = nextarg ^ (jump_op == POP_JUMP_IF_FALSE) ? + POP_JUMP_IF_NOT_NONE : POP_JUMP_IF_NONE; + } + Py_DECREF(cnt); + break; + case RETURN_VALUE: + INSTR_SET_OP0(inst, NOP); + INSTR_SET_OP1(&bb->b_instr[++i], RETURN_CONST, oparg); + break; + } + break; + } + /* Try to fold tuples of constants. + Skip over BUILD_TUPLE(1) UNPACK_SEQUENCE(1). + Replace BUILD_TUPLE(2) UNPACK_SEQUENCE(2) with SWAP(2). + Replace BUILD_TUPLE(3) UNPACK_SEQUENCE(3) with SWAP(3). 
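/* A standalone sketch (not part of the patch) of the LOAD_CONST + conditional
 * jump fold handled above: when the tested value is a compile-time constant
 * its truthiness is known, so the pair becomes NOP + JUMP or NOP + NOP.  Toy
 * opcodes and integer "constants" are invented for illustration. */
#include <stdio.h>

enum { NOP, LOAD_CONST, POP_JUMP_IF_TRUE, POP_JUMP_IF_FALSE, JUMP, RET };
typedef struct { int opcode; int oparg; } toy_instr;

static void
fold_const_jumps(toy_instr *code, int n)
{
    for (int i = 0; i + 1 < n; i++) {
        if (code[i].opcode != LOAD_CONST) {
            continue;
        }
        int nextop = code[i + 1].opcode;
        if (nextop != POP_JUMP_IF_TRUE && nextop != POP_JUMP_IF_FALSE) {
            continue;
        }
        int is_true = (code[i].oparg != 0);          /* toy truthiness */
        int jump_if_true = (nextop == POP_JUMP_IF_TRUE);
        code[i].opcode = NOP;
        /* Jump taken iff the constant matches the jump's sense. */
        code[i + 1].opcode = (is_true == jump_if_true) ? JUMP : NOP;
    }
}

int main(void)
{
    toy_instr code[] = {
        {LOAD_CONST, 1}, {POP_JUMP_IF_TRUE, 4},   /* always taken -> JUMP */
        {LOAD_CONST, 0}, {POP_JUMP_IF_TRUE, 4},   /* never taken  -> NOP  */
        {RET, 0},
    };
    fold_const_jumps(code, 5);
    for (int i = 0; i < 5; i++) {
        printf("%d: opcode %d\n", i, code[i].opcode);
    }
    /* result: NOP, JUMP, NOP, NOP, RET */
    return 0;
}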
*/ + case BUILD_TUPLE: + if (nextop == UNPACK_SEQUENCE && oparg == bb->b_instr[i+1].i_oparg) { + switch(oparg) { + case 1: + INSTR_SET_OP0(inst, NOP); + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); + continue; + case 2: + case 3: + INSTR_SET_OP0(inst, NOP); + bb->b_instr[i+1].i_opcode = SWAP; + continue; + } + } + if (i >= oparg) { + if (fold_tuple_on_constants(const_cache, inst-oparg, oparg, consts)) { + goto error; + } + } + break; + case POP_JUMP_IF_NOT_NONE: + case POP_JUMP_IF_NONE: + switch (target->i_opcode) { + case JUMP: + i -= jump_thread(inst, target, inst->i_opcode); + } + break; + case POP_JUMP_IF_FALSE: + switch (target->i_opcode) { + case JUMP: + i -= jump_thread(inst, target, POP_JUMP_IF_FALSE); + } + break; + case POP_JUMP_IF_TRUE: + switch (target->i_opcode) { + case JUMP: + i -= jump_thread(inst, target, POP_JUMP_IF_TRUE); + } + break; + case JUMP: + switch (target->i_opcode) { + case JUMP: + i -= jump_thread(inst, target, JUMP); + } + break; + case FOR_ITER: + if (target->i_opcode == JUMP) { + /* This will not work now because the jump (at target) could + * be forward or backward and FOR_ITER only jumps forward. We + * can re-enable this if ever we implement a backward version + * of FOR_ITER. + */ + /* + i -= jump_thread(inst, target, FOR_ITER); + */ + } + break; + case SWAP: + if (oparg == 1) { + INSTR_SET_OP0(inst, NOP); + break; + } + if (swaptimize(bb, &i) < 0) { + goto error; + } + apply_static_swaps(bb, i); + break; + case KW_NAMES: + break; + case PUSH_NULL: + if (nextop == LOAD_GLOBAL && (inst[1].i_opcode & 1) == 0) { + INSTR_SET_OP0(inst, NOP); + inst[1].i_oparg |= 1; + } + break; + default: + /* All HAS_CONST opcodes should be handled with LOAD_CONST */ + assert (!HAS_CONST(inst->i_opcode)); + } + } + return SUCCESS; +error: + return ERROR; +} + + +/* Perform optimizations on a control flow graph. + The consts object should still be in list form to allow new constants + to be appended. + + Code trasnformations that reduce code size initially fill the gaps with + NOPs. Later those NOPs are removed. +*/ +static int +optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache) +{ + assert(PyDict_CheckExact(const_cache)); + RETURN_IF_ERROR(check_cfg(g)); + eliminate_empty_basic_blocks(g); + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + RETURN_IF_ERROR(inline_small_exit_blocks(b)); + } + assert(no_empty_basic_blocks(g)); + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + RETURN_IF_ERROR(optimize_basic_block(const_cache, b, consts)); + assert(b->b_predecessors == 0); + } + RETURN_IF_ERROR(remove_redundant_nops_and_pairs(g->g_entryblock)); + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + RETURN_IF_ERROR(inline_small_exit_blocks(b)); + } + RETURN_IF_ERROR(mark_reachable(g->g_entryblock)); + + /* Delete unreachable instructions */ + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + if (b->b_predecessors == 0) { + b->b_iused = 0; + } + } + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + remove_redundant_nops(b); + } + eliminate_empty_basic_blocks(g); + assert(no_redundant_nops(g)); + RETURN_IF_ERROR(remove_redundant_jumps(g)); + return SUCCESS; +} + +// helper functions for add_checks_for_loads_of_unknown_variables +static inline void +maybe_push(basicblock *b, uint64_t unsafe_mask, basicblock ***sp) +{ + // Push b if the unsafe mask is giving us any new information. + // To avoid overflowing the stack, only allow each block once. 
+ // Use b->b_visited=1 to mean that b is currently on the stack. + uint64_t both = b->b_unsafe_locals_mask | unsafe_mask; + if (b->b_unsafe_locals_mask != both) { + b->b_unsafe_locals_mask = both; + // More work left to do. + if (!b->b_visited) { + // not on the stack, so push it. + *(*sp)++ = b; + b->b_visited = 1; + } + } +} + +static void +scan_block_for_locals(basicblock *b, basicblock ***sp) +{ + // bit i is set if local i is potentially uninitialized + uint64_t unsafe_mask = b->b_unsafe_locals_mask; + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + assert(instr->i_opcode != EXTENDED_ARG); + assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); + if (instr->i_except != NULL) { + maybe_push(instr->i_except, unsafe_mask, sp); + } + if (instr->i_oparg >= 64) { + continue; + } + assert(instr->i_oparg >= 0); + uint64_t bit = (uint64_t)1 << instr->i_oparg; + switch (instr->i_opcode) { + case DELETE_FAST: + case LOAD_FAST_AND_CLEAR: + case STORE_FAST_MAYBE_NULL: + unsafe_mask |= bit; + break; + case STORE_FAST: + unsafe_mask &= ~bit; + break; + case LOAD_FAST_CHECK: + // If this doesn't raise, then the local is defined. + unsafe_mask &= ~bit; + break; + case LOAD_FAST: + if (unsafe_mask & bit) { + instr->i_opcode = LOAD_FAST_CHECK; + } + unsafe_mask &= ~bit; + break; + } + } + if (b->b_next && BB_HAS_FALLTHROUGH(b)) { + maybe_push(b->b_next, unsafe_mask, sp); + } + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + if (last && is_jump(last)) { + assert(last->i_target != NULL); + maybe_push(last->i_target, unsafe_mask, sp); + } +} + +static int +fast_scan_many_locals(basicblock *entryblock, int nlocals) +{ + assert(nlocals > 64); + Py_ssize_t *states = PyMem_Calloc(nlocals - 64, sizeof(Py_ssize_t)); + if (states == NULL) { + PyErr_NoMemory(); + return ERROR; + } + Py_ssize_t blocknum = 0; + // state[i - 64] == blocknum if local i is guaranteed to + // be initialized, i.e., if it has had a previous LOAD_FAST or + // STORE_FAST within that basicblock (not followed by DELETE_FAST). + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + blocknum++; + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + assert(instr->i_opcode != EXTENDED_ARG); + assert(!IS_SUPERINSTRUCTION_OPCODE(instr->i_opcode)); + int arg = instr->i_oparg; + if (arg < 64) { + continue; + } + assert(arg >= 0); + switch (instr->i_opcode) { + case DELETE_FAST: + case LOAD_FAST_AND_CLEAR: + case STORE_FAST_MAYBE_NULL: + states[arg - 64] = blocknum - 1; + break; + case STORE_FAST: + states[arg - 64] = blocknum; + break; + case LOAD_FAST: + if (states[arg - 64] != blocknum) { + instr->i_opcode = LOAD_FAST_CHECK; + } + states[arg - 64] = blocknum; + break; + Py_UNREACHABLE(); + } + } + } + PyMem_Free(states); + return SUCCESS; +} + +static int +remove_unused_consts(basicblock *entryblock, PyObject *consts) +{ + assert(PyList_CheckExact(consts)); + Py_ssize_t nconsts = PyList_GET_SIZE(consts); + if (nconsts == 0) { + return SUCCESS; /* nothing to do */ + } + + Py_ssize_t *index_map = NULL; + Py_ssize_t *reverse_index_map = NULL; + int err = ERROR; + + index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); + if (index_map == NULL) { + goto end; + } + for (Py_ssize_t i = 1; i < nconsts; i++) { + index_map[i] = -1; + } + // The first constant may be docstring; keep it always. 
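/* A standalone sketch (not part of the patch) of the const-condensing scheme
 * implemented by remove_unused_consts() here: mark used indices, pack the used
 * constants to the front, and renumber each LOAD_CONST through a reverse map.
 * Toy types (int constants, {opcode, oparg} instructions) are invented. */
#include <assert.h>
#include <stdio.h>

enum { NOP, LOAD_CONST };
typedef struct { int opcode; int oparg; } toy_instr;

int main(void)
{
    int consts[5] = {100, 101, 102, 103, 104};
    toy_instr code[] = { {LOAD_CONST, 0}, {LOAD_CONST, 3}, {NOP, 0}, {LOAD_CONST, 3} };
    int ncode = 4, nconsts = 5;

    /* index_map[i] == i if consts[i] is used, -1 otherwise (slot 0 is
     * always kept, like a potential docstring). */
    int index_map[5] = {0, -1, -1, -1, -1};
    for (int i = 0; i < ncode; i++) {
        if (code[i].opcode == LOAD_CONST) {
            index_map[code[i].oparg] = code[i].oparg;
        }
    }
    /* Pack used constants to the front and build the reverse map. */
    int reverse_index_map[5] = {-1, -1, -1, -1, -1};
    int n_used = 0;
    for (int i = 0; i < nconsts; i++) {
        if (index_map[i] != -1) {
            consts[n_used] = consts[index_map[i]];
            reverse_index_map[index_map[i]] = n_used;
            n_used++;
        }
    }
    /* Renumber the bytecode. */
    for (int i = 0; i < ncode; i++) {
        if (code[i].opcode == LOAD_CONST) {
            code[i].oparg = reverse_index_map[code[i].oparg];
        }
    }
    assert(n_used == 2 && consts[1] == 103 && code[1].oparg == 1 && code[3].oparg == 1);
    printf("%d of %d constants kept\n", n_used, nconsts);
    return 0;
}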
+ index_map[0] = 0; + + /* mark used consts */ + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + if (HAS_CONST(b->b_instr[i].i_opcode)) { + int index = b->b_instr[i].i_oparg; + index_map[index] = index; + } + } + } + /* now index_map[i] == i if consts[i] is used, -1 otherwise */ + /* condense consts */ + Py_ssize_t n_used_consts = 0; + for (int i = 0; i < nconsts; i++) { + if (index_map[i] != -1) { + assert(index_map[i] == i); + index_map[n_used_consts++] = index_map[i]; + } + } + if (n_used_consts == nconsts) { + /* nothing to do */ + err = SUCCESS; + goto end; + } + + /* move all used consts to the beginning of the consts list */ + assert(n_used_consts < nconsts); + for (Py_ssize_t i = 0; i < n_used_consts; i++) { + Py_ssize_t old_index = index_map[i]; + assert(i <= old_index && old_index < nconsts); + if (i != old_index) { + PyObject *value = PyList_GET_ITEM(consts, index_map[i]); + assert(value != NULL); + PyList_SetItem(consts, i, Py_NewRef(value)); + } + } + + /* truncate the consts list at its new size */ + if (PyList_SetSlice(consts, n_used_consts, nconsts, NULL) < 0) { + goto end; + } + /* adjust const indices in the bytecode */ + reverse_index_map = PyMem_Malloc(nconsts * sizeof(Py_ssize_t)); + if (reverse_index_map == NULL) { + goto end; + } + for (Py_ssize_t i = 0; i < nconsts; i++) { + reverse_index_map[i] = -1; + } + for (Py_ssize_t i = 0; i < n_used_consts; i++) { + assert(index_map[i] != -1); + assert(reverse_index_map[index_map[i]] == -1); + reverse_index_map[index_map[i]] = i; + } + + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + if (HAS_CONST(b->b_instr[i].i_opcode)) { + int index = b->b_instr[i].i_oparg; + assert(reverse_index_map[index] >= 0); + assert(reverse_index_map[index] < n_used_consts); + b->b_instr[i].i_oparg = (int)reverse_index_map[index]; + } + } + } + + err = SUCCESS; +end: + PyMem_Free(index_map); + PyMem_Free(reverse_index_map); + return err; +} + + + +static int +add_checks_for_loads_of_uninitialized_variables(basicblock *entryblock, + int nlocals, + int nparams) +{ + if (nlocals == 0) { + return SUCCESS; + } + if (nlocals > 64) { + // To avoid O(nlocals**2) compilation, locals beyond the first + // 64 are only analyzed one basicblock at a time: initialization + // info is not passed between basicblocks. + if (fast_scan_many_locals(entryblock, nlocals) < 0) { + return ERROR; + } + nlocals = 64; + } + basicblock **stack = make_cfg_traversal_stack(entryblock); + if (stack == NULL) { + return ERROR; + } + basicblock **sp = stack; + + // First origin of being uninitialized: + // The non-parameter locals in the entry block. + uint64_t start_mask = 0; + for (int i = nparams; i < nlocals; i++) { + start_mask |= (uint64_t)1 << i; + } + maybe_push(entryblock, start_mask, &sp); + + // Second origin of being uninitialized: + // There could be DELETE_FAST somewhere, so + // be sure to scan each basicblock at least once. + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + scan_block_for_locals(b, &sp); + } + // Now propagate the uncertainty from the origins we found: Use + // LOAD_FAST_CHECK for any LOAD_FAST where the local could be undefined. 
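/* A standalone sketch (not part of the patch) of the per-local "unsafe"
 * bitmask used by this pass: bit i is set while local i may be uninitialized;
 * stores clear the bit, deletes set it, and a load made while the bit is set
 * becomes a checked load.  Toy opcodes and a single straight-line block are
 * invented; the real pass also propagates masks between blocks with a
 * worklist. */
#include <stdint.h>
#include <stdio.h>

enum { LOAD_FAST, LOAD_FAST_CHECK, STORE_FAST, DELETE_FAST };
typedef struct { int opcode; int oparg; } toy_instr;

int main(void)
{
    int nlocals = 4, nparams = 1;
    toy_instr block[] = {
        {LOAD_FAST, 0},     /* parameter: always safe */
        {LOAD_FAST, 1},     /* never stored yet -> needs a check */
        {STORE_FAST, 2},
        {LOAD_FAST, 2},     /* stored above -> safe */
        {DELETE_FAST, 2},
        {LOAD_FAST, 2},     /* deleted above -> needs a check again */
    };
    int n = (int)(sizeof(block) / sizeof(block[0]));

    /* Everything except the parameters starts out potentially unset. */
    uint64_t unsafe_mask = 0;
    for (int i = nparams; i < nlocals; i++) {
        unsafe_mask |= (uint64_t)1 << i;
    }
    for (int i = 0; i < n; i++) {
        uint64_t bit = (uint64_t)1 << block[i].oparg;
        switch (block[i].opcode) {
        case STORE_FAST:
            unsafe_mask &= ~bit;
            break;
        case DELETE_FAST:
            unsafe_mask |= bit;
            break;
        case LOAD_FAST:
            if (unsafe_mask & bit) {
                block[i].opcode = LOAD_FAST_CHECK;
            }
            /* if this load did not raise, the local is now known to be set */
            unsafe_mask &= ~bit;
            break;
        }
    }
    for (int i = 0; i < n; i++) {
        printf("%d: opcode %d\n", i, block[i].opcode);
    }
    /* the loads at positions 1 and 5 become LOAD_FAST_CHECK */
    return 0;
}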
+ while (sp > stack) { + basicblock *b = *--sp; + // mark as no longer on stack + b->b_visited = 0; + scan_block_for_locals(b, &sp); + } + PyMem_Free(stack); + return SUCCESS; +} + + +static int +mark_warm(basicblock *entryblock) { + basicblock **stack = make_cfg_traversal_stack(entryblock); + if (stack == NULL) { + return ERROR; + } + basicblock **sp = stack; + + *sp++ = entryblock; + entryblock->b_visited = 1; + while (sp > stack) { + basicblock *b = *(--sp); + assert(!b->b_except_handler); + b->b_warm = 1; + basicblock *next = b->b_next; + if (next && BB_HAS_FALLTHROUGH(b) && !next->b_visited) { + *sp++ = next; + next->b_visited = 1; + } + for (int i=0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (is_jump(instr) && !instr->i_target->b_visited) { + *sp++ = instr->i_target; + instr->i_target->b_visited = 1; + } + } + } + PyMem_Free(stack); + return SUCCESS; +} + +static int +mark_cold(basicblock *entryblock) { + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + assert(!b->b_cold && !b->b_warm); + } + if (mark_warm(entryblock) < 0) { + return ERROR; + } + + basicblock **stack = make_cfg_traversal_stack(entryblock); + if (stack == NULL) { + return ERROR; + } + + basicblock **sp = stack; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + if (b->b_except_handler) { + assert(!b->b_warm); + *sp++ = b; + b->b_visited = 1; + } + } + + while (sp > stack) { + basicblock *b = *(--sp); + b->b_cold = 1; + basicblock *next = b->b_next; + if (next && BB_HAS_FALLTHROUGH(b)) { + if (!next->b_warm && !next->b_visited) { + *sp++ = next; + next->b_visited = 1; + } + } + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (is_jump(instr)) { + assert(i == b->b_iused - 1); + basicblock *target = b->b_instr[i].i_target; + if (!target->b_warm && !target->b_visited) { + *sp++ = target; + target->b_visited = 1; + } + } + } + } + PyMem_Free(stack); + return SUCCESS; +} + + +static int +push_cold_blocks_to_end(cfg_builder *g, int code_flags) { + basicblock *entryblock = g->g_entryblock; + if (entryblock->b_next == NULL) { + /* single basicblock, no need to reorder */ + return SUCCESS; + } + RETURN_IF_ERROR(mark_cold(entryblock)); + + /* If we have a cold block with fallthrough to a warm block, add */ + /* an explicit jump instead of fallthrough */ + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + if (b->b_cold && BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_next->b_warm) { + basicblock *explicit_jump = cfg_builder_new_block(g); + if (explicit_jump == NULL) { + return ERROR; + } + basicblock_addop(explicit_jump, JUMP, b->b_next->b_label.id, NO_LOCATION); + explicit_jump->b_cold = 1; + explicit_jump->b_next = b->b_next; + b->b_next = explicit_jump; + + /* set target */ + cfg_instr *last = _PyCfg_BasicblockLastInstr(explicit_jump); + last->i_target = explicit_jump->b_next; + } + } + + assert(!entryblock->b_cold); /* First block can't be cold */ + basicblock *cold_blocks = NULL; + basicblock *cold_blocks_tail = NULL; + + basicblock *b = entryblock; + while(b->b_next) { + assert(!b->b_cold); + while (b->b_next && !b->b_next->b_cold) { + b = b->b_next; + } + if (b->b_next == NULL) { + /* no more cold blocks */ + break; + } + + /* b->b_next is the beginning of a cold streak */ + assert(!b->b_cold && b->b_next->b_cold); + + basicblock *b_end = b->b_next; + while (b_end->b_next && b_end->b_next->b_cold) { + b_end = b_end->b_next; + } + + /* b_end is the end of the cold streak */ + assert(b_end && b_end->b_cold); + assert(b_end->b_next == 
NULL || !b_end->b_next->b_cold); + + if (cold_blocks == NULL) { + cold_blocks = b->b_next; + } + else { + cold_blocks_tail->b_next = b->b_next; + } + cold_blocks_tail = b_end; + b->b_next = b_end->b_next; + b_end->b_next = NULL; + } + assert(b != NULL && b->b_next == NULL); + b->b_next = cold_blocks; + + if (cold_blocks != NULL) { + RETURN_IF_ERROR(remove_redundant_jumps(g)); + } + return SUCCESS; +} + +void +_PyCfg_ConvertPseudoOps(basicblock *entryblock) +{ + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (is_block_push(instr) || instr->i_opcode == POP_BLOCK) { + INSTR_SET_OP0(instr, NOP); + } else if (instr->i_opcode == STORE_FAST_MAYBE_NULL) { + instr->i_opcode = STORE_FAST; + } + } + } + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + remove_redundant_nops(b); + } +} + +static inline bool +is_exit_without_lineno(basicblock *b) { + if (!basicblock_exits_scope(b)) { + return false; + } + for (int i = 0; i < b->b_iused; i++) { + if (b->b_instr[i].i_loc.lineno >= 0) { + return false; + } + } + return true; +} + +/* PEP 626 mandates that the f_lineno of a frame is correct + * after a frame terminates. It would be prohibitively expensive + * to continuously update the f_lineno field at runtime, + * so we make sure that all exiting instruction (raises and returns) + * have a valid line number, allowing us to compute f_lineno lazily. + * We can do this by duplicating the exit blocks without line number + * so that none have more than one predecessor. We can then safely + * copy the line number from the sole predecessor block. + */ +static int +duplicate_exits_without_lineno(cfg_builder *g) +{ + assert(no_empty_basic_blocks(g)); + /* Copy all exit blocks without line number that are targets of a jump. + */ + basicblock *entryblock = g->g_entryblock; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + assert(last != NULL); + if (is_jump(last)) { + basicblock *target = last->i_target; + if (is_exit_without_lineno(target) && target->b_predecessors > 1) { + basicblock *new_target = copy_basicblock(g, target); + if (new_target == NULL) { + return ERROR; + } + new_target->b_instr[0].i_loc = last->i_loc; + last->i_target = new_target; + target->b_predecessors--; + new_target->b_predecessors = 1; + new_target->b_next = target->b_next; + target->b_next = new_target; + } + } + } + + /* Any remaining reachable exit blocks without line number can only be reached by + * fall through, and thus can only have a single predecessor */ + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + if (BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_iused > 0) { + if (is_exit_without_lineno(b->b_next)) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + assert(last != NULL); + b->b_next->b_instr[0].i_loc = last->i_loc; + } + } + } + return SUCCESS; +} + + +/* If an instruction has no line number, but it's predecessor in the BB does, + * then copy the line number. If a successor block has no line number, and only + * one predecessor, then inherit the line number. + * This ensures that all exit blocks (with one predecessor) receive a line number. + * Also reduces the size of the line number table, + * but has no impact on the generated line number events. 
+ */ +static void +propagate_line_numbers(basicblock *entryblock) { + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + if (last == NULL) { + continue; + } + + location prev_location = NO_LOCATION; + for (int i = 0; i < b->b_iused; i++) { + if (b->b_instr[i].i_loc.lineno < 0) { + b->b_instr[i].i_loc = prev_location; + } + else { + prev_location = b->b_instr[i].i_loc; + } + } + if (BB_HAS_FALLTHROUGH(b) && b->b_next->b_predecessors == 1) { + assert(b->b_next->b_iused); + if (b->b_next->b_instr[0].i_loc.lineno < 0) { + b->b_next->b_instr[0].i_loc = prev_location; + } + } + if (is_jump(last)) { + basicblock *target = last->i_target; + if (target->b_predecessors == 1) { + if (target->b_instr[0].i_loc.lineno < 0) { + target->b_instr[0].i_loc = prev_location; + } + } + } + } +} + +/* Make sure that all returns have a line number, even if early passes + * have failed to propagate a correct line number. + * The resulting line number may not be correct according to PEP 626, + * but should be "good enough", and no worse than in older versions. */ +static void +guarantee_lineno_for_exits(basicblock *entryblock, int firstlineno) { + int lineno = firstlineno; + assert(lineno > 0); + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + if (last == NULL) { + continue; + } + if (last->i_loc.lineno < 0) { + if (last->i_opcode == RETURN_VALUE) { + for (int i = 0; i < b->b_iused; i++) { + assert(b->b_instr[i].i_loc.lineno < 0); + + b->b_instr[i].i_loc.lineno = lineno; + } + } + } + else { + lineno = last->i_loc.lineno; + } + } +} + +static int +resolve_line_numbers(cfg_builder *g, int firstlineno) +{ + RETURN_IF_ERROR(duplicate_exits_without_lineno(g)); + propagate_line_numbers(g->g_entryblock); + guarantee_lineno_for_exits(g->g_entryblock, firstlineno); + return SUCCESS; +} + +int +_PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache, + int code_flags, int nlocals, int nparams, int firstlineno) +{ + assert(cfg_builder_check(g)); + /** Preprocessing **/ + /* Map labels to targets and mark exception handlers */ + RETURN_IF_ERROR(translate_jump_labels_to_targets(g->g_entryblock)); + RETURN_IF_ERROR(mark_except_handlers(g->g_entryblock)); + RETURN_IF_ERROR(label_exception_targets(g->g_entryblock)); + + /** Optimization **/ + RETURN_IF_ERROR(optimize_cfg(g, consts, const_cache)); + RETURN_IF_ERROR(remove_unused_consts(g->g_entryblock, consts)); + RETURN_IF_ERROR( + add_checks_for_loads_of_uninitialized_variables( + g->g_entryblock, nlocals, nparams)); + + RETURN_IF_ERROR(push_cold_blocks_to_end(g, code_flags)); + RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno)); + return SUCCESS; +} diff --git a/Python/frozen.c b/Python/frozen.c index 48b429519b6606..6b977710e6e342 100644 --- a/Python/frozen.c +++ b/Python/frozen.c @@ -41,6 +41,29 @@ #include <stdbool.h> /* Includes for frozen modules: */ +#include "frozen_modules/importlib._bootstrap.h" +#include "frozen_modules/importlib._bootstrap_external.h" +#include "frozen_modules/zipimport.h" +#include "frozen_modules/abc.h" +#include "frozen_modules/codecs.h" +#include "frozen_modules/io.h" +#include "frozen_modules/_collections_abc.h" +#include "frozen_modules/_sitebuiltins.h" +#include "frozen_modules/genericpath.h" +#include "frozen_modules/ntpath.h" +#include "frozen_modules/posixpath.h" +#include "frozen_modules/os.h" +#include "frozen_modules/site.h" +#include "frozen_modules/stat.h" +#include 
"frozen_modules/importlib.util.h" +#include "frozen_modules/importlib.machinery.h" +#include "frozen_modules/runpy.h" +#include "frozen_modules/__hello__.h" +#include "frozen_modules/__phello__.h" +#include "frozen_modules/__phello__.ham.h" +#include "frozen_modules/__phello__.ham.eggs.h" +#include "frozen_modules/__phello__.spam.h" +#include "frozen_modules/frozen_only.h" /* End includes */ #define GET_CODE(name) _Py_get_##name##_toplevel @@ -78,46 +101,46 @@ extern PyObject *_Py_get_frozen_only_toplevel(void); /* End extern declarations */ static const struct _frozen bootstrap_modules[] = { - {"_frozen_importlib", NULL, 0, false, GET_CODE(importlib__bootstrap)}, - {"_frozen_importlib_external", NULL, 0, false, GET_CODE(importlib__bootstrap_external)}, - {"zipimport", NULL, 0, false, GET_CODE(zipimport)}, + {"_frozen_importlib", _Py_M__importlib__bootstrap, (int)sizeof(_Py_M__importlib__bootstrap), false, GET_CODE(importlib__bootstrap)}, + {"_frozen_importlib_external", _Py_M__importlib__bootstrap_external, (int)sizeof(_Py_M__importlib__bootstrap_external), false, GET_CODE(importlib__bootstrap_external)}, + {"zipimport", _Py_M__zipimport, (int)sizeof(_Py_M__zipimport), false, GET_CODE(zipimport)}, {0, 0, 0} /* bootstrap sentinel */ }; static const struct _frozen stdlib_modules[] = { /* stdlib - startup, without site (python -S) */ - {"abc", NULL, 0, false, GET_CODE(abc)}, - {"codecs", NULL, 0, false, GET_CODE(codecs)}, - {"io", NULL, 0, false, GET_CODE(io)}, + {"abc", _Py_M__abc, (int)sizeof(_Py_M__abc), false, GET_CODE(abc)}, + {"codecs", _Py_M__codecs, (int)sizeof(_Py_M__codecs), false, GET_CODE(codecs)}, + {"io", _Py_M__io, (int)sizeof(_Py_M__io), false, GET_CODE(io)}, /* stdlib - startup, with site */ - {"_collections_abc", NULL, 0, false, GET_CODE(_collections_abc)}, - {"_sitebuiltins", NULL, 0, false, GET_CODE(_sitebuiltins)}, - {"genericpath", NULL, 0, false, GET_CODE(genericpath)}, - {"ntpath", NULL, 0, false, GET_CODE(ntpath)}, - {"posixpath", NULL, 0, false, GET_CODE(posixpath)}, - {"os.path", NULL, 0, false, GET_CODE(posixpath)}, - {"os", NULL, 0, false, GET_CODE(os)}, - {"site", NULL, 0, false, GET_CODE(site)}, - {"stat", NULL, 0, false, GET_CODE(stat)}, + {"_collections_abc", _Py_M___collections_abc, (int)sizeof(_Py_M___collections_abc), false, GET_CODE(_collections_abc)}, + {"_sitebuiltins", _Py_M___sitebuiltins, (int)sizeof(_Py_M___sitebuiltins), false, GET_CODE(_sitebuiltins)}, + {"genericpath", _Py_M__genericpath, (int)sizeof(_Py_M__genericpath), false, GET_CODE(genericpath)}, + {"ntpath", _Py_M__ntpath, (int)sizeof(_Py_M__ntpath), false, GET_CODE(ntpath)}, + {"posixpath", _Py_M__posixpath, (int)sizeof(_Py_M__posixpath), false, GET_CODE(posixpath)}, + {"os.path", _Py_M__posixpath, (int)sizeof(_Py_M__posixpath), false, GET_CODE(posixpath)}, + {"os", _Py_M__os, (int)sizeof(_Py_M__os), false, GET_CODE(os)}, + {"site", _Py_M__site, (int)sizeof(_Py_M__site), false, GET_CODE(site)}, + {"stat", _Py_M__stat, (int)sizeof(_Py_M__stat), false, GET_CODE(stat)}, /* runpy - run module with -m */ - {"importlib.util", NULL, 0, false, GET_CODE(importlib_util)}, - {"importlib.machinery", NULL, 0, false, GET_CODE(importlib_machinery)}, - {"runpy", NULL, 0, false, GET_CODE(runpy)}, + {"importlib.util", _Py_M__importlib_util, (int)sizeof(_Py_M__importlib_util), false, GET_CODE(importlib_util)}, + {"importlib.machinery", _Py_M__importlib_machinery, (int)sizeof(_Py_M__importlib_machinery), false, GET_CODE(importlib_machinery)}, + {"runpy", _Py_M__runpy, (int)sizeof(_Py_M__runpy), false, 
GET_CODE(runpy)}, {0, 0, 0} /* stdlib sentinel */ }; static const struct _frozen test_modules[] = { - {"__hello__", NULL, 0, false, GET_CODE(__hello__)}, - {"__hello_alias__", NULL, 0, false, GET_CODE(__hello__)}, - {"__phello_alias__", NULL, 0, true, GET_CODE(__hello__)}, - {"__phello_alias__.spam", NULL, 0, false, GET_CODE(__hello__)}, - {"__phello__", NULL, 0, true, GET_CODE(__phello__)}, - {"__phello__.__init__", NULL, 0, false, GET_CODE(__phello__)}, - {"__phello__.ham", NULL, 0, true, GET_CODE(__phello___ham)}, - {"__phello__.ham.__init__", NULL, 0, false, GET_CODE(__phello___ham)}, - {"__phello__.ham.eggs", NULL, 0, false, GET_CODE(__phello___ham_eggs)}, - {"__phello__.spam", NULL, 0, false, GET_CODE(__phello___spam)}, - {"__hello_only__", NULL, 0, false, GET_CODE(frozen_only)}, + {"__hello__", _Py_M____hello__, (int)sizeof(_Py_M____hello__), false, GET_CODE(__hello__)}, + {"__hello_alias__", _Py_M____hello__, (int)sizeof(_Py_M____hello__), false, GET_CODE(__hello__)}, + {"__phello_alias__", _Py_M____hello__, (int)sizeof(_Py_M____hello__), true, GET_CODE(__hello__)}, + {"__phello_alias__.spam", _Py_M____hello__, (int)sizeof(_Py_M____hello__), false, GET_CODE(__hello__)}, + {"__phello__", _Py_M____phello__, (int)sizeof(_Py_M____phello__), true, GET_CODE(__phello__)}, + {"__phello__.__init__", _Py_M____phello__, (int)sizeof(_Py_M____phello__), false, GET_CODE(__phello__)}, + {"__phello__.ham", _Py_M____phello___ham, (int)sizeof(_Py_M____phello___ham), true, GET_CODE(__phello___ham)}, + {"__phello__.ham.__init__", _Py_M____phello___ham, (int)sizeof(_Py_M____phello___ham), false, GET_CODE(__phello___ham)}, + {"__phello__.ham.eggs", _Py_M____phello___ham_eggs, (int)sizeof(_Py_M____phello___ham_eggs), false, GET_CODE(__phello___ham_eggs)}, + {"__phello__.spam", _Py_M____phello___spam, (int)sizeof(_Py_M____phello___spam), false, GET_CODE(__phello___spam)}, + {"__hello_only__", _Py_M__frozen_only, (int)sizeof(_Py_M__frozen_only), false, GET_CODE(frozen_only)}, {0, 0, 0} /* test sentinel */ }; const struct _frozen *_PyImport_FrozenBootstrap = bootstrap_modules; diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index b493f5858bc119..819c857c3c0119 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -8,24 +8,61 @@ } TARGET(RESUME) { - #line 89 "Python/bytecodes.c" + #line 137 "Python/bytecodes.c" assert(tstate->cframe == &cframe); assert(frame == cframe.current_frame); - if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + /* Possibly combine this with eval breaker */ + if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) { + int err = _Py_Instrument(frame->f_code, tstate->interp); + if (err) goto error; + next_instr--; + } + else if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { goto handle_eval_breaker; } - #line 18 "Python/generated_cases.c.h" + #line 24 "Python/generated_cases.c.h" + DISPATCH(); + } + + TARGET(INSTRUMENTED_RESUME) { + #line 151 "Python/bytecodes.c" + /* Possible performance enhancement: + * We need to check the eval breaker anyway, can we + * combine the instrument verison check and the eval breaker test? 
+ */ + if (frame->f_code->_co_instrumentation_version != tstate->interp->monitoring_version) { + if (_Py_Instrument(frame->f_code, tstate->interp)) { + goto error; + } + next_instr--; + } + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation( + tstate, oparg > 0, frame, next_instr-1); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) goto error; + if (frame->prev_instr != next_instr-1) { + /* Instrumentation has jumped */ + next_instr = frame->prev_instr; + DISPATCH(); + } + if (_Py_atomic_load_relaxed_int32(eval_breaker) && oparg < 2) { + goto handle_eval_breaker; + } + } + #line 55 "Python/generated_cases.c.h" DISPATCH(); } TARGET(LOAD_CLOSURE) { PyObject *value; - #line 97 "Python/bytecodes.c" + #line 179 "Python/bytecodes.c" /* We keep LOAD_CLOSURE so that the bytecode stays more readable. */ value = GETLOCAL(oparg); if (value == NULL) goto unbound_local_error; Py_INCREF(value); - #line 29 "Python/generated_cases.c.h" + #line 66 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -33,11 +70,11 @@ TARGET(LOAD_FAST_CHECK) { PyObject *value; - #line 104 "Python/bytecodes.c" + #line 186 "Python/bytecodes.c" value = GETLOCAL(oparg); if (value == NULL) goto unbound_local_error; Py_INCREF(value); - #line 41 "Python/generated_cases.c.h" + #line 78 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -45,11 +82,11 @@ TARGET(LOAD_FAST) { PyObject *value; - #line 110 "Python/bytecodes.c" + #line 192 "Python/bytecodes.c" value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 53 "Python/generated_cases.c.h" + #line 90 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -57,11 +94,11 @@ TARGET(LOAD_FAST_AND_CLEAR) { PyObject *value; - #line 116 "Python/bytecodes.c" + #line 198 "Python/bytecodes.c" value = GETLOCAL(oparg); // do not use SETLOCAL here, it decrefs the old value GETLOCAL(oparg) = NULL; - #line 65 "Python/generated_cases.c.h" + #line 102 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -70,10 +107,10 @@ TARGET(LOAD_CONST) { PREDICTED(LOAD_CONST); PyObject *value; - #line 122 "Python/bytecodes.c" + #line 204 "Python/bytecodes.c" value = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(value); - #line 77 "Python/generated_cases.c.h" + #line 114 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -81,9 +118,9 @@ TARGET(STORE_FAST) { PyObject *value = stack_pointer[-1]; - #line 127 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 87 "Python/generated_cases.c.h" + #line 124 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } @@ -93,21 +130,21 @@ PyObject *_tmp_2; { PyObject *value; - #line 110 "Python/bytecodes.c" + #line 192 "Python/bytecodes.c" value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 101 "Python/generated_cases.c.h" + #line 138 "Python/generated_cases.c.h" _tmp_2 = value; } oparg = (next_instr++)->op.arg; { PyObject *value; - #line 110 "Python/bytecodes.c" + #line 192 "Python/bytecodes.c" value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 111 "Python/generated_cases.c.h" + #line 148 "Python/generated_cases.c.h" _tmp_1 = value; } STACK_GROW(2); @@ -121,20 +158,20 @@ PyObject *_tmp_2; { PyObject *value; - #line 110 "Python/bytecodes.c" + #line 192 "Python/bytecodes.c" value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 129 
"Python/generated_cases.c.h" + #line 166 "Python/generated_cases.c.h" _tmp_2 = value; } oparg = (next_instr++)->op.arg; { PyObject *value; - #line 122 "Python/bytecodes.c" + #line 204 "Python/bytecodes.c" value = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(value); - #line 138 "Python/generated_cases.c.h" + #line 175 "Python/generated_cases.c.h" _tmp_1 = value; } STACK_GROW(2); @@ -147,18 +184,18 @@ PyObject *_tmp_1 = stack_pointer[-1]; { PyObject *value = _tmp_1; - #line 127 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 153 "Python/generated_cases.c.h" + #line 190 "Python/generated_cases.c.h" } oparg = (next_instr++)->op.arg; { PyObject *value; - #line 110 "Python/bytecodes.c" + #line 192 "Python/bytecodes.c" value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 162 "Python/generated_cases.c.h" + #line 199 "Python/generated_cases.c.h" _tmp_1 = value; } stack_pointer[-1] = _tmp_1; @@ -170,16 +207,16 @@ PyObject *_tmp_2 = stack_pointer[-2]; { PyObject *value = _tmp_1; - #line 127 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 176 "Python/generated_cases.c.h" + #line 213 "Python/generated_cases.c.h" } oparg = (next_instr++)->op.arg; { PyObject *value = _tmp_2; - #line 127 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 183 "Python/generated_cases.c.h" + #line 220 "Python/generated_cases.c.h" } STACK_SHRINK(2); DISPATCH(); @@ -190,20 +227,20 @@ PyObject *_tmp_2; { PyObject *value; - #line 122 "Python/bytecodes.c" + #line 204 "Python/bytecodes.c" value = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(value); - #line 197 "Python/generated_cases.c.h" + #line 234 "Python/generated_cases.c.h" _tmp_2 = value; } oparg = (next_instr++)->op.arg; { PyObject *value; - #line 110 "Python/bytecodes.c" + #line 192 "Python/bytecodes.c" value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 207 "Python/generated_cases.c.h" + #line 244 "Python/generated_cases.c.h" _tmp_1 = value; } STACK_GROW(2); @@ -214,8 +251,8 @@ TARGET(POP_TOP) { PyObject *value = stack_pointer[-1]; - #line 137 "Python/bytecodes.c" - #line 219 "Python/generated_cases.c.h" + #line 219 "Python/bytecodes.c" + #line 256 "Python/generated_cases.c.h" Py_DECREF(value); STACK_SHRINK(1); DISPATCH(); @@ -223,9 +260,9 @@ TARGET(PUSH_NULL) { PyObject *res; - #line 141 "Python/bytecodes.c" + #line 223 "Python/bytecodes.c" res = NULL; - #line 229 "Python/generated_cases.c.h" + #line 266 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); @@ -236,30 +273,79 @@ PyObject *_tmp_2 = stack_pointer[-2]; { PyObject *value = _tmp_1; - #line 137 "Python/bytecodes.c" - #line 241 "Python/generated_cases.c.h" + #line 219 "Python/bytecodes.c" + #line 278 "Python/generated_cases.c.h" Py_DECREF(value); } { PyObject *value = _tmp_2; - #line 137 "Python/bytecodes.c" - #line 247 "Python/generated_cases.c.h" + #line 219 "Python/bytecodes.c" + #line 284 "Python/generated_cases.c.h" Py_DECREF(value); } STACK_SHRINK(2); DISPATCH(); } + TARGET(INSTRUMENTED_END_FOR) { + PyObject *value = stack_pointer[-1]; + PyObject *receiver = stack_pointer[-2]; + #line 229 "Python/bytecodes.c" + /* Need to create a fake StopIteration error here, + * to conform to PEP 380 */ + if (PyGen_Check(receiver)) { + PyErr_SetObject(PyExc_StopIteration, value); + if (monitor_stop_iteration(tstate, frame, next_instr-1)) { + goto error; + } + PyErr_SetRaisedException(NULL); + } + #line 304 
"Python/generated_cases.c.h" + Py_DECREF(receiver); + Py_DECREF(value); + STACK_SHRINK(2); + DISPATCH(); + } + + TARGET(END_SEND) { + PyObject *value = stack_pointer[-1]; + PyObject *receiver = stack_pointer[-2]; + #line 242 "Python/bytecodes.c" + Py_DECREF(receiver); + #line 316 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = value; + DISPATCH(); + } + + TARGET(INSTRUMENTED_END_SEND) { + PyObject *value = stack_pointer[-1]; + PyObject *receiver = stack_pointer[-2]; + #line 246 "Python/bytecodes.c" + if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { + PyErr_SetObject(PyExc_StopIteration, value); + if (monitor_stop_iteration(tstate, frame, next_instr-1)) { + goto error; + } + PyErr_SetRaisedException(NULL); + } + Py_DECREF(receiver); + #line 334 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = value; + DISPATCH(); + } + TARGET(UNARY_NEGATIVE) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 147 "Python/bytecodes.c" + #line 257 "Python/bytecodes.c" res = PyNumber_Negative(value); - #line 259 "Python/generated_cases.c.h" + #line 345 "Python/generated_cases.c.h" Py_DECREF(value); - #line 149 "Python/bytecodes.c" + #line 259 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; - #line 263 "Python/generated_cases.c.h" + #line 349 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -267,11 +353,11 @@ TARGET(UNARY_NOT) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 153 "Python/bytecodes.c" + #line 263 "Python/bytecodes.c" int err = PyObject_IsTrue(value); - #line 273 "Python/generated_cases.c.h" + #line 359 "Python/generated_cases.c.h" Py_DECREF(value); - #line 155 "Python/bytecodes.c" + #line 265 "Python/bytecodes.c" if (err < 0) goto pop_1_error; if (err == 0) { res = Py_True; @@ -280,7 +366,7 @@ res = Py_False; } Py_INCREF(res); - #line 284 "Python/generated_cases.c.h" + #line 370 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -288,13 +374,13 @@ TARGET(UNARY_INVERT) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 166 "Python/bytecodes.c" + #line 276 "Python/bytecodes.c" res = PyNumber_Invert(value); - #line 294 "Python/generated_cases.c.h" + #line 380 "Python/generated_cases.c.h" Py_DECREF(value); - #line 168 "Python/bytecodes.c" + #line 278 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; - #line 298 "Python/generated_cases.c.h" + #line 384 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -303,8 +389,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *prod; - #line 185 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 295 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -312,7 +397,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (prod == NULL) goto pop_2_error; - #line 316 "Python/generated_cases.c.h" + #line 401 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = prod; next_instr += 1; @@ -323,15 +408,14 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *prod; - #line 196 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 305 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); double dprod = ((PyFloatObject *)left)->ob_fval * ((PyFloatObject 
*)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dprod, prod); - #line 335 "Python/generated_cases.c.h" + #line 419 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = prod; next_instr += 1; @@ -342,8 +426,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sub; - #line 206 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 314 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -351,7 +434,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (sub == NULL) goto pop_2_error; - #line 355 "Python/generated_cases.c.h" + #line 438 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sub; next_instr += 1; @@ -362,14 +445,13 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sub; - #line 217 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 324 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); double dsub = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dsub, sub); - #line 373 "Python/generated_cases.c.h" + #line 455 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sub; next_instr += 1; @@ -380,8 +462,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 226 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 332 "Python/bytecodes.c" DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -389,7 +470,7 @@ _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); if (res == NULL) goto pop_2_error; - #line 393 "Python/generated_cases.c.h" + #line 474 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -399,8 +480,7 @@ TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; - #line 243 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 348 "Python/bytecodes.c" DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; @@ -427,7 +507,7 @@ if (*target_local == NULL) goto pop_2_error; // The STORE_FAST is already done. 
JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); - #line 431 "Python/generated_cases.c.h" + #line 511 "Python/generated_cases.c.h" STACK_SHRINK(2); DISPATCH(); } @@ -436,15 +516,14 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sum; - #line 273 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 377 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); double dsum = ((PyFloatObject *)left)->ob_fval + ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dsum, sum); - #line 448 "Python/generated_cases.c.h" + #line 527 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sum; next_instr += 1; @@ -455,8 +534,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sum; - #line 283 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 386 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -464,7 +542,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (sum == NULL) goto pop_2_error; - #line 468 "Python/generated_cases.c.h" + #line 546 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sum; next_instr += 1; @@ -473,15 +551,14 @@ TARGET(BINARY_SUBSCR) { PREDICTED(BINARY_SUBSCR); - static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 4, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size"); PyObject *sub = stack_pointer[-1]; PyObject *container = stack_pointer[-2]; PyObject *res; - #line 302 "Python/bytecodes.c" + #line 404 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_BinarySubscr(container, sub, next_instr); DISPATCH_SAME_OPARG(); @@ -490,15 +567,15 @@ DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ res = PyObject_GetItem(container, sub); - #line 494 "Python/generated_cases.c.h" + #line 571 "Python/generated_cases.c.h" Py_DECREF(container); Py_DECREF(sub); - #line 315 "Python/bytecodes.c" + #line 416 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 499 "Python/generated_cases.c.h" + #line 576 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 1; DISPATCH(); } @@ -507,7 +584,7 @@ PyObject *start = stack_pointer[-2]; PyObject *container = stack_pointer[-3]; PyObject *res; - #line 319 "Python/bytecodes.c" + #line 420 "Python/bytecodes.c" PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); // Can't use ERROR_IF() here, because we haven't // DECREF'ed container yet, and we still own slice. 
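/* Reading the hunks above and below together: the generic BINARY_SUBSCR still
 * follows the usual adaptive pattern -- when the inline counter hits zero it
 * backs next_instr up, calls _Py_Specialize_BinarySubscr() to rewrite itself,
 * and re-dispatches with the same oparg; otherwise it decrements the counter
 * and falls through to PyObject_GetItem().  What this change adjusts is the
 * cache layout: the version words that BINARY_SUBSCR_GETITEM used to read
 * from its own cache entries (next_instr[1..3]) now come from the heap type's
 * _spec_cache (getitem / getitem_version, seen further down), so the inline
 * cache shrinks from 4 entries to 1 and the specialized variants here advance
 * next_instr by 1 instead of 4. */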
@@ -520,7 +597,7 @@ } Py_DECREF(container); if (res == NULL) goto pop_3_error; - #line 524 "Python/generated_cases.c.h" + #line 601 "Python/generated_cases.c.h" STACK_SHRINK(2); stack_pointer[-1] = res; DISPATCH(); @@ -531,7 +608,7 @@ PyObject *start = stack_pointer[-2]; PyObject *container = stack_pointer[-3]; PyObject *v = stack_pointer[-4]; - #line 334 "Python/bytecodes.c" + #line 435 "Python/bytecodes.c" PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); int err; if (slice == NULL) { @@ -544,7 +621,7 @@ Py_DECREF(v); Py_DECREF(container); if (err) goto pop_4_error; - #line 548 "Python/generated_cases.c.h" + #line 625 "Python/generated_cases.c.h" STACK_SHRINK(4); DISPATCH(); } @@ -553,14 +630,12 @@ PyObject *sub = stack_pointer[-1]; PyObject *list = stack_pointer[-2]; PyObject *res; - #line 349 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 450 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); // Deopt unless 0 <= sub < PyList_Size(list) - DEOPT_IF(!_PyLong_IsPositiveSingleDigit(sub), BINARY_SUBSCR); - assert(((PyLongObject *)_PyLong_GetZero())->long_value.ob_digit[0] == 0); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; DEOPT_IF(index >= PyList_GET_SIZE(list), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); @@ -569,10 +644,10 @@ Py_INCREF(res); _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(list); - #line 573 "Python/generated_cases.c.h" + #line 648 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 1; DISPATCH(); } @@ -580,14 +655,12 @@ PyObject *sub = stack_pointer[-1]; PyObject *tuple = stack_pointer[-2]; PyObject *res; - #line 367 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 466 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); // Deopt unless 0 <= sub < PyTuple_Size(list) - DEOPT_IF(!_PyLong_IsPositiveSingleDigit(sub), BINARY_SUBSCR); - assert(((PyLongObject *)_PyLong_GetZero())->long_value.ob_digit[0] == 0); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; DEOPT_IF(index >= PyTuple_GET_SIZE(tuple), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); @@ -596,10 +669,10 @@ Py_INCREF(res); _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(tuple); - #line 600 "Python/generated_cases.c.h" + #line 673 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 1; DISPATCH(); } @@ -607,8 +680,7 @@ PyObject *sub = stack_pointer[-1]; PyObject *dict = stack_pointer[-2]; PyObject *res; - #line 385 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 482 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); res = PyDict_GetItemWithError(dict, sub); @@ -616,35 +688,35 @@ if (!_PyErr_Occurred(tstate)) { _PyErr_SetKeyError(sub); } - #line 620 "Python/generated_cases.c.h" + #line 692 "Python/generated_cases.c.h" Py_DECREF(dict); Py_DECREF(sub); - #line 394 "Python/bytecodes.c" + #line 490 "Python/bytecodes.c" if (true) goto pop_2_error; } Py_INCREF(res); // Do this before DECREF'ing dict, sub - #line 627 "Python/generated_cases.c.h" + #line 699 "Python/generated_cases.c.h" Py_DECREF(dict); Py_DECREF(sub); STACK_SHRINK(1); 
stack_pointer[-1] = res; - next_instr += 4; + next_instr += 1; DISPATCH(); } TARGET(BINARY_SUBSCR_GETITEM) { PyObject *sub = stack_pointer[-1]; PyObject *container = stack_pointer[-2]; - uint32_t type_version = read_u32(&next_instr[1].cache); - uint16_t func_version = read_u16(&next_instr[3].cache); - #line 401 "Python/bytecodes.c" + #line 497 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(container); - DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR); - assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE); - PyObject *cached = ((PyHeapTypeObject *)tp)->_spec_cache.getitem; + DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR); + PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; + PyObject *cached = ht->_spec_cache.getitem; + DEOPT_IF(cached == NULL, BINARY_SUBSCR); assert(PyFunction_Check(cached)); PyFunctionObject *getitem = (PyFunctionObject *)cached; - DEOPT_IF(getitem->func_version != func_version, BINARY_SUBSCR); + uint32_t cached_version = ht->_spec_cache.getitem_version; + DEOPT_IF(getitem->func_version != cached_version, BINARY_SUBSCR); PyCodeObject *code = (PyCodeObject *)getitem->func_code; assert(code->co_argcount == 2); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); @@ -655,16 +727,17 @@ new_frame->localsplus[0] = container; new_frame->localsplus[1] = sub; JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 660 "Python/generated_cases.c.h" + #line 733 "Python/generated_cases.c.h" } TARGET(LIST_APPEND) { PyObject *v = stack_pointer[-1]; PyObject *list = stack_pointer[-(2 + (oparg-1))]; - #line 422 "Python/bytecodes.c" + #line 521 "Python/bytecodes.c" if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error; - #line 668 "Python/generated_cases.c.h" + #line 741 "Python/generated_cases.c.h" STACK_SHRINK(1); PREDICT(JUMP_BACKWARD); DISPATCH(); @@ -673,13 +746,13 @@ TARGET(SET_ADD) { PyObject *v = stack_pointer[-1]; PyObject *set = stack_pointer[-(2 + (oparg-1))]; - #line 427 "Python/bytecodes.c" + #line 526 "Python/bytecodes.c" int err = PySet_Add(set, v); - #line 679 "Python/generated_cases.c.h" + #line 752 "Python/generated_cases.c.h" Py_DECREF(v); - #line 429 "Python/bytecodes.c" + #line 528 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 683 "Python/generated_cases.c.h" + #line 756 "Python/generated_cases.c.h" STACK_SHRINK(1); PREDICT(JUMP_BACKWARD); DISPATCH(); @@ -692,10 +765,9 @@ PyObject *container = stack_pointer[-2]; PyObject *v = stack_pointer[-3]; uint16_t counter = read_u16(&next_instr[0].cache); - #line 440 "Python/bytecodes.c" + #line 539 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_StoreSubscr(container, sub, next_instr); DISPATCH_SAME_OPARG(); @@ -708,13 +780,13 @@ #endif /* ENABLE_SPECIALIZATION */ /* container[sub] = v */ int err = PyObject_SetItem(container, sub, v); - #line 712 "Python/generated_cases.c.h" + #line 784 "Python/generated_cases.c.h" Py_DECREF(v); Py_DECREF(container); Py_DECREF(sub); - #line 456 "Python/bytecodes.c" + #line 554 "Python/bytecodes.c" if (err) goto pop_3_error; - #line 718 "Python/generated_cases.c.h" + #line 790 "Python/generated_cases.c.h" STACK_SHRINK(3); next_instr += 1; DISPATCH(); @@ -724,13 +796,12 @@ PyObject *sub = stack_pointer[-1]; PyObject *list = stack_pointer[-2]; PyObject *value = stack_pointer[-3]; - #line 460 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 558 
"Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); // Ensure nonnegative, zero-or-one-digit ints. - DEOPT_IF(!_PyLong_IsPositiveSingleDigit(sub), STORE_SUBSCR); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), STORE_SUBSCR); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; // Ensure index < len(list) DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); @@ -742,7 +813,7 @@ Py_DECREF(old_value); _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(list); - #line 746 "Python/generated_cases.c.h" + #line 817 "Python/generated_cases.c.h" STACK_SHRINK(3); next_instr += 1; DISPATCH(); @@ -752,14 +823,13 @@ PyObject *sub = stack_pointer[-1]; PyObject *dict = stack_pointer[-2]; PyObject *value = stack_pointer[-3]; - #line 480 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 577 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); STAT_INC(STORE_SUBSCR, hit); int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); Py_DECREF(dict); if (err) goto pop_3_error; - #line 763 "Python/generated_cases.c.h" + #line 833 "Python/generated_cases.c.h" STACK_SHRINK(3); next_instr += 1; DISPATCH(); @@ -768,15 +838,15 @@ TARGET(DELETE_SUBSCR) { PyObject *sub = stack_pointer[-1]; PyObject *container = stack_pointer[-2]; - #line 489 "Python/bytecodes.c" + #line 585 "Python/bytecodes.c" /* del container[sub] */ int err = PyObject_DelItem(container, sub); - #line 775 "Python/generated_cases.c.h" + #line 845 "Python/generated_cases.c.h" Py_DECREF(container); Py_DECREF(sub); - #line 492 "Python/bytecodes.c" + #line 588 "Python/bytecodes.c" if (err) goto pop_2_error; - #line 780 "Python/generated_cases.c.h" + #line 850 "Python/generated_cases.c.h" STACK_SHRINK(2); DISPATCH(); } @@ -784,14 +854,14 @@ TARGET(CALL_INTRINSIC_1) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 496 "Python/bytecodes.c" + #line 592 "Python/bytecodes.c" assert(oparg <= MAX_INTRINSIC_1); res = _PyIntrinsics_UnaryFunctions[oparg](tstate, value); - #line 791 "Python/generated_cases.c.h" + #line 861 "Python/generated_cases.c.h" Py_DECREF(value); - #line 499 "Python/bytecodes.c" + #line 595 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; - #line 795 "Python/generated_cases.c.h" + #line 865 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -800,15 +870,15 @@ PyObject *value1 = stack_pointer[-1]; PyObject *value2 = stack_pointer[-2]; PyObject *res; - #line 503 "Python/bytecodes.c" + #line 599 "Python/bytecodes.c" assert(oparg <= MAX_INTRINSIC_2); res = _PyIntrinsics_BinaryFunctions[oparg](tstate, value2, value1); - #line 807 "Python/generated_cases.c.h" + #line 877 "Python/generated_cases.c.h" Py_DECREF(value2); Py_DECREF(value1); - #line 506 "Python/bytecodes.c" + #line 602 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 812 "Python/generated_cases.c.h" + #line 882 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; DISPATCH(); @@ -816,7 +886,7 @@ TARGET(RAISE_VARARGS) { PyObject **args = (stack_pointer - oparg); - #line 510 "Python/bytecodes.c" + #line 606 "Python/bytecodes.c" PyObject *cause = NULL, *exc = NULL; switch (oparg) { case 2: @@ -834,68 +904,109 @@ break; } if (true) { STACK_SHRINK(oparg); goto error; } - #line 838 "Python/generated_cases.c.h" + #line 908 "Python/generated_cases.c.h" } TARGET(INTERPRETER_EXIT) { PyObject *retval = stack_pointer[-1]; - #line 530 "Python/bytecodes.c" + #line 626 
"Python/bytecodes.c" assert(frame == &entry_frame); assert(_PyFrame_IsIncomplete(frame)); STACK_SHRINK(1); // Since we're not going to DISPATCH() assert(EMPTY()); /* Restore previous cframe and return. */ tstate->cframe = cframe.previous; - tstate->cframe->use_tracing = cframe.use_tracing; assert(tstate->cframe->current_frame == frame->previous); assert(!_PyErr_Occurred(tstate)); _Py_LeaveRecursiveCallTstate(tstate); return retval; - #line 855 "Python/generated_cases.c.h" + #line 924 "Python/generated_cases.c.h" } TARGET(RETURN_VALUE) { PyObject *retval = stack_pointer[-1]; - #line 544 "Python/bytecodes.c" + #line 639 "Python/bytecodes.c" STACK_SHRINK(1); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); _Py_LeaveRecursiveCallPy(tstate); assert(frame != &entry_frame); // GH-99729: We need to unlink the frame *before* clearing it: _PyInterpreterFrame *dying = frame; frame = cframe.current_frame = dying->previous; _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 874 "Python/generated_cases.c.h" + #line 942 "Python/generated_cases.c.h" + } + + TARGET(INSTRUMENTED_RETURN_VALUE) { + PyObject *retval = stack_pointer[-1]; + #line 654 "Python/bytecodes.c" + int err = _Py_call_instrumentation_arg( + tstate, PY_MONITORING_EVENT_PY_RETURN, + frame, next_instr-1, retval); + if (err) goto error; + STACK_SHRINK(1); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = cframe.current_frame = dying->previous; + _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + #line 964 "Python/generated_cases.c.h" } TARGET(RETURN_CONST) { - #line 560 "Python/bytecodes.c" + #line 673 "Python/bytecodes.c" PyObject *retval = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(retval); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); _Py_LeaveRecursiveCallPy(tstate); assert(frame != &entry_frame); // GH-99729: We need to unlink the frame *before* clearing it: _PyInterpreterFrame *dying = frame; frame = cframe.current_frame = dying->previous; _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 893 "Python/generated_cases.c.h" + #line 982 "Python/generated_cases.c.h" + } + + TARGET(INSTRUMENTED_RETURN_CONST) { + #line 689 "Python/bytecodes.c" + PyObject *retval = GETITEM(frame->f_code->co_consts, oparg); + int err = _Py_call_instrumentation_arg( + tstate, PY_MONITORING_EVENT_PY_RETURN, + frame, next_instr-1, retval); + if (err) goto error; + Py_INCREF(retval); + assert(EMPTY()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame != &entry_frame); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = cframe.current_frame = dying->previous; + _PyEvalFrameClearAndPop(tstate, dying); + frame->prev_instr += frame->return_offset; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + #line 1004 "Python/generated_cases.c.h" } TARGET(GET_AITER) { PyObject *obj = stack_pointer[-1]; PyObject *iter; - #line 577 
"Python/bytecodes.c" + #line 709 "Python/bytecodes.c" unaryfunc getter = NULL; PyTypeObject *type = Py_TYPE(obj); @@ -908,16 +1019,16 @@ "'async for' requires an object with " "__aiter__ method, got %.100s", type->tp_name); - #line 912 "Python/generated_cases.c.h" + #line 1023 "Python/generated_cases.c.h" Py_DECREF(obj); - #line 590 "Python/bytecodes.c" + #line 722 "Python/bytecodes.c" if (true) goto pop_1_error; } iter = (*getter)(obj); - #line 919 "Python/generated_cases.c.h" + #line 1030 "Python/generated_cases.c.h" Py_DECREF(obj); - #line 595 "Python/bytecodes.c" + #line 727 "Python/bytecodes.c" if (iter == NULL) goto pop_1_error; if (Py_TYPE(iter)->tp_as_async == NULL || @@ -930,7 +1041,7 @@ Py_DECREF(iter); if (true) goto pop_1_error; } - #line 934 "Python/generated_cases.c.h" + #line 1045 "Python/generated_cases.c.h" stack_pointer[-1] = iter; DISPATCH(); } @@ -938,7 +1049,7 @@ TARGET(GET_ANEXT) { PyObject *aiter = stack_pointer[-1]; PyObject *awaitable; - #line 610 "Python/bytecodes.c" + #line 742 "Python/bytecodes.c" unaryfunc getter = NULL; PyObject *next_iter = NULL; PyTypeObject *type = Py_TYPE(aiter); @@ -982,7 +1093,7 @@ } } - #line 986 "Python/generated_cases.c.h" + #line 1097 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = awaitable; PREDICT(LOAD_CONST); @@ -993,16 +1104,16 @@ PREDICTED(GET_AWAITABLE); PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 657 "Python/bytecodes.c" + #line 789 "Python/bytecodes.c" iter = _PyCoro_GetAwaitableIter(iterable); if (iter == NULL) { format_awaitable_error(tstate, Py_TYPE(iterable), oparg); } - #line 1004 "Python/generated_cases.c.h" + #line 1115 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 664 "Python/bytecodes.c" + #line 796 "Python/bytecodes.c" if (iter != NULL && PyCoro_CheckExact(iter)) { PyObject *yf = _PyGen_yf((PyGenObject*)iter); @@ -1020,7 +1131,7 @@ if (iter == NULL) goto pop_1_error; - #line 1024 "Python/generated_cases.c.h" + #line 1135 "Python/generated_cases.c.h" stack_pointer[-1] = iter; PREDICT(LOAD_CONST); DISPATCH(); @@ -1031,11 +1142,10 @@ PyObject *v = stack_pointer[-1]; PyObject *receiver = stack_pointer[-2]; PyObject *retval; - #line 690 "Python/bytecodes.c" + #line 822 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PySendCache *cache = (_PySendCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_Send(receiver, next_instr); DISPATCH_SAME_OPARG(); @@ -1044,6 +1154,20 @@ DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ assert(frame != &entry_frame); + if ((Py_TYPE(receiver) == &PyGen_Type || + Py_TYPE(receiver) == &PyCoro_Type) && ((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING) + { + PyGenObject *gen = (PyGenObject *)receiver; + _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; + frame->return_offset = oparg; + STACK_SHRINK(1); + _PyFrame_StackPush(gen_frame, v); + gen->gi_frame_state = FRAME_EXECUTING; + gen->gi_exc_state.previous_item = tstate->exc_info; + tstate->exc_info = &gen->gi_exc_state; + JUMPBY(INLINE_CACHE_ENTRIES_SEND); + DISPATCH_INLINED(gen_frame); + } if (Py_IsNone(v) && PyIter_Check(receiver)) { retval = Py_TYPE(receiver)->tp_iternext(receiver); } @@ -1051,23 +1175,20 @@ retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v); } if (retval == NULL) { - if (tstate->c_tracefunc != NULL - && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, 
frame); + if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration) + ) { + monitor_raise(tstate, frame, next_instr-1); + } if (_PyGen_FetchStopIterationValue(&retval) == 0) { assert(retval != NULL); JUMPBY(oparg); } else { - assert(retval == NULL); goto error; } } - else { - assert(retval != NULL); - } Py_DECREF(v); - #line 1071 "Python/generated_cases.c.h" + #line 1192 "Python/generated_cases.c.h" stack_pointer[-1] = retval; next_instr += 1; DISPATCH(); @@ -1076,28 +1197,49 @@ TARGET(SEND_GEN) { PyObject *v = stack_pointer[-1]; PyObject *receiver = stack_pointer[-2]; - #line 728 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 870 "Python/bytecodes.c" PyGenObject *gen = (PyGenObject *)receiver; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type && Py_TYPE(gen) != &PyCoro_Type, SEND); DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, SEND); STAT_INC(SEND, hit); _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; - frame->yield_offset = oparg; + frame->return_offset = oparg; STACK_SHRINK(1); _PyFrame_StackPush(gen_frame, v); gen->gi_frame_state = FRAME_EXECUTING; gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; - JUMPBY(INLINE_CACHE_ENTRIES_SEND + oparg); + JUMPBY(INLINE_CACHE_ENTRIES_SEND); DISPATCH_INLINED(gen_frame); - #line 1096 "Python/generated_cases.c.h" + #line 1216 "Python/generated_cases.c.h" + } + + TARGET(INSTRUMENTED_YIELD_VALUE) { + PyObject *retval = stack_pointer[-1]; + #line 887 "Python/bytecodes.c" + assert(frame != &entry_frame); + PyGenObject *gen = _PyFrame_GetGenerator(frame); + gen->gi_frame_state = FRAME_SUSPENDED; + _PyFrame_SetStackPointer(frame, stack_pointer - 1); + int err = _Py_call_instrumentation_arg( + tstate, PY_MONITORING_EVENT_PY_YIELD, + frame, next_instr-1, retval); + if (err) goto error; + tstate->exc_info = gen->gi_exc_state.previous_item; + gen->gi_exc_state.previous_item = NULL; + _Py_LeaveRecursiveCallPy(tstate); + _PyInterpreterFrame *gen_frame = frame; + frame = cframe.current_frame = frame->previous; + gen_frame->previous = NULL; + _PyFrame_StackPush(frame, retval); + goto resume_frame; + #line 1238 "Python/generated_cases.c.h" } TARGET(YIELD_VALUE) { PyObject *retval = stack_pointer[-1]; - #line 746 "Python/bytecodes.c" + #line 906 "Python/bytecodes.c" // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. 
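/* Putting the SEND / SEND_GEN / RETURN_* / YIELD_VALUE hunks together, as far
 * as the surrounding handlers show: the old scheme jumped past the send
 * target up front (JUMPBY(INLINE_CACHE_ENTRIES_SEND + oparg)) and had
 * YIELD_VALUE rewind by frame->yield_offset when the generator yielded.  In
 * the new scheme SEND and SEND_GEN only skip their inline cache, stash oparg
 * in frame->return_offset, and inline the generator frame; a yield then
 * resumes the caller right after SEND, while RETURN_VALUE / RETURN_CONST
 * advance the caller's prev_instr by return_offset so a *finished* generator
 * lands on SEND's jump target instead.  That is also why YIELD_VALUE no
 * longer adjusts any offset in the hunk below. */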
@@ -1105,26 +1247,23 @@ PyGenObject *gen = _PyFrame_GetGenerator(frame); gen->gi_frame_state = FRAME_SUSPENDED; _PyFrame_SetStackPointer(frame, stack_pointer - 1); - TRACE_FUNCTION_EXIT(); - DTRACE_FUNCTION_EXIT(); tstate->exc_info = gen->gi_exc_state.previous_item; gen->gi_exc_state.previous_item = NULL; _Py_LeaveRecursiveCallPy(tstate); _PyInterpreterFrame *gen_frame = frame; frame = cframe.current_frame = frame->previous; gen_frame->previous = NULL; - frame->prev_instr -= frame->yield_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 1120 "Python/generated_cases.c.h" + #line 1259 "Python/generated_cases.c.h" } TARGET(POP_EXCEPT) { PyObject *exc_value = stack_pointer[-1]; - #line 767 "Python/bytecodes.c" + #line 924 "Python/bytecodes.c" _PyErr_StackItem *exc_info = tstate->exc_info; Py_XSETREF(exc_info->exc_value, exc_value); - #line 1128 "Python/generated_cases.c.h" + #line 1267 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } @@ -1132,7 +1271,7 @@ TARGET(RERAISE) { PyObject *exc = stack_pointer[-1]; PyObject **values = (stack_pointer - (1 + oparg)); - #line 772 "Python/bytecodes.c" + #line 929 "Python/bytecodes.c" assert(oparg >= 0 && oparg <= 2); if (oparg) { PyObject *lasti = values[0]; @@ -1148,32 +1287,28 @@ } assert(exc && PyExceptionInstance_Check(exc)); Py_INCREF(exc); - PyObject *typ = Py_NewRef(PyExceptionInstance_Class(exc)); - PyObject *tb = PyException_GetTraceback(exc); - _PyErr_Restore(tstate, typ, exc, tb); + _PyErr_SetRaisedException(tstate, exc); goto exception_unwind; - #line 1156 "Python/generated_cases.c.h" + #line 1293 "Python/generated_cases.c.h" } TARGET(END_ASYNC_FOR) { PyObject *exc = stack_pointer[-1]; PyObject *awaitable = stack_pointer[-2]; - #line 794 "Python/bytecodes.c" + #line 949 "Python/bytecodes.c" assert(exc && PyExceptionInstance_Check(exc)); if (PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) { - #line 1165 "Python/generated_cases.c.h" + #line 1302 "Python/generated_cases.c.h" Py_DECREF(awaitable); Py_DECREF(exc); - #line 797 "Python/bytecodes.c" + #line 952 "Python/bytecodes.c" } else { Py_INCREF(exc); - PyObject *typ = Py_NewRef(PyExceptionInstance_Class(exc)); - PyObject *tb = PyException_GetTraceback(exc); - _PyErr_Restore(tstate, typ, exc, tb); + _PyErr_SetRaisedException(tstate, exc); goto exception_unwind; } - #line 1177 "Python/generated_cases.c.h" + #line 1312 "Python/generated_cases.c.h" STACK_SHRINK(2); DISPATCH(); } @@ -1184,23 +1319,23 @@ PyObject *sub_iter = stack_pointer[-3]; PyObject *none; PyObject *value; - #line 808 "Python/bytecodes.c" + #line 961 "Python/bytecodes.c" assert(throwflag); assert(exc_value && PyExceptionInstance_Check(exc_value)); if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { value = Py_NewRef(((PyStopIterationObject *)exc_value)->value); - #line 1193 "Python/generated_cases.c.h" + #line 1328 "Python/generated_cases.c.h" Py_DECREF(sub_iter); Py_DECREF(last_sent_val); Py_DECREF(exc_value); - #line 813 "Python/bytecodes.c" + #line 966 "Python/bytecodes.c" none = Py_NewRef(Py_None); } else { _PyErr_SetRaisedException(tstate, Py_NewRef(exc_value)); goto exception_unwind; } - #line 1204 "Python/generated_cases.c.h" + #line 1339 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = value; stack_pointer[-2] = none; @@ -1209,9 +1344,9 @@ TARGET(LOAD_ASSERTION_ERROR) { PyObject *value; - #line 822 "Python/bytecodes.c" + #line 975 "Python/bytecodes.c" value = Py_NewRef(PyExc_AssertionError); - #line 1215 "Python/generated_cases.c.h" + #line 1350 
"Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -1219,7 +1354,7 @@ TARGET(LOAD_BUILD_CLASS) { PyObject *bc; - #line 826 "Python/bytecodes.c" + #line 979 "Python/bytecodes.c" if (PyDict_CheckExact(BUILTINS())) { bc = _PyDict_GetItemWithError(BUILTINS(), &_Py_ID(__build_class__)); @@ -1241,7 +1376,7 @@ if (true) goto error; } } - #line 1245 "Python/generated_cases.c.h" + #line 1380 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = bc; DISPATCH(); @@ -1249,33 +1384,33 @@ TARGET(STORE_NAME) { PyObject *v = stack_pointer[-1]; - #line 850 "Python/bytecodes.c" + #line 1003 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); PyObject *ns = LOCALS(); int err; if (ns == NULL) { _PyErr_Format(tstate, PyExc_SystemError, "no locals found when storing %R", name); - #line 1260 "Python/generated_cases.c.h" + #line 1395 "Python/generated_cases.c.h" Py_DECREF(v); - #line 857 "Python/bytecodes.c" + #line 1010 "Python/bytecodes.c" if (true) goto pop_1_error; } if (PyDict_CheckExact(ns)) err = PyDict_SetItem(ns, name, v); else err = PyObject_SetItem(ns, name, v); - #line 1269 "Python/generated_cases.c.h" + #line 1404 "Python/generated_cases.c.h" Py_DECREF(v); - #line 864 "Python/bytecodes.c" + #line 1017 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 1273 "Python/generated_cases.c.h" + #line 1408 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(DELETE_NAME) { - #line 868 "Python/bytecodes.c" + #line 1021 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); PyObject *ns = LOCALS(); int err; @@ -1292,7 +1427,7 @@ name); goto error; } - #line 1296 "Python/generated_cases.c.h" + #line 1431 "Python/generated_cases.c.h" DISPATCH(); } @@ -1300,11 +1435,10 @@ PREDICTED(UNPACK_SEQUENCE); static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); PyObject *seq = stack_pointer[-1]; - #line 894 "Python/bytecodes.c" + #line 1047 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_UnpackSequence(seq, next_instr, oparg); DISPATCH_SAME_OPARG(); @@ -1314,11 +1448,11 @@ #endif /* ENABLE_SPECIALIZATION */ PyObject **top = stack_pointer + oparg - 1; int res = unpack_iterable(tstate, seq, oparg, -1, top); - #line 1318 "Python/generated_cases.c.h" + #line 1452 "Python/generated_cases.c.h" Py_DECREF(seq); - #line 908 "Python/bytecodes.c" + #line 1060 "Python/bytecodes.c" if (res == 0) goto pop_1_error; - #line 1322 "Python/generated_cases.c.h" + #line 1456 "Python/generated_cases.c.h" STACK_SHRINK(1); STACK_GROW(oparg); next_instr += 1; @@ -1328,14 +1462,14 @@ TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); - #line 912 "Python/bytecodes.c" + #line 1064 "Python/bytecodes.c" DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); assert(oparg == 2); STAT_INC(UNPACK_SEQUENCE, hit); values[0] = Py_NewRef(PyTuple_GET_ITEM(seq, 1)); values[1] = Py_NewRef(PyTuple_GET_ITEM(seq, 0)); - #line 1339 "Python/generated_cases.c.h" + #line 1473 "Python/generated_cases.c.h" Py_DECREF(seq); STACK_SHRINK(1); STACK_GROW(oparg); @@ -1346,7 +1480,7 @@ TARGET(UNPACK_SEQUENCE_TUPLE) { PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); - #line 922 "Python/bytecodes.c" + #line 1074 
"Python/bytecodes.c" DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -1354,7 +1488,7 @@ for (int i = oparg; --i >= 0; ) { *values++ = Py_NewRef(items[i]); } - #line 1358 "Python/generated_cases.c.h" + #line 1492 "Python/generated_cases.c.h" Py_DECREF(seq); STACK_SHRINK(1); STACK_GROW(oparg); @@ -1365,7 +1499,7 @@ TARGET(UNPACK_SEQUENCE_LIST) { PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); - #line 933 "Python/bytecodes.c" + #line 1085 "Python/bytecodes.c" DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -1373,7 +1507,7 @@ for (int i = oparg; --i >= 0; ) { *values++ = Py_NewRef(items[i]); } - #line 1377 "Python/generated_cases.c.h" + #line 1511 "Python/generated_cases.c.h" Py_DECREF(seq); STACK_SHRINK(1); STACK_GROW(oparg); @@ -1383,15 +1517,15 @@ TARGET(UNPACK_EX) { PyObject *seq = stack_pointer[-1]; - #line 944 "Python/bytecodes.c" + #line 1096 "Python/bytecodes.c" int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); PyObject **top = stack_pointer + totalargs - 1; int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); - #line 1391 "Python/generated_cases.c.h" + #line 1525 "Python/generated_cases.c.h" Py_DECREF(seq); - #line 948 "Python/bytecodes.c" + #line 1100 "Python/bytecodes.c" if (res == 0) goto pop_1_error; - #line 1395 "Python/generated_cases.c.h" + #line 1529 "Python/generated_cases.c.h" STACK_GROW((oparg & 0xFF) + (oparg >> 8)); DISPATCH(); } @@ -1402,10 +1536,9 @@ PyObject *owner = stack_pointer[-1]; PyObject *v = stack_pointer[-2]; uint16_t counter = read_u16(&next_instr[0].cache); - #line 959 "Python/bytecodes.c" + #line 1111 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { - assert(cframe.use_tracing == 0); PyObject *name = GETITEM(frame->f_code->co_names, oparg); next_instr--; _Py_Specialize_StoreAttr(owner, next_instr, name); @@ -1419,12 +1552,12 @@ #endif /* ENABLE_SPECIALIZATION */ PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err = PyObject_SetAttr(owner, name, v); - #line 1423 "Python/generated_cases.c.h" + #line 1556 "Python/generated_cases.c.h" Py_DECREF(v); Py_DECREF(owner); - #line 976 "Python/bytecodes.c" + #line 1127 "Python/bytecodes.c" if (err) goto pop_2_error; - #line 1428 "Python/generated_cases.c.h" + #line 1561 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -1432,34 +1565,34 @@ TARGET(DELETE_ATTR) { PyObject *owner = stack_pointer[-1]; - #line 980 "Python/bytecodes.c" + #line 1131 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err = PyObject_SetAttr(owner, name, (PyObject *)NULL); - #line 1439 "Python/generated_cases.c.h" + #line 1572 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 983 "Python/bytecodes.c" + #line 1134 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 1443 "Python/generated_cases.c.h" + #line 1576 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(STORE_GLOBAL) { PyObject *v = stack_pointer[-1]; - #line 987 "Python/bytecodes.c" + #line 1138 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err = PyDict_SetItem(GLOBALS(), name, v); - #line 1453 "Python/generated_cases.c.h" + #line 1586 "Python/generated_cases.c.h" Py_DECREF(v); - #line 990 "Python/bytecodes.c" + #line 1141 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 
1457 "Python/generated_cases.c.h" + #line 1590 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(DELETE_GLOBAL) { - #line 994 "Python/bytecodes.c" + #line 1145 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err; err = PyDict_DelItem(GLOBALS(), name); @@ -1471,13 +1604,13 @@ } goto error; } - #line 1475 "Python/generated_cases.c.h" + #line 1608 "Python/generated_cases.c.h" DISPATCH(); } TARGET(LOAD_NAME) { PyObject *v; - #line 1008 "Python/bytecodes.c" + #line 1159 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); PyObject *locals = LOCALS(); if (locals == NULL) { @@ -1536,7 +1669,7 @@ } } } - #line 1540 "Python/generated_cases.c.h" + #line 1673 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = v; DISPATCH(); @@ -1547,11 +1680,10 @@ static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); PyObject *null = NULL; PyObject *v; - #line 1075 "Python/bytecodes.c" + #line 1226 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1); next_instr--; _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); @@ -1600,7 +1732,7 @@ } } null = NULL; - #line 1604 "Python/generated_cases.c.h" + #line 1736 "Python/generated_cases.c.h" STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = v; @@ -1614,8 +1746,7 @@ PyObject *res; uint16_t index = read_u16(&next_instr[1].cache); uint16_t version = read_u16(&next_instr[2].cache); - #line 1130 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1280 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); @@ -1626,7 +1757,7 @@ Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; - #line 1630 "Python/generated_cases.c.h" + #line 1761 "Python/generated_cases.c.h" STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -1641,12 +1772,12 @@ uint16_t index = read_u16(&next_instr[1].cache); uint16_t mod_version = read_u16(&next_instr[2].cache); uint16_t bltn_version = read_u16(&next_instr[3].cache); - #line 1144 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1293 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); PyDictObject *mdict = (PyDictObject *)GLOBALS(); PyDictObject *bdict = (PyDictObject *)BUILTINS(); + assert(opcode == LOAD_GLOBAL_BUILTIN); DEOPT_IF(mdict->ma_keys->dk_version != mod_version, LOAD_GLOBAL); DEOPT_IF(bdict->ma_keys->dk_version != bltn_version, LOAD_GLOBAL); assert(DK_IS_UNICODE(bdict->ma_keys)); @@ -1656,7 +1787,7 @@ Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; - #line 1660 "Python/generated_cases.c.h" + #line 1791 "Python/generated_cases.c.h" STACK_GROW(1); STACK_GROW(((oparg & 1) ? 
1 : 0)); stack_pointer[-1] = res; @@ -1666,16 +1797,16 @@ } TARGET(DELETE_FAST) { - #line 1161 "Python/bytecodes.c" + #line 1310 "Python/bytecodes.c" PyObject *v = GETLOCAL(oparg); if (v == NULL) goto unbound_local_error; SETLOCAL(oparg, NULL); - #line 1674 "Python/generated_cases.c.h" + #line 1805 "Python/generated_cases.c.h" DISPATCH(); } TARGET(MAKE_CELL) { - #line 1167 "Python/bytecodes.c" + #line 1316 "Python/bytecodes.c" // "initial" is probably NULL but not if it's an arg (or set // via PyFrame_LocalsToFast() before MAKE_CELL has run). PyObject *initial = GETLOCAL(oparg); @@ -1684,12 +1815,12 @@ goto resume_with_error; } SETLOCAL(oparg, cell); - #line 1688 "Python/generated_cases.c.h" + #line 1819 "Python/generated_cases.c.h" DISPATCH(); } TARGET(DELETE_DEREF) { - #line 1178 "Python/bytecodes.c" + #line 1327 "Python/bytecodes.c" PyObject *cell = GETLOCAL(oparg); PyObject *oldobj = PyCell_GET(cell); // Can't use ERROR_IF here. @@ -1700,13 +1831,13 @@ } PyCell_SET(cell, NULL); Py_DECREF(oldobj); - #line 1704 "Python/generated_cases.c.h" + #line 1835 "Python/generated_cases.c.h" DISPATCH(); } TARGET(LOAD_CLASSDEREF) { PyObject *value; - #line 1191 "Python/bytecodes.c" + #line 1340 "Python/bytecodes.c" PyObject *name, *locals = LOCALS(); assert(locals); assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); @@ -1738,7 +1869,7 @@ } Py_INCREF(value); } - #line 1742 "Python/generated_cases.c.h" + #line 1873 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -1746,7 +1877,7 @@ TARGET(LOAD_DEREF) { PyObject *value; - #line 1225 "Python/bytecodes.c" + #line 1374 "Python/bytecodes.c" PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { @@ -1754,7 +1885,7 @@ if (true) goto error; } Py_INCREF(value); - #line 1758 "Python/generated_cases.c.h" + #line 1889 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -1762,18 +1893,18 @@ TARGET(STORE_DEREF) { PyObject *v = stack_pointer[-1]; - #line 1235 "Python/bytecodes.c" + #line 1384 "Python/bytecodes.c" PyObject *cell = GETLOCAL(oparg); PyObject *oldobj = PyCell_GET(cell); PyCell_SET(cell, v); Py_XDECREF(oldobj); - #line 1771 "Python/generated_cases.c.h" + #line 1902 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(COPY_FREE_VARS) { - #line 1242 "Python/bytecodes.c" + #line 1391 "Python/bytecodes.c" /* Copy closure variables to free variables */ PyCodeObject *co = frame->f_code; assert(PyFunction_Check(frame->f_funcobj)); @@ -1784,22 +1915,22 @@ PyObject *o = PyTuple_GET_ITEM(closure, i); frame->localsplus[offset + i] = Py_NewRef(o); } - #line 1788 "Python/generated_cases.c.h" + #line 1919 "Python/generated_cases.c.h" DISPATCH(); } TARGET(BUILD_STRING) { PyObject **pieces = (stack_pointer - oparg); PyObject *str; - #line 1255 "Python/bytecodes.c" + #line 1404 "Python/bytecodes.c" str = _PyUnicode_JoinArray(&_Py_STR(empty), pieces, oparg); - #line 1797 "Python/generated_cases.c.h" + #line 1928 "Python/generated_cases.c.h" for (int _i = oparg; --_i >= 0;) { Py_DECREF(pieces[_i]); } - #line 1257 "Python/bytecodes.c" + #line 1406 "Python/bytecodes.c" if (str == NULL) { STACK_SHRINK(oparg); goto error; } - #line 1803 "Python/generated_cases.c.h" + #line 1934 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = str; @@ -1809,10 +1940,10 @@ TARGET(BUILD_TUPLE) { PyObject **values = (stack_pointer - oparg); PyObject *tup; - #line 1261 "Python/bytecodes.c" + #line 1410 "Python/bytecodes.c" tup = 
_PyTuple_FromArraySteal(values, oparg); if (tup == NULL) { STACK_SHRINK(oparg); goto error; } - #line 1816 "Python/generated_cases.c.h" + #line 1947 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = tup; @@ -1822,10 +1953,10 @@ TARGET(BUILD_LIST) { PyObject **values = (stack_pointer - oparg); PyObject *list; - #line 1266 "Python/bytecodes.c" + #line 1415 "Python/bytecodes.c" list = _PyList_FromArraySteal(values, oparg); if (list == NULL) { STACK_SHRINK(oparg); goto error; } - #line 1829 "Python/generated_cases.c.h" + #line 1960 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = list; @@ -1835,7 +1966,7 @@ TARGET(LIST_EXTEND) { PyObject *iterable = stack_pointer[-1]; PyObject *list = stack_pointer[-(2 + (oparg-1))]; - #line 1271 "Python/bytecodes.c" + #line 1420 "Python/bytecodes.c" PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); if (none_val == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && @@ -1846,13 +1977,13 @@ "Value after * must be an iterable, not %.200s", Py_TYPE(iterable)->tp_name); } - #line 1850 "Python/generated_cases.c.h" + #line 1981 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 1282 "Python/bytecodes.c" + #line 1431 "Python/bytecodes.c" if (true) goto pop_1_error; } Py_DECREF(none_val); - #line 1856 "Python/generated_cases.c.h" + #line 1987 "Python/generated_cases.c.h" Py_DECREF(iterable); STACK_SHRINK(1); DISPATCH(); @@ -1861,13 +1992,13 @@ TARGET(SET_UPDATE) { PyObject *iterable = stack_pointer[-1]; PyObject *set = stack_pointer[-(2 + (oparg-1))]; - #line 1289 "Python/bytecodes.c" + #line 1438 "Python/bytecodes.c" int err = _PySet_Update(set, iterable); - #line 1867 "Python/generated_cases.c.h" + #line 1998 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 1291 "Python/bytecodes.c" + #line 1440 "Python/bytecodes.c" if (err < 0) goto pop_1_error; - #line 1871 "Python/generated_cases.c.h" + #line 2002 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } @@ -1875,7 +2006,7 @@ TARGET(BUILD_SET) { PyObject **values = (stack_pointer - oparg); PyObject *set; - #line 1295 "Python/bytecodes.c" + #line 1444 "Python/bytecodes.c" set = PySet_New(NULL); if (set == NULL) goto error; @@ -1890,7 +2021,7 @@ Py_DECREF(set); if (true) { STACK_SHRINK(oparg); goto error; } } - #line 1894 "Python/generated_cases.c.h" + #line 2025 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = set; @@ -1900,7 +2031,7 @@ TARGET(BUILD_MAP) { PyObject **values = (stack_pointer - oparg*2); PyObject *map; - #line 1312 "Python/bytecodes.c" + #line 1461 "Python/bytecodes.c" map = _PyDict_FromItems( values, 2, values+1, 2, @@ -1908,13 +2039,13 @@ if (map == NULL) goto error; - #line 1912 "Python/generated_cases.c.h" + #line 2043 "Python/generated_cases.c.h" for (int _i = oparg*2; --_i >= 0;) { Py_DECREF(values[_i]); } - #line 1320 "Python/bytecodes.c" + #line 1469 "Python/bytecodes.c" if (map == NULL) { STACK_SHRINK(oparg*2); goto error; } - #line 1918 "Python/generated_cases.c.h" + #line 2049 "Python/generated_cases.c.h" STACK_SHRINK(oparg*2); STACK_GROW(1); stack_pointer[-1] = map; @@ -1922,7 +2053,7 @@ } TARGET(SETUP_ANNOTATIONS) { - #line 1324 "Python/bytecodes.c" + #line 1473 "Python/bytecodes.c" int err; PyObject *ann_dict; if (LOCALS() == NULL) { @@ -1962,7 +2093,7 @@ Py_DECREF(ann_dict); } } - #line 1966 "Python/generated_cases.c.h" + #line 2097 "Python/generated_cases.c.h" DISPATCH(); } @@ -1970,7 +2101,7 @@ PyObject *keys = stack_pointer[-1]; 
PyObject **values = (stack_pointer - (1 + oparg)); PyObject *map; - #line 1366 "Python/bytecodes.c" + #line 1515 "Python/bytecodes.c" if (!PyTuple_CheckExact(keys) || PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { _PyErr_SetString(tstate, PyExc_SystemError, @@ -1980,14 +2111,14 @@ map = _PyDict_FromItems( &PyTuple_GET_ITEM(keys, 0), 1, values, 1, oparg); - #line 1984 "Python/generated_cases.c.h" + #line 2115 "Python/generated_cases.c.h" for (int _i = oparg; --_i >= 0;) { Py_DECREF(values[_i]); } Py_DECREF(keys); - #line 1376 "Python/bytecodes.c" + #line 1525 "Python/bytecodes.c" if (map == NULL) { STACK_SHRINK(oparg); goto pop_1_error; } - #line 1991 "Python/generated_cases.c.h" + #line 2122 "Python/generated_cases.c.h" STACK_SHRINK(oparg); stack_pointer[-1] = map; DISPATCH(); @@ -1995,7 +2126,7 @@ TARGET(DICT_UPDATE) { PyObject *update = stack_pointer[-1]; - #line 1380 "Python/bytecodes.c" + #line 1529 "Python/bytecodes.c" PyObject *dict = PEEK(oparg + 1); // update is still on the stack if (PyDict_Update(dict, update) < 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { @@ -2003,12 +2134,12 @@ "'%.200s' object is not a mapping", Py_TYPE(update)->tp_name); } - #line 2007 "Python/generated_cases.c.h" + #line 2138 "Python/generated_cases.c.h" Py_DECREF(update); - #line 1388 "Python/bytecodes.c" + #line 1537 "Python/bytecodes.c" if (true) goto pop_1_error; } - #line 2012 "Python/generated_cases.c.h" + #line 2143 "Python/generated_cases.c.h" Py_DECREF(update); STACK_SHRINK(1); DISPATCH(); @@ -2016,17 +2147,17 @@ TARGET(DICT_MERGE) { PyObject *update = stack_pointer[-1]; - #line 1394 "Python/bytecodes.c" + #line 1543 "Python/bytecodes.c" PyObject *dict = PEEK(oparg + 1); // update is still on the stack if (_PyDict_MergeEx(dict, update, 2) < 0) { format_kwargs_error(tstate, PEEK(3 + oparg), update); - #line 2025 "Python/generated_cases.c.h" + #line 2156 "Python/generated_cases.c.h" Py_DECREF(update); - #line 1399 "Python/bytecodes.c" + #line 1548 "Python/bytecodes.c" if (true) goto pop_1_error; } - #line 2030 "Python/generated_cases.c.h" + #line 2161 "Python/generated_cases.c.h" Py_DECREF(update); STACK_SHRINK(1); PREDICT(CALL_FUNCTION_EX); @@ -2036,29 +2167,100 @@ TARGET(MAP_ADD) { PyObject *value = stack_pointer[-1]; PyObject *key = stack_pointer[-2]; - #line 1406 "Python/bytecodes.c" + #line 1555 "Python/bytecodes.c" PyObject *dict = PEEK(oparg + 2); // key, value are still on the stack assert(PyDict_CheckExact(dict)); /* dict[key] = value */ // Do not DECREF INPUTS because the function steals the references if (_PyDict_SetItem_Take2((PyDictObject *)dict, key, value) != 0) goto pop_2_error; - #line 2046 "Python/generated_cases.c.h" + #line 2177 "Python/generated_cases.c.h" STACK_SHRINK(2); PREDICT(JUMP_BACKWARD); DISPATCH(); } + TARGET(LOAD_SUPER_ATTR) { + PREDICTED(LOAD_SUPER_ATTR); + static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 9, "incorrect cache size"); + PyObject *self = stack_pointer[-1]; + PyObject *class = stack_pointer[-2]; + PyObject *global_super = stack_pointer[-3]; + PyObject *res2 = NULL; + PyObject *res; + #line 1569 "Python/bytecodes.c" + PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2); + int load_method = oparg & 1; + #if ENABLE_SPECIALIZATION + _PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + next_instr--; + _Py_Specialize_LoadSuperAttr(global_super, class, self, next_instr, name, load_method); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_SUPER_ATTR, deferred); + 
DECREMENT_ADAPTIVE_COUNTER(cache->counter); + #endif /* ENABLE_SPECIALIZATION */ + + // we make no attempt to optimize here; specializations should + // handle any case whose performance we care about + PyObject *stack[] = {class, self}; + PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); + #line 2209 "Python/generated_cases.c.h" + Py_DECREF(global_super); + Py_DECREF(class); + Py_DECREF(self); + #line 1587 "Python/bytecodes.c" + if (super == NULL) goto pop_3_error; + res = PyObject_GetAttr(super, name); + Py_DECREF(super); + if (res == NULL) goto pop_3_error; + #line 2218 "Python/generated_cases.c.h" + STACK_SHRINK(2); + STACK_GROW(((oparg & 1) ? 1 : 0)); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; + DISPATCH(); + } + + TARGET(LOAD_SUPER_ATTR_METHOD) { + PyObject *self = stack_pointer[-1]; + PyObject *class = stack_pointer[-2]; + PyObject *global_super = stack_pointer[-3]; + PyObject *res2; + PyObject *res; + uint32_t class_version = read_u32(&next_instr[1].cache); + uint32_t self_type_version = read_u32(&next_instr[3].cache); + PyObject *method = read_obj(&next_instr[5].cache); + #line 1594 "Python/bytecodes.c" + DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); + DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); + DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR); + PyTypeObject *self_type = Py_TYPE(self); + DEOPT_IF(self_type->tp_version_tag != self_type_version, LOAD_SUPER_ATTR); + res2 = method; + res = self; // transfer ownership + Py_INCREF(res2); + Py_DECREF(global_super); + Py_DECREF(class); + #line 2247 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = res; + stack_pointer[-2] = res2; + next_instr += 9; + DISPATCH(); + } + TARGET(LOAD_ATTR) { PREDICTED(LOAD_ATTR); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; - #line 1429 "Python/bytecodes.c" + #line 1621 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); PyObject *name = GETITEM(frame->f_code->co_names, oparg>>1); next_instr--; _Py_Specialize_LoadAttr(owner, next_instr, name); @@ -2089,9 +2291,9 @@ NULL | meth | arg1 | ... | argN */ - #line 2093 "Python/generated_cases.c.h" + #line 2295 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 1464 "Python/bytecodes.c" + #line 1655 "Python/bytecodes.c" if (meth == NULL) goto pop_1_error; res2 = NULL; res = meth; @@ -2100,12 +2302,12 @@ else { /* Classic, pushes one value. */ res = PyObject_GetAttr(owner, name); - #line 2104 "Python/generated_cases.c.h" + #line 2306 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 1473 "Python/bytecodes.c" + #line 1664 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; } - #line 2109 "Python/generated_cases.c.h" + #line 2311 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = res2; } @@ -2119,8 +2321,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1478 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1669 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2133,7 +2334,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2137 "Python/generated_cases.c.h" + #line 2338 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2148,8 +2349,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1495 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1685 "Python/bytecodes.c" DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; assert(dict != NULL); @@ -2162,7 +2362,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2166 "Python/generated_cases.c.h" + #line 2366 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2177,8 +2377,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1512 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1701 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2205,7 +2404,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2209 "Python/generated_cases.c.h" + #line 2408 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2220,8 +2419,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1543 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1731 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2231,7 +2429,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2235 "Python/generated_cases.c.h" + #line 2433 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2246,8 +2444,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 1557 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1744 "Python/bytecodes.c" DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, @@ -2259,7 +2456,7 @@ res = descr; assert(res != NULL); Py_INCREF(res); - #line 2263 "Python/generated_cases.c.h" + #line 2460 "Python/generated_cases.c.h" Py_DECREF(cls); STACK_GROW(((oparg & 1) ? 
1 : 0)); stack_pointer[-1] = res; @@ -2273,8 +2470,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *fget = read_obj(&next_instr[5].cache); - #line 1573 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1759 "Python/bytecodes.c" DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); @@ -2296,8 +2492,9 @@ STACK_SHRINK(shrink_stack); new_frame->localsplus[0] = owner; JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 2301 "Python/generated_cases.c.h" + #line 2498 "Python/generated_cases.c.h" } TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { @@ -2305,8 +2502,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *getattribute = read_obj(&next_instr[5].cache); - #line 1599 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1785 "Python/bytecodes.c" DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); @@ -2330,8 +2526,9 @@ new_frame->localsplus[0] = owner; new_frame->localsplus[1] = Py_NewRef(name); JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 2335 "Python/generated_cases.c.h" + #line 2532 "Python/generated_cases.c.h" } TARGET(STORE_ATTR_INSTANCE_VALUE) { @@ -2339,8 +2536,7 @@ PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1627 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1813 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2358,7 +2554,7 @@ Py_DECREF(old_value); } Py_DECREF(owner); - #line 2362 "Python/generated_cases.c.h" + #line 2558 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2369,8 +2565,7 @@ PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t hint = read_u16(&next_instr[3].cache); - #line 1648 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1833 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2409,7 +2604,7 @@ /* PEP 509 */ dict->ma_version_tag = new_version; Py_DECREF(owner); - #line 2413 "Python/generated_cases.c.h" + #line 2608 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2420,8 +2615,7 @@ PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1690 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 1874 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2431,144 +2625,114 @@ *(PyObject **)addr = value; Py_XDECREF(old_value); Py_DECREF(owner); - #line 2435 "Python/generated_cases.c.h" + #line 2629 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); } TARGET(COMPARE_OP) { + PREDICTED(COMPARE_OP); + static_assert(INLINE_CACHE_ENTRIES_COMPARE_OP == 1, "incorrect cache size"); PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1703 "Python/bytecodes.c" - STAT_INC(COMPARE_OP, deferred); 
- assert((oparg >> 4) <= Py_GE); - res = PyObject_RichCompare(left, right, oparg>>4); - #line 2449 "Python/generated_cases.c.h" - Py_DECREF(left); - Py_DECREF(right); - #line 1707 "Python/bytecodes.c" - if (res == NULL) goto pop_2_error; - #line 2454 "Python/generated_cases.c.h" - STACK_SHRINK(1); - stack_pointer[-1] = res; - next_instr += 1; - DISPATCH(); - } - - TARGET(COMPARE_AND_BRANCH) { - PREDICTED(COMPARE_AND_BRANCH); - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - #line 1719 "Python/bytecodes.c" + #line 1893 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; - _Py_Specialize_CompareAndBranch(left, right, next_instr, oparg); + _Py_Specialize_CompareOp(left, right, next_instr, oparg); DISPATCH_SAME_OPARG(); } - STAT_INC(COMPARE_AND_BRANCH, deferred); + STAT_INC(COMPARE_OP, deferred); DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ assert((oparg >> 4) <= Py_GE); - PyObject *cond = PyObject_RichCompare(left, right, oparg>>4); - #line 2479 "Python/generated_cases.c.h" + res = PyObject_RichCompare(left, right, oparg>>4); + #line 2654 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1733 "Python/bytecodes.c" - if (cond == NULL) goto pop_2_error; - assert(next_instr[1].op.code == POP_JUMP_IF_FALSE || - next_instr[1].op.code == POP_JUMP_IF_TRUE); - bool jump_on_true = next_instr[1].op.code == POP_JUMP_IF_TRUE; - int offset = next_instr[1].op.arg; - int err = PyObject_IsTrue(cond); - Py_DECREF(cond); - if (err < 0) goto pop_2_error; - if (jump_on_true == (err != 0)) { - JUMPBY(offset); - } - #line 2494 "Python/generated_cases.c.h" - STACK_SHRINK(2); - next_instr += 2; + #line 1906 "Python/bytecodes.c" + if (res == NULL) goto pop_2_error; + #line 2659 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } - TARGET(COMPARE_AND_BRANCH_FLOAT) { + TARGET(COMPARE_OP_FLOAT) { PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; - #line 1747 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_AND_BRANCH); - DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_AND_BRANCH); - STAT_INC(COMPARE_AND_BRANCH, hit); + PyObject *res; + #line 1910 "Python/bytecodes.c" + DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); double dleft = PyFloat_AS_DOUBLE(left); double dright = PyFloat_AS_DOUBLE(right); // 1 if NaN, 2 if <, 4 if >, 8 if ==; this matches low four bits of the oparg int sign_ish = COMPARISON_BIT(dleft, dright); _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); - if (sign_ish & oparg) { - int offset = next_instr[1].op.arg; - JUMPBY(offset); - } - #line 2518 "Python/generated_cases.c.h" - STACK_SHRINK(2); - next_instr += 2; + res = (sign_ish & oparg) ? 
Py_True : Py_False; + Py_INCREF(res); + #line 2682 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } - TARGET(COMPARE_AND_BRANCH_INT) { + TARGET(COMPARE_OP_INT) { PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; - #line 1765 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyLong_CheckExact(left), COMPARE_AND_BRANCH); - DEOPT_IF(!PyLong_CheckExact(right), COMPARE_AND_BRANCH); - DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_AND_BRANCH); - DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_AND_BRANCH); - STAT_INC(COMPARE_AND_BRANCH, hit); - assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1); - Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->long_value.ob_digit[0]; - Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->long_value.ob_digit[0]; + PyObject *res; + #line 1925 "Python/bytecodes.c" + DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); + DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP); + DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + assert(_PyLong_DigitCount((PyLongObject *)left) <= 1 && + _PyLong_DigitCount((PyLongObject *)right) <= 1); + Py_ssize_t ileft = _PyLong_CompactValue((PyLongObject *)left); + Py_ssize_t iright = _PyLong_CompactValue((PyLongObject *)right); // 2 if <, 4 if >, 8 if ==; this matches the low 4 bits of the oparg int sign_ish = COMPARISON_BIT(ileft, iright); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); - if (sign_ish & oparg) { - int offset = next_instr[1].op.arg; - JUMPBY(offset); - } - #line 2545 "Python/generated_cases.c.h" - STACK_SHRINK(2); - next_instr += 2; + res = (sign_ish & oparg) ? Py_True : Py_False; + Py_INCREF(res); + #line 2709 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } - TARGET(COMPARE_AND_BRANCH_STR) { + TARGET(COMPARE_OP_STR) { PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; - #line 1786 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); - DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_AND_BRANCH); - DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_AND_BRANCH); - STAT_INC(COMPARE_AND_BRANCH, hit); - int res = _PyUnicode_Equal(left, right); + PyObject *res; + #line 1944 "Python/bytecodes.c" + DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); + DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); + STAT_INC(COMPARE_OP, hit); + int eq = _PyUnicode_Equal(left, right); assert((oparg >>4) == Py_EQ || (oparg >>4) == Py_NE); _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - assert(res == 0 || res == 1); + assert(eq == 0 || eq == 1); assert((oparg & 0xf) == COMPARISON_NOT_EQUALS || (oparg & 0xf) == COMPARISON_EQUALS); assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS); - if ((res + COMPARISON_NOT_EQUALS) & oparg) { - int offset = next_instr[1].op.arg; - JUMPBY(offset); - } - #line 2570 "Python/generated_cases.c.h" - STACK_SHRINK(2); - next_instr += 2; + res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? 
Py_True : Py_False; + Py_INCREF(res); + #line 2733 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } @@ -2576,14 +2740,14 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1804 "Python/bytecodes.c" + #line 1959 "Python/bytecodes.c" int res = Py_Is(left, right) ^ oparg; - #line 2582 "Python/generated_cases.c.h" + #line 2746 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1806 "Python/bytecodes.c" + #line 1961 "Python/bytecodes.c" b = Py_NewRef(res ? Py_True : Py_False); - #line 2587 "Python/generated_cases.c.h" + #line 2751 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = b; DISPATCH(); @@ -2593,15 +2757,15 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1810 "Python/bytecodes.c" + #line 1965 "Python/bytecodes.c" int res = PySequence_Contains(right, left); - #line 2599 "Python/generated_cases.c.h" + #line 2763 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1812 "Python/bytecodes.c" + #line 1967 "Python/bytecodes.c" if (res < 0) goto pop_2_error; b = Py_NewRef((res^oparg) ? Py_True : Py_False); - #line 2605 "Python/generated_cases.c.h" + #line 2769 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = b; DISPATCH(); @@ -2612,12 +2776,12 @@ PyObject *exc_value = stack_pointer[-2]; PyObject *rest; PyObject *match; - #line 1817 "Python/bytecodes.c" + #line 1972 "Python/bytecodes.c" if (check_except_star_type_valid(tstate, match_type) < 0) { - #line 2618 "Python/generated_cases.c.h" + #line 2782 "Python/generated_cases.c.h" Py_DECREF(exc_value); Py_DECREF(match_type); - #line 1819 "Python/bytecodes.c" + #line 1974 "Python/bytecodes.c" if (true) goto pop_2_error; } @@ -2625,19 +2789,19 @@ rest = NULL; int res = exception_group_match(exc_value, match_type, &match, &rest); - #line 2629 "Python/generated_cases.c.h" + #line 2793 "Python/generated_cases.c.h" Py_DECREF(exc_value); Py_DECREF(match_type); - #line 1827 "Python/bytecodes.c" + #line 1982 "Python/bytecodes.c" if (res < 0) goto pop_2_error; assert((match == NULL) == (rest == NULL)); if (match == NULL) goto pop_2_error; if (!Py_IsNone(match)) { - PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); + PyErr_SetHandledException(match); } - #line 2641 "Python/generated_cases.c.h" + #line 2805 "Python/generated_cases.c.h" stack_pointer[-1] = match; stack_pointer[-2] = rest; DISPATCH(); @@ -2647,21 +2811,21 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1838 "Python/bytecodes.c" + #line 1993 "Python/bytecodes.c" assert(PyExceptionInstance_Check(left)); if (check_except_type_valid(tstate, right) < 0) { - #line 2654 "Python/generated_cases.c.h" + #line 2818 "Python/generated_cases.c.h" Py_DECREF(right); - #line 1841 "Python/bytecodes.c" + #line 1996 "Python/bytecodes.c" if (true) goto pop_1_error; } int res = PyErr_GivenExceptionMatches(left, right); - #line 2661 "Python/generated_cases.c.h" + #line 2825 "Python/generated_cases.c.h" Py_DECREF(right); - #line 1846 "Python/bytecodes.c" + #line 2001 "Python/bytecodes.c" b = Py_NewRef(res ? 
Py_True : Py_False); - #line 2665 "Python/generated_cases.c.h" + #line 2829 "Python/generated_cases.c.h" stack_pointer[-1] = b; DISPATCH(); } @@ -2670,15 +2834,15 @@ PyObject *fromlist = stack_pointer[-1]; PyObject *level = stack_pointer[-2]; PyObject *res; - #line 1850 "Python/bytecodes.c" + #line 2005 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); res = import_name(tstate, frame, name, fromlist, level); - #line 2677 "Python/generated_cases.c.h" + #line 2841 "Python/generated_cases.c.h" Py_DECREF(level); Py_DECREF(fromlist); - #line 1853 "Python/bytecodes.c" + #line 2008 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 2682 "Python/generated_cases.c.h" + #line 2846 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; DISPATCH(); @@ -2687,29 +2851,29 @@ TARGET(IMPORT_FROM) { PyObject *from = stack_pointer[-1]; PyObject *res; - #line 1857 "Python/bytecodes.c" + #line 2012 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); res = import_from(tstate, from, name); if (res == NULL) goto error; - #line 2695 "Python/generated_cases.c.h" + #line 2859 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); } TARGET(JUMP_FORWARD) { - #line 1863 "Python/bytecodes.c" + #line 2018 "Python/bytecodes.c" JUMPBY(oparg); - #line 2704 "Python/generated_cases.c.h" + #line 2868 "Python/generated_cases.c.h" DISPATCH(); } TARGET(JUMP_BACKWARD) { PREDICTED(JUMP_BACKWARD); - #line 1867 "Python/bytecodes.c" + #line 2022 "Python/bytecodes.c" assert(oparg < INSTR_OFFSET()); JUMPBY(-oparg); - #line 2713 "Python/generated_cases.c.h" + #line 2877 "Python/generated_cases.c.h" CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -2717,7 +2881,7 @@ TARGET(POP_JUMP_IF_FALSE) { PREDICTED(POP_JUMP_IF_FALSE); PyObject *cond = stack_pointer[-1]; - #line 1873 "Python/bytecodes.c" + #line 2028 "Python/bytecodes.c" if (Py_IsTrue(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2727,9 +2891,9 @@ } else { int err = PyObject_IsTrue(cond); - #line 2731 "Python/generated_cases.c.h" + #line 2895 "Python/generated_cases.c.h" Py_DECREF(cond); - #line 1883 "Python/bytecodes.c" + #line 2038 "Python/bytecodes.c" if (err == 0) { JUMPBY(oparg); } @@ -2737,14 +2901,14 @@ if (err < 0) goto pop_1_error; } } - #line 2741 "Python/generated_cases.c.h" + #line 2905 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_TRUE) { PyObject *cond = stack_pointer[-1]; - #line 1893 "Python/bytecodes.c" + #line 2048 "Python/bytecodes.c" if (Py_IsFalse(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2754,9 +2918,9 @@ } else { int err = PyObject_IsTrue(cond); - #line 2758 "Python/generated_cases.c.h" + #line 2922 "Python/generated_cases.c.h" Py_DECREF(cond); - #line 1903 "Python/bytecodes.c" + #line 2058 "Python/bytecodes.c" if (err > 0) { JUMPBY(oparg); } @@ -2764,129 +2928,67 @@ if (err < 0) goto pop_1_error; } } - #line 2768 "Python/generated_cases.c.h" + #line 2932 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_NOT_NONE) { PyObject *value = stack_pointer[-1]; - #line 1913 "Python/bytecodes.c" + #line 2068 "Python/bytecodes.c" if (!Py_IsNone(value)) { - #line 2777 "Python/generated_cases.c.h" + #line 2941 "Python/generated_cases.c.h" Py_DECREF(value); - #line 1915 "Python/bytecodes.c" + #line 2070 "Python/bytecodes.c" JUMPBY(oparg); } else { _Py_DECREF_NO_DEALLOC(value); } - #line 2785 "Python/generated_cases.c.h" + #line 2949 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_NONE) 
{ PyObject *value = stack_pointer[-1]; - #line 1923 "Python/bytecodes.c" + #line 2078 "Python/bytecodes.c" if (Py_IsNone(value)) { _Py_DECREF_NO_DEALLOC(value); JUMPBY(oparg); } else { - #line 2798 "Python/generated_cases.c.h" + #line 2962 "Python/generated_cases.c.h" Py_DECREF(value); - #line 1929 "Python/bytecodes.c" - } - #line 2802 "Python/generated_cases.c.h" - STACK_SHRINK(1); - DISPATCH(); - } - - TARGET(JUMP_IF_FALSE_OR_POP) { - PyObject *cond = stack_pointer[-1]; - #line 1933 "Python/bytecodes.c" - bool jump = false; - int err; - if (Py_IsTrue(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsFalse(cond)) { - JUMPBY(oparg); - jump = true; - } - else { - err = PyObject_IsTrue(cond); - if (err > 0) { - Py_DECREF(cond); - } - else if (err == 0) { - JUMPBY(oparg); - jump = true; - } - else { - goto error; - } - } - #line 2832 "Python/generated_cases.c.h" - STACK_SHRINK(1); - STACK_GROW((jump ? 1 : 0)); - DISPATCH(); - } - - TARGET(JUMP_IF_TRUE_OR_POP) { - PyObject *cond = stack_pointer[-1]; - #line 1958 "Python/bytecodes.c" - bool jump = false; - int err; - if (Py_IsFalse(cond)) { - _Py_DECREF_NO_DEALLOC(cond); - } - else if (Py_IsTrue(cond)) { - JUMPBY(oparg); - jump = true; - } - else { - err = PyObject_IsTrue(cond); - if (err > 0) { - JUMPBY(oparg); - jump = true; - } - else if (err == 0) { - Py_DECREF(cond); - } - else { - goto error; - } + #line 2084 "Python/bytecodes.c" } - #line 2863 "Python/generated_cases.c.h" + #line 2966 "Python/generated_cases.c.h" STACK_SHRINK(1); - STACK_GROW((jump ? 1 : 0)); DISPATCH(); } TARGET(JUMP_BACKWARD_NO_INTERRUPT) { - #line 1983 "Python/bytecodes.c" + #line 2088 "Python/bytecodes.c" /* This bytecode is used in the `yield from` or `await` loop. * If there is an interrupt, we want it handled in the innermost * generator or coroutine, so we deliberately do not check it here. * (see bpo-30039). */ JUMPBY(-oparg); - #line 2877 "Python/generated_cases.c.h" + #line 2979 "Python/generated_cases.c.h" DISPATCH(); } TARGET(GET_LEN) { PyObject *obj = stack_pointer[-1]; PyObject *len_o; - #line 1992 "Python/bytecodes.c" + #line 2097 "Python/bytecodes.c" // PUSH(len(TOS)) Py_ssize_t len_i = PyObject_Length(obj); if (len_i < 0) goto error; len_o = PyLong_FromSsize_t(len_i); if (len_o == NULL) goto error; - #line 2890 "Python/generated_cases.c.h" + #line 2992 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = len_o; DISPATCH(); @@ -2897,16 +2999,16 @@ PyObject *type = stack_pointer[-2]; PyObject *subject = stack_pointer[-3]; PyObject *attrs; - #line 2000 "Python/bytecodes.c" + #line 2105 "Python/bytecodes.c" // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or // None on failure. assert(PyTuple_CheckExact(names)); attrs = match_class(tstate, subject, type, oparg, names); - #line 2906 "Python/generated_cases.c.h" + #line 3008 "Python/generated_cases.c.h" Py_DECREF(subject); Py_DECREF(type); Py_DECREF(names); - #line 2005 "Python/bytecodes.c" + #line 2110 "Python/bytecodes.c" if (attrs) { assert(PyTuple_CheckExact(attrs)); // Success! } @@ -2914,7 +3016,7 @@ if (_PyErr_Occurred(tstate)) goto pop_3_error; attrs = Py_NewRef(Py_None); // Failure! 
} - #line 2918 "Python/generated_cases.c.h" + #line 3020 "Python/generated_cases.c.h" STACK_SHRINK(2); stack_pointer[-1] = attrs; DISPATCH(); @@ -2923,10 +3025,10 @@ TARGET(MATCH_MAPPING) { PyObject *subject = stack_pointer[-1]; PyObject *res; - #line 2015 "Python/bytecodes.c" + #line 2120 "Python/bytecodes.c" int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; res = Py_NewRef(match ? Py_True : Py_False); - #line 2930 "Python/generated_cases.c.h" + #line 3032 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); @@ -2936,10 +3038,10 @@ TARGET(MATCH_SEQUENCE) { PyObject *subject = stack_pointer[-1]; PyObject *res; - #line 2021 "Python/bytecodes.c" + #line 2126 "Python/bytecodes.c" int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; res = Py_NewRef(match ? Py_True : Py_False); - #line 2943 "Python/generated_cases.c.h" + #line 3045 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); @@ -2950,11 +3052,11 @@ PyObject *keys = stack_pointer[-1]; PyObject *subject = stack_pointer[-2]; PyObject *values_or_none; - #line 2027 "Python/bytecodes.c" + #line 2132 "Python/bytecodes.c" // On successful match, PUSH(values). Otherwise, PUSH(None). values_or_none = match_keys(tstate, subject, keys); if (values_or_none == NULL) goto error; - #line 2958 "Python/generated_cases.c.h" + #line 3060 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = values_or_none; DISPATCH(); @@ -2963,14 +3065,14 @@ TARGET(GET_ITER) { PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 2033 "Python/bytecodes.c" + #line 2138 "Python/bytecodes.c" /* before: [obj]; after [getiter(obj)] */ iter = PyObject_GetIter(iterable); - #line 2970 "Python/generated_cases.c.h" + #line 3072 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 2036 "Python/bytecodes.c" + #line 2141 "Python/bytecodes.c" if (iter == NULL) goto pop_1_error; - #line 2974 "Python/generated_cases.c.h" + #line 3076 "Python/generated_cases.c.h" stack_pointer[-1] = iter; DISPATCH(); } @@ -2978,7 +3080,7 @@ TARGET(GET_YIELD_FROM_ITER) { PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 2040 "Python/bytecodes.c" + #line 2145 "Python/bytecodes.c" /* before: [obj]; after [getiter(obj)] */ if (PyCoro_CheckExact(iterable)) { /* `iterable` is a coroutine */ @@ -3001,11 +3103,11 @@ if (iter == NULL) { goto error; } - #line 3005 "Python/generated_cases.c.h" + #line 3107 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 2063 "Python/bytecodes.c" + #line 2168 "Python/bytecodes.c" } - #line 3009 "Python/generated_cases.c.h" + #line 3111 "Python/generated_cases.c.h" stack_pointer[-1] = iter; PREDICT(LOAD_CONST); DISPATCH(); @@ -3016,11 +3118,10 @@ static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size"); PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2082 "Python/bytecodes.c" + #line 2187 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyForIterCache *cache = (_PyForIterCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_ForIter(iter, next_instr, oparg); DISPATCH_SAME_OPARG(); @@ -3035,13 +3136,12 @@ if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { goto error; } - else if (tstate->c_tracefunc != NULL) { - call_exc_trace(tstate->c_tracefunc, tstate->c_traceobj, tstate, frame); - } + monitor_raise(tstate, frame, next_instr-1); _PyErr_Clear(tstate); } /* iterator ended normally */ - 
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR); + assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR || + next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR); Py_DECREF(iter); STACK_SHRINK(1); /* Jump forward oparg, then skip following END_FOR instruction */ @@ -3049,18 +3149,48 @@ DISPATCH(); } // Common case: no jump, leave it to the code generator - #line 3053 "Python/generated_cases.c.h" + #line 3153 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; DISPATCH(); } + TARGET(INSTRUMENTED_FOR_ITER) { + #line 2220 "Python/bytecodes.c" + _Py_CODEUNIT *here = next_instr-1; + _Py_CODEUNIT *target; + PyObject *iter = TOP(); + PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); + if (next != NULL) { + PUSH(next); + target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER; + } + else { + if (_PyErr_Occurred(tstate)) { + if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { + goto error; + } + monitor_raise(tstate, frame, here); + _PyErr_Clear(tstate); + } + /* iterator ended normally */ + assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR || + next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR); + STACK_SHRINK(1); + Py_DECREF(iter); + /* Skip END_FOR */ + target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1; + } + INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH); + #line 3187 "Python/generated_cases.c.h" + DISPATCH(); + } + TARGET(FOR_ITER_LIST) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2117 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2248 "Python/bytecodes.c" DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); _PyListIterObject *it = (_PyListIterObject *)iter; STAT_INC(FOR_ITER, hit); @@ -3080,7 +3210,7 @@ DISPATCH(); end_for_iter_list: // Common case: no jump, leave it to the code generator - #line 3084 "Python/generated_cases.c.h" + #line 3214 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3090,8 +3220,7 @@ TARGET(FOR_ITER_TUPLE) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2140 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2270 "Python/bytecodes.c" _PyTupleIterObject *it = (_PyTupleIterObject *)iter; DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -3111,7 +3240,7 @@ DISPATCH(); end_for_iter_tuple: // Common case: no jump, leave it to the code generator - #line 3115 "Python/generated_cases.c.h" + #line 3244 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3121,8 +3250,7 @@ TARGET(FOR_ITER_RANGE) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2163 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2292 "Python/bytecodes.c" _PyRangeIterObject *r = (_PyRangeIterObject *)iter; DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -3140,7 +3268,7 @@ if (next == NULL) { goto error; } - #line 3144 "Python/generated_cases.c.h" + #line 3272 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3149,29 +3277,29 @@ TARGET(FOR_ITER_GEN) { PyObject *iter = stack_pointer[-1]; - #line 2184 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2312 "Python/bytecodes.c" PyGenObject *gen = (PyGenObject *)iter; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, 
FOR_ITER); STAT_INC(FOR_ITER, hit); _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; - frame->yield_offset = oparg; + frame->return_offset = oparg; _PyFrame_StackPush(gen_frame, Py_NewRef(Py_None)); gen->gi_frame_state = FRAME_EXECUTING; gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; - JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg); - assert(next_instr->op.code == END_FOR); + JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER); + assert(next_instr[oparg].op.code == END_FOR || + next_instr[oparg].op.code == INSTRUMENTED_END_FOR); DISPATCH_INLINED(gen_frame); - #line 3168 "Python/generated_cases.c.h" + #line 3296 "Python/generated_cases.c.h" } TARGET(BEFORE_ASYNC_WITH) { PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; - #line 2201 "Python/bytecodes.c" + #line 2329 "Python/bytecodes.c" PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); if (enter == NULL) { if (!_PyErr_Occurred(tstate)) { @@ -3194,16 +3322,16 @@ Py_DECREF(enter); goto error; } - #line 3198 "Python/generated_cases.c.h" + #line 3326 "Python/generated_cases.c.h" Py_DECREF(mgr); - #line 2224 "Python/bytecodes.c" + #line 2352 "Python/bytecodes.c" res = _PyObject_CallNoArgs(enter); Py_DECREF(enter); if (res == NULL) { Py_DECREF(exit); if (true) goto pop_1_error; } - #line 3207 "Python/generated_cases.c.h" + #line 3335 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; stack_pointer[-2] = exit; @@ -3215,7 +3343,7 @@ PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; - #line 2234 "Python/bytecodes.c" + #line 2362 "Python/bytecodes.c" /* pop the context manager, push its __exit__ and the * value returned from calling its __enter__ */ @@ -3241,16 +3369,16 @@ Py_DECREF(enter); goto error; } - #line 3245 "Python/generated_cases.c.h" + #line 3373 "Python/generated_cases.c.h" Py_DECREF(mgr); - #line 2260 "Python/bytecodes.c" + #line 2388 "Python/bytecodes.c" res = _PyObject_CallNoArgs(enter); Py_DECREF(enter); if (res == NULL) { Py_DECREF(exit); if (true) goto pop_1_error; } - #line 3254 "Python/generated_cases.c.h" + #line 3382 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; stack_pointer[-2] = exit; @@ -3262,7 +3390,7 @@ PyObject *lasti = stack_pointer[-3]; PyObject *exit_func = stack_pointer[-4]; PyObject *res; - #line 2269 "Python/bytecodes.c" + #line 2397 "Python/bytecodes.c" /* At the top of the stack are 4 values: - val: TOP = exc_info() - unused: SECOND = previous exception @@ -3283,7 +3411,7 @@ res = PyObject_Vectorcall(exit_func, stack + 1, 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); if (res == NULL) goto error; - #line 3287 "Python/generated_cases.c.h" + #line 3415 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); @@ -3292,7 +3420,7 @@ TARGET(PUSH_EXC_INFO) { PyObject *new_exc = stack_pointer[-1]; PyObject *prev_exc; - #line 2292 "Python/bytecodes.c" + #line 2420 "Python/bytecodes.c" _PyErr_StackItem *exc_info = tstate->exc_info; if (exc_info->exc_value != NULL) { prev_exc = exc_info->exc_value; @@ -3302,7 +3430,7 @@ } assert(PyExceptionInstance_Check(new_exc)); exc_info->exc_value = Py_NewRef(new_exc); - #line 3306 "Python/generated_cases.c.h" + #line 3434 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = new_exc; stack_pointer[-2] = prev_exc; @@ -3316,9 +3444,8 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t keys_version = read_u32(&next_instr[3].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2304 
"Python/bytecodes.c" + #line 2432 "Python/bytecodes.c" /* Cached method object */ - assert(cframe.use_tracing == 0); PyTypeObject *self_cls = Py_TYPE(self); assert(type_version != 0); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); @@ -3334,7 +3461,7 @@ assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR)); res = self; assert(oparg & 1); - #line 3338 "Python/generated_cases.c.h" + #line 3465 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -3348,8 +3475,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2324 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2451 "Python/bytecodes.c" PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); assert(self_cls->tp_dictoffset == 0); @@ -3359,7 +3485,7 @@ res2 = Py_NewRef(descr); res = self; assert(oparg & 1); - #line 3363 "Python/generated_cases.c.h" + #line 3489 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -3373,8 +3499,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2337 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2463 "Python/bytecodes.c" PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); Py_ssize_t dictoffset = self_cls->tp_dictoffset; @@ -3388,7 +3513,7 @@ res2 = Py_NewRef(descr); res = self; assert(oparg & 1); - #line 3392 "Python/generated_cases.c.h" + #line 3517 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -3397,22 +3522,39 @@ } TARGET(KW_NAMES) { - #line 2354 "Python/bytecodes.c" + #line 2479 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg < PyTuple_GET_SIZE(frame->f_code->co_consts)); kwnames = GETITEM(frame->f_code->co_consts, oparg); - #line 3405 "Python/generated_cases.c.h" + #line 3530 "Python/generated_cases.c.h" DISPATCH(); } + TARGET(INSTRUMENTED_CALL) { + #line 2485 "Python/bytecodes.c" + int is_meth = PEEK(oparg+2) != NULL; + int total_args = oparg + is_meth; + PyObject *function = PEEK(total_args + 1); + PyObject *arg = total_args == 0 ? 
+ &_PyInstrumentation_MISSING : PEEK(total_args); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_CALL, + frame, next_instr-1, function, arg); + if (err) goto error; + _PyCallCache *cache = (_PyCallCache *)next_instr; + INCREMENT_ADAPTIVE_COUNTER(cache->counter); + GO_TO_INSTRUCTION(CALL); + #line 3548 "Python/generated_cases.c.h" + } + TARGET(CALL) { PREDICTED(CALL); - static_assert(INLINE_CACHE_ENTRIES_CALL == 4, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); PyObject **args = (stack_pointer - oparg); PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2390 "Python/bytecodes.c" + #line 2530 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -3423,7 +3565,6 @@ #if ENABLE_SPECIALIZATION _PyCallCache *cache = (_PyCallCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_Call(callable, next_instr, total_args, kwnames); DISPATCH_SAME_OPARG(); @@ -3463,19 +3604,30 @@ goto error; } JUMPBY(INLINE_CACHE_ENTRIES_CALL); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); } /* Callable is not a normal Python function */ - if (cframe.use_tracing) { - res = trace_call_function( - tstate, callable, args, - positional_args, kwnames); - } - else { - res = PyObject_Vectorcall( - callable, args, - positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, - kwnames); + res = PyObject_Vectorcall( + callable, args, + positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames); + if (opcode == INSTRUMENTED_CALL) { + PyObject *arg = total_args == 0 ? + &_PyInstrumentation_MISSING : PEEK(total_args); + if (res == NULL) { + _Py_call_instrumentation_exc2( + tstate, PY_MONITORING_EVENT_C_RAISE, + frame, next_instr-1, callable, arg); + } + else { + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_C_RETURN, + frame, next_instr-1, callable, arg); + if (err < 0) { + Py_CLEAR(res); + } + } } kwnames = NULL; assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -3484,11 +3636,11 @@ Py_DECREF(args[i]); } if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3488 "Python/generated_cases.c.h" + #line 3640 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -3496,7 +3648,7 @@ TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; - #line 2468 "Python/bytecodes.c" + #line 2618 "Python/bytecodes.c" DEOPT_IF(method != NULL, CALL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); STAT_INC(CALL, hit); @@ -3506,7 +3658,7 @@ PEEK(oparg + 2) = Py_NewRef(meth); // method Py_DECREF(callable); GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); - #line 3510 "Python/generated_cases.c.h" + #line 3662 "Python/generated_cases.c.h" } TARGET(CALL_PY_EXACT_ARGS) { @@ -3515,7 +3667,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); - #line 2480 "Python/bytecodes.c" + #line 2630 "Python/bytecodes.c" assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -3539,8 +3691,9 @@ // Manipulate stack directly since we leave using DISPATCH_INLINED(). 
STACK_SHRINK(oparg + 2); JUMPBY(INLINE_CACHE_ENTRIES_CALL); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 3544 "Python/generated_cases.c.h" + #line 3697 "Python/generated_cases.c.h" } TARGET(CALL_PY_WITH_DEFAULTS) { @@ -3548,8 +3701,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); - uint16_t min_args = read_u16(&next_instr[3].cache); - #line 2507 "Python/bytecodes.c" + #line 2658 "Python/bytecodes.c" assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -3563,6 +3715,11 @@ PyFunctionObject *func = (PyFunctionObject *)callable; DEOPT_IF(func->func_version != func_version, CALL); PyCodeObject *code = (PyCodeObject *)func->func_code; + assert(func->func_defaults); + assert(PyTuple_CheckExact(func->func_defaults)); + int defcount = (int)PyTuple_GET_SIZE(func->func_defaults); + assert(defcount <= code->co_argcount); + int min_args = code->co_argcount - defcount; DEOPT_IF(argcount > code->co_argcount, CALL); DEOPT_IF(argcount < min_args, CALL); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), CALL); @@ -3578,8 +3735,9 @@ // Manipulate stack and cache directly since we leave using DISPATCH_INLINED(). STACK_SHRINK(oparg + 2); JUMPBY(INLINE_CACHE_ENTRIES_CALL); + frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 3583 "Python/generated_cases.c.h" + #line 3741 "Python/generated_cases.c.h" } TARGET(CALL_NO_KW_TYPE_1) { @@ -3587,9 +3745,8 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2539 "Python/bytecodes.c" + #line 2696 "Python/bytecodes.c" assert(kwnames == NULL); - assert(cframe.use_tracing == 0); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); PyObject *obj = args[0]; @@ -3598,11 +3755,11 @@ res = Py_NewRef(Py_TYPE(obj)); Py_DECREF(obj); Py_DECREF(&PyType_Type); // I.e., callable - #line 3602 "Python/generated_cases.c.h" + #line 3759 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; DISPATCH(); } @@ -3611,9 +3768,8 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2552 "Python/bytecodes.c" + #line 2708 "Python/bytecodes.c" assert(kwnames == NULL); - assert(cframe.use_tracing == 0); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); DEOPT_IF(callable != (PyObject *)&PyUnicode_Type, CALL); @@ -3623,11 +3779,11 @@ Py_DECREF(arg); Py_DECREF(&PyUnicode_Type); // I.e., callable if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3627 "Python/generated_cases.c.h" + #line 3783 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -3637,7 +3793,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2567 "Python/bytecodes.c" + #line 2722 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3648,11 +3804,11 @@ Py_DECREF(arg); Py_DECREF(&PyTuple_Type); // I.e., tuple if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3652 "Python/generated_cases.c.h" + #line 3808 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); 
DISPATCH(); } @@ -3662,7 +3818,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2581 "Python/bytecodes.c" + #line 2736 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -3684,11 +3840,11 @@ } Py_DECREF(tp); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3688 "Python/generated_cases.c.h" + #line 3844 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -3698,8 +3854,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2606 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2761 "Python/bytecodes.c" /* Builtin METH_O functions */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -3727,11 +3882,11 @@ Py_DECREF(arg); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3731 "Python/generated_cases.c.h" + #line 3886 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -3741,8 +3896,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2638 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2792 "Python/bytecodes.c" /* Builtin METH_FASTCALL functions, without keywords */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -3774,11 +3928,11 @@ 'invalid'). In those cases an exception is set, so we must handle it. */ - #line 3778 "Python/generated_cases.c.h" + #line 3932 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -3788,8 +3942,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2674 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2827 "Python/bytecodes.c" /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ int is_meth = method != NULL; int total_args = oparg; @@ -3821,11 +3974,11 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3825 "Python/generated_cases.c.h" + #line 3978 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -3835,8 +3988,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2710 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2862 "Python/bytecodes.c" assert(kwnames == NULL); /* len(o) */ int is_meth = method != NULL; @@ -3861,11 +4013,11 @@ Py_DECREF(callable); Py_DECREF(arg); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3865 "Python/generated_cases.c.h" + #line 4017 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; DISPATCH(); } @@ -3874,8 +4026,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2738 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2889 "Python/bytecodes.c" assert(kwnames == NULL); /* isinstance(o, o2) */ int is_meth = method != 
NULL; @@ -3902,11 +4053,11 @@ Py_DECREF(cls); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3906 "Python/generated_cases.c.h" + #line 4057 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; DISPATCH(); } @@ -3914,8 +4065,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *self = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; - #line 2769 "Python/bytecodes.c" - assert(cframe.use_tracing == 0); + #line 2919 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); assert(method != NULL); @@ -3933,14 +4083,14 @@ JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); assert(next_instr[-1].op.code == POP_TOP); DISPATCH(); - #line 3937 "Python/generated_cases.c.h" + #line 4087 "Python/generated_cases.c.h" } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2790 "Python/bytecodes.c" + #line 2939 "Python/bytecodes.c" assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -3971,11 +4121,11 @@ Py_DECREF(arg); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3975 "Python/generated_cases.c.h" + #line 4125 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -3984,7 +4134,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2824 "Python/bytecodes.c" + #line 2973 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -4013,11 +4163,11 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4017 "Python/generated_cases.c.h" + #line 4167 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -4026,7 +4176,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2856 "Python/bytecodes.c" + #line 3005 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 0 || oparg == 1); int is_meth = method != NULL; @@ -4055,11 +4205,11 @@ Py_DECREF(self); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4059 "Python/generated_cases.c.h" + #line 4209 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -4068,7 +4218,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2888 "Python/bytecodes.c" + #line 3037 "Python/bytecodes.c" assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -4096,27 +4246,31 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4100 "Python/generated_cases.c.h" + #line 4250 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; - next_instr += 4; + next_instr += 3; CHECK_EVAL_BREAKER(); DISPATCH(); } + TARGET(INSTRUMENTED_CALL_FUNCTION_EX) { + #line 3068 "Python/bytecodes.c" + GO_TO_INSTRUCTION(CALL_FUNCTION_EX); + #line 4262 "Python/generated_cases.c.h" + } + TARGET(CALL_FUNCTION_EX) { PREDICTED(CALL_FUNCTION_EX); PyObject 
*kwargs = (oparg & 1) ? stack_pointer[-(((oparg & 1) ? 1 : 0))] : NULL; PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))]; PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 1 : 0))]; PyObject *result; - #line 2919 "Python/bytecodes.c" - if (oparg & 1) { - // DICT_MERGE is called before this opcode if there are kwargs. - // It converts all dict subtypes in kwargs into regular dicts. - assert(PyDict_CheckExact(kwargs)); - } + #line 3072 "Python/bytecodes.c" + // DICT_MERGE is called before this opcode if there are kwargs. + // It converts all dict subtypes in kwargs into regular dicts. + assert(kwargs == NULL || PyDict_CheckExact(kwargs)); if (!PyTuple_CheckExact(callargs)) { if (check_args_iterable(tstate, func, callargs) < 0) { goto error; @@ -4128,17 +4282,61 @@ Py_SETREF(callargs, tuple); } assert(PyTuple_CheckExact(callargs)); - - result = do_call_core(tstate, func, callargs, kwargs, cframe.use_tracing); - #line 4134 "Python/generated_cases.c.h" + EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func); + if (opcode == INSTRUMENTED_CALL_FUNCTION_EX && + !PyFunction_Check(func) && !PyMethod_Check(func) + ) { + PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ? + PyTuple_GET_ITEM(callargs, 0) : Py_None; + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_CALL, + frame, next_instr-1, func, arg); + if (err) goto error; + result = PyObject_Call(func, callargs, kwargs); + if (result == NULL) { + _Py_call_instrumentation_exc2( + tstate, PY_MONITORING_EVENT_C_RAISE, + frame, next_instr-1, func, arg); + } + else { + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_C_RETURN, + frame, next_instr-1, func, arg); + if (err < 0) { + Py_CLEAR(result); + } + } + } + else { + if (Py_TYPE(func) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)func)->vectorcall == _PyFunction_Vectorcall) { + assert(PyTuple_CheckExact(callargs)); + Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); + int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); + + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate, + (PyFunctionObject *)func, locals, + nargs, callargs, kwargs); + // Need to manually shrink the stack since we exit with DISPATCH_INLINED. + STACK_SHRINK(oparg + 3); + if (new_frame == NULL) { + goto error; + } + frame->return_offset = 0; + DISPATCH_INLINED(new_frame); + } + result = PyObject_Call(func, callargs, kwargs); + } + #line 4333 "Python/generated_cases.c.h" Py_DECREF(func); Py_DECREF(callargs); Py_XDECREF(kwargs); - #line 2938 "Python/bytecodes.c" - + #line 3134 "Python/bytecodes.c" assert(PEEK(3 + (oparg & 1)) == NULL); if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; } - #line 4142 "Python/generated_cases.c.h" + #line 4340 "Python/generated_cases.c.h" STACK_SHRINK(((oparg & 1) ? 1 : 0)); STACK_SHRINK(2); stack_pointer[-1] = result; @@ -4153,7 +4351,7 @@ PyObject *kwdefaults = (oparg & 0x02) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0))] : NULL; PyObject *defaults = (oparg & 0x01) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 
1 : 0))] : NULL; PyObject *func; - #line 2949 "Python/bytecodes.c" + #line 3144 "Python/bytecodes.c" PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); @@ -4182,14 +4380,14 @@ func_obj->func_version = ((PyCodeObject *)codeobj)->co_version; func = (PyObject *)func_obj; - #line 4186 "Python/generated_cases.c.h" + #line 4384 "Python/generated_cases.c.h" STACK_SHRINK(((oparg & 0x01) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x08) ? 1 : 0)); stack_pointer[-1] = func; DISPATCH(); } TARGET(RETURN_GENERATOR) { - #line 2980 "Python/bytecodes.c" + #line 3175 "Python/bytecodes.c" assert(PyFunction_Check(frame->f_funcobj)); PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); @@ -4210,7 +4408,7 @@ frame = cframe.current_frame = prev; _PyFrame_StackPush(frame, (PyObject *)gen); goto resume_frame; - #line 4214 "Python/generated_cases.c.h" + #line 4412 "Python/generated_cases.c.h" } TARGET(BUILD_SLICE) { @@ -4218,15 +4416,15 @@ PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))]; PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))]; PyObject *slice; - #line 3003 "Python/bytecodes.c" + #line 3198 "Python/bytecodes.c" slice = PySlice_New(start, stop, step); - #line 4224 "Python/generated_cases.c.h" + #line 4422 "Python/generated_cases.c.h" Py_DECREF(start); Py_DECREF(stop); Py_XDECREF(step); - #line 3005 "Python/bytecodes.c" + #line 3200 "Python/bytecodes.c" if (slice == NULL) { STACK_SHRINK(((oparg == 3) ? 1 : 0)); goto pop_2_error; } - #line 4230 "Python/generated_cases.c.h" + #line 4428 "Python/generated_cases.c.h" STACK_SHRINK(((oparg == 3) ? 1 : 0)); STACK_SHRINK(1); stack_pointer[-1] = slice; @@ -4237,7 +4435,7 @@ PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? stack_pointer[-((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))] : NULL; PyObject *value = stack_pointer[-(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))]; PyObject *result; - #line 3009 "Python/bytecodes.c" + #line 3204 "Python/bytecodes.c" /* Handles f-string value formatting. */ PyObject *(*conv_fn)(PyObject *); int which_conversion = oparg & FVC_MASK; @@ -4268,24 +4466,11 @@ value = result; } - /* If value is a unicode object, and there's no fmt_spec, - then we know the result of format(value) is value - itself. In that case, skip calling format(). I plan to - move this optimization in to PyObject_Format() - itself. */ - if (PyUnicode_CheckExact(value) && fmt_spec == NULL) { - /* Do nothing, just transfer ownership to result. */ - result = value; - } else { - /* Actually call format(). */ - result = PyObject_Format(value, fmt_spec); - #line 4283 "Python/generated_cases.c.h" - Py_DECREF(value); - Py_XDECREF(fmt_spec); - #line 3051 "Python/bytecodes.c" - if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; } - } - #line 4289 "Python/generated_cases.c.h" + result = PyObject_Format(value, fmt_spec); + Py_DECREF(value); + Py_XDECREF(fmt_spec); + if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; } + #line 4474 "Python/generated_cases.c.h" STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 
1 : 0)); stack_pointer[-1] = result; DISPATCH(); @@ -4294,10 +4479,10 @@ TARGET(COPY) { PyObject *bottom = stack_pointer[-(1 + (oparg-1))]; PyObject *top; - #line 3056 "Python/bytecodes.c" + #line 3241 "Python/bytecodes.c" assert(oparg > 0); top = Py_NewRef(bottom); - #line 4301 "Python/generated_cases.c.h" + #line 4486 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = top; DISPATCH(); @@ -4309,11 +4494,10 @@ PyObject *rhs = stack_pointer[-1]; PyObject *lhs = stack_pointer[-2]; PyObject *res; - #line 3061 "Python/bytecodes.c" + #line 3246 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { - assert(cframe.use_tracing == 0); next_instr--; _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, &GETLOCAL(0)); DISPATCH_SAME_OPARG(); @@ -4325,12 +4509,12 @@ assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); assert(binary_ops[oparg]); res = binary_ops[oparg](lhs, rhs); - #line 4329 "Python/generated_cases.c.h" + #line 4513 "Python/generated_cases.c.h" Py_DECREF(lhs); Py_DECREF(rhs); - #line 3077 "Python/bytecodes.c" + #line 3261 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 4334 "Python/generated_cases.c.h" + #line 4518 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -4340,27 +4524,153 @@ TARGET(SWAP) { PyObject *top = stack_pointer[-1]; PyObject *bottom = stack_pointer[-(2 + (oparg-2))]; - #line 3082 "Python/bytecodes.c" + #line 3266 "Python/bytecodes.c" assert(oparg >= 2); - #line 4346 "Python/generated_cases.c.h" + #line 4530 "Python/generated_cases.c.h" stack_pointer[-1] = bottom; stack_pointer[-(2 + (oparg-2))] = top; DISPATCH(); } + TARGET(INSTRUMENTED_LINE) { + #line 3270 "Python/bytecodes.c" + _Py_CODEUNIT *here = next_instr-1; + _PyFrame_SetStackPointer(frame, stack_pointer); + int original_opcode = _Py_call_instrumentation_line( + tstate, frame, here); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (original_opcode < 0) { + next_instr = here+1; + goto error; + } + next_instr = frame->prev_instr; + if (next_instr != here) { + DISPATCH(); + } + if (_PyOpcode_Caches[original_opcode]) { + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1); + INCREMENT_ADAPTIVE_COUNTER(cache->counter); + } + opcode = original_opcode; + DISPATCH_GOTO(); + #line 4557 "Python/generated_cases.c.h" + } + + TARGET(INSTRUMENTED_INSTRUCTION) { + #line 3292 "Python/bytecodes.c" + int next_opcode = _Py_call_instrumentation_instruction( + tstate, frame, next_instr-1); + if (next_opcode < 0) goto error; + next_instr--; + if (_PyOpcode_Caches[next_opcode]) { + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1); + INCREMENT_ADAPTIVE_COUNTER(cache->counter); + } + assert(next_opcode > 0 && next_opcode < 256); + opcode = next_opcode; + DISPATCH_GOTO(); + #line 4573 "Python/generated_cases.c.h" + } + + TARGET(INSTRUMENTED_JUMP_FORWARD) { + #line 3306 "Python/bytecodes.c" + INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP); + #line 4579 "Python/generated_cases.c.h" + DISPATCH(); + } + + TARGET(INSTRUMENTED_JUMP_BACKWARD) { + #line 3310 "Python/bytecodes.c" + INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP); + #line 4586 "Python/generated_cases.c.h" + CHECK_EVAL_BREAKER(); + DISPATCH(); + } + + TARGET(INSTRUMENTED_POP_JUMP_IF_TRUE) { + #line 3315 "Python/bytecodes.c" + PyObject *cond = POP(); + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err < 0) goto 
error; + _Py_CODEUNIT *here = next_instr-1; + assert(err == 0 || err == 1); + int offset = err*oparg; + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + #line 4601 "Python/generated_cases.c.h" + DISPATCH(); + } + + TARGET(INSTRUMENTED_POP_JUMP_IF_FALSE) { + #line 3326 "Python/bytecodes.c" + PyObject *cond = POP(); + int err = PyObject_IsTrue(cond); + Py_DECREF(cond); + if (err < 0) goto error; + _Py_CODEUNIT *here = next_instr-1; + assert(err == 0 || err == 1); + int offset = (1-err)*oparg; + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + #line 4615 "Python/generated_cases.c.h" + DISPATCH(); + } + + TARGET(INSTRUMENTED_POP_JUMP_IF_NONE) { + #line 3337 "Python/bytecodes.c" + PyObject *value = POP(); + _Py_CODEUNIT *here = next_instr-1; + int offset; + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + offset = oparg; + } + else { + Py_DECREF(value); + offset = 0; + } + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + #line 4633 "Python/generated_cases.c.h" + DISPATCH(); + } + + TARGET(INSTRUMENTED_POP_JUMP_IF_NOT_NONE) { + #line 3352 "Python/bytecodes.c" + PyObject *value = POP(); + _Py_CODEUNIT *here = next_instr-1; + int offset; + if (Py_IsNone(value)) { + _Py_DECREF_NO_DEALLOC(value); + offset = 0; + } + else { + Py_DECREF(value); + offset = oparg; + } + INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); + #line 4651 "Python/generated_cases.c.h" + DISPATCH(); + } + TARGET(EXTENDED_ARG) { - #line 3086 "Python/bytecodes.c" + #line 3367 "Python/bytecodes.c" assert(oparg); - assert(cframe.use_tracing == 0); opcode = next_instr->op.code; oparg = oparg << 8 | next_instr->op.arg; PRE_DISPATCH_GOTO(); DISPATCH_GOTO(); - #line 4360 "Python/generated_cases.c.h" + #line 4662 "Python/generated_cases.c.h" } TARGET(CACHE) { - #line 3095 "Python/bytecodes.c" + #line 3375 "Python/bytecodes.c" + assert(0 && "Executing a cache."); + Py_UNREACHABLE(); + #line 4669 "Python/generated_cases.c.h" + } + + TARGET(RESERVED) { + #line 3380 "Python/bytecodes.c" + assert(0 && "Executing RESERVED instruction."); Py_UNREACHABLE(); - #line 4366 "Python/generated_cases.c.h" + #line 4676 "Python/generated_cases.c.h" } diff --git a/Python/import.c b/Python/import.c index 9f80c6d8dd49a8..0bf107b28d3990 100644 --- a/Python/import.c +++ b/Python/import.c @@ -389,8 +389,7 @@ PyImport_AddModule(const char *name) static void remove_module(PyThreadState *tstate, PyObject *name) { - PyObject *type, *value, *traceback; - _PyErr_Fetch(tstate, &type, &value, &traceback); + PyObject *exc = _PyErr_GetRaisedException(tstate); PyObject *modules = MODULES(tstate->interp); if (PyDict_CheckExact(modules)) { @@ -403,7 +402,7 @@ remove_module(PyThreadState *tstate, PyObject *name) } } - _PyErr_ChainExceptions(type, value, traceback); + _PyErr_ChainExceptions1(exc); } @@ -414,8 +413,11 @@ remove_module(PyThreadState *tstate, PyObject *name) Py_ssize_t _PyImport_GetNextModuleIndex(void) { + PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK); LAST_MODULE_INDEX++; - return LAST_MODULE_INDEX; + Py_ssize_t index = LAST_MODULE_INDEX; + PyThread_release_lock(EXTENSIONS.mutex); + return index; } static const char * @@ -592,11 +594,11 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) /* It may help to have a big picture view of what happens when an extension is loaded. This includes when it is imported - for the first time or via imp.load_dynamic(). + for the first time. 
- Here's a summary, using imp.load_dynamic() as the starting point: + Here's a summary, using importlib._bootstrap._load() as a starting point. - 1. imp.load_dynamic() -> importlib._bootstrap._load() + 1. importlib._bootstrap._load() 2. _load(): acquire import lock 3. _load() -> importlib._bootstrap._load_unlocked() 4. _load_unlocked() -> importlib._bootstrap.module_from_spec() @@ -704,6 +706,7 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) const char * _PyImport_ResolveNameWithPackageContext(const char *name) { + PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK); if (PKGCONTEXT != NULL) { const char *p = strrchr(PKGCONTEXT, '.'); if (p != NULL && strcmp(name, p+1) == 0) { @@ -711,14 +714,17 @@ _PyImport_ResolveNameWithPackageContext(const char *name) PKGCONTEXT = NULL; } } + PyThread_release_lock(EXTENSIONS.mutex); return name; } const char * _PyImport_SwapPackageContext(const char *newcontext) { + PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK); const char *oldcontext = PKGCONTEXT; PKGCONTEXT = newcontext; + PyThread_release_lock(EXTENSIONS.mutex); return oldcontext; } @@ -863,6 +869,18 @@ Generally, when multiple interpreters are involved, some of the above gets even messier. */ +static inline void +extensions_lock_acquire(void) +{ + PyThread_acquire_lock(_PyRuntime.imports.extensions.mutex, WAIT_LOCK); +} + +static inline void +extensions_lock_release(void) +{ + PyThread_release_lock(_PyRuntime.imports.extensions.mutex); +} + /* Magic for extension modules (built-in as well as dynamically loaded). To prevent initializing an extension module more than once, we keep a static dictionary 'extensions' keyed by the tuple @@ -879,71 +897,170 @@ gets even messier. dictionary, to avoid loading shared libraries twice. */ +static void +_extensions_cache_init(void) +{ + /* The runtime (i.e. main interpreter) must be initializing, + so we don't need to worry about the lock. */ + _PyThreadState_InitDetached(&EXTENSIONS.main_tstate, + _PyInterpreterState_Main()); +} + static PyModuleDef * _extensions_cache_get(PyObject *filename, PyObject *name) { - PyObject *extensions = EXTENSIONS; - if (extensions == NULL) { - return NULL; - } + PyModuleDef *def = NULL; + extensions_lock_acquire(); + PyObject *key = PyTuple_Pack(2, filename, name); if (key == NULL) { - return NULL; + goto finally; + } + + PyObject *extensions = EXTENSIONS.dict; + if (extensions == NULL) { + goto finally; } - PyModuleDef *def = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); - Py_DECREF(key); + def = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); + +finally: + Py_XDECREF(key); + extensions_lock_release(); return def; } static int _extensions_cache_set(PyObject *filename, PyObject *name, PyModuleDef *def) { - PyObject *extensions = EXTENSIONS; + int res = -1; + PyThreadState *oldts = NULL; + extensions_lock_acquire(); + + /* Swap to the main interpreter, if necessary. This matters if + the dict hasn't been created yet or if the item isn't in the + dict yet. In both cases we must ensure the relevant objects + are created using the main interpreter. */ + PyThreadState *main_tstate = &EXTENSIONS.main_tstate; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (!_Py_IsMainInterpreter(interp)) { + _PyThreadState_BindDetached(main_tstate); + oldts = _PyThreadState_Swap(interp->runtime, main_tstate); + assert(!_Py_IsMainInterpreter(oldts->interp)); + + /* Make sure the name and filename objects are owned + by the main interpreter. 
*/ + name = PyUnicode_InternFromString(PyUnicode_AsUTF8(name)); + assert(name != NULL); + filename = PyUnicode_InternFromString(PyUnicode_AsUTF8(filename)); + assert(filename != NULL); + } + + PyObject *key = PyTuple_Pack(2, filename, name); + if (key == NULL) { + goto finally; + } + + PyObject *extensions = EXTENSIONS.dict; if (extensions == NULL) { extensions = PyDict_New(); if (extensions == NULL) { - return -1; + goto finally; } - EXTENSIONS = extensions; + EXTENSIONS.dict = extensions; } - PyObject *key = PyTuple_Pack(2, filename, name); - if (key == NULL) { - return -1; + + PyModuleDef *actual = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); + if (PyErr_Occurred()) { + goto finally; + } + else if (actual != NULL) { + /* We expect it to be static, so it must be the same pointer. */ + assert(def == actual); + res = 0; + goto finally; } - int res = PyDict_SetItem(extensions, key, (PyObject *)def); - Py_DECREF(key); + + /* This might trigger a resize, which is why we must switch + to the main interpreter. */ + res = PyDict_SetItem(extensions, key, (PyObject *)def); if (res < 0) { - return -1; + res = -1; + goto finally; } - return 0; + res = 0; + +finally: + Py_XDECREF(key); + if (oldts != NULL) { + _PyThreadState_Swap(interp->runtime, oldts); + _PyThreadState_UnbindDetached(main_tstate); + Py_DECREF(name); + Py_DECREF(filename); + } + extensions_lock_release(); + return res; } static int _extensions_cache_delete(PyObject *filename, PyObject *name) { - PyObject *extensions = EXTENSIONS; - if (extensions == NULL) { - return 0; - } + int res = -1; + PyThreadState *oldts = NULL; + extensions_lock_acquire(); + PyObject *key = PyTuple_Pack(2, filename, name); if (key == NULL) { - return -1; + goto finally; + } + + PyObject *extensions = EXTENSIONS.dict; + if (extensions == NULL) { + res = 0; + goto finally; + } + + PyModuleDef *actual = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); + if (PyErr_Occurred()) { + goto finally; + } + else if (actual == NULL) { + /* It was already removed or never added. */ + res = 0; + goto finally; + } + + /* Swap to the main interpreter, if necessary. */ + PyThreadState *main_tstate = &EXTENSIONS.main_tstate; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (!_Py_IsMainInterpreter(interp)) { + _PyThreadState_BindDetached(main_tstate); + oldts = _PyThreadState_Swap(interp->runtime, main_tstate); + assert(!_Py_IsMainInterpreter(oldts->interp)); } + if (PyDict_DelItem(extensions, key) < 0) { - if (!PyErr_ExceptionMatches(PyExc_KeyError)) { - Py_DECREF(key); - return -1; - } - PyErr_Clear(); + goto finally; } - Py_DECREF(key); - return 0; + res = 0; + +finally: + if (oldts != NULL) { + _PyThreadState_Swap(interp->runtime, oldts); + _PyThreadState_UnbindDetached(main_tstate); + } + Py_XDECREF(key); + extensions_lock_release(); + return res; } static void _extensions_cache_clear_all(void) { - Py_CLEAR(EXTENSIONS); + /* The runtime (i.e. main interpreter) must be finalizing, + so we don't need to worry about the lock. */ + // XXX assert(_Py_IsMainInterpreter(_PyInterpreterState_GET())); + Py_CLEAR(EXTENSIONS.dict); + _PyThreadState_ClearDetached(&EXTENSIONS.main_tstate); } @@ -1000,7 +1117,17 @@ get_core_module_dict(PyInterpreterState *interp, static inline int is_core_module(PyInterpreterState *interp, PyObject *name, PyObject *filename) { - return get_core_module_dict(interp, name, filename) != NULL; + /* This might be called before the core dict copies are in place, + so we can't rely on get_core_module_dict() here. 
*/ + if (filename == name) { + if (PyUnicode_CompareWithASCIIString(name, "sys") == 0) { + return 1; + } + if (PyUnicode_CompareWithASCIIString(name, "builtins") == 0) { + return 1; + } + } + return 0; } static int @@ -1026,6 +1153,8 @@ fix_up_extension(PyObject *mod, PyObject *name, PyObject *filename) // when the extension module doesn't support sub-interpreters. if (def->m_size == -1) { if (!is_core_module(tstate->interp, name, filename)) { + assert(PyUnicode_CompareWithASCIIString(name, "sys") != 0); + assert(PyUnicode_CompareWithASCIIString(name, "builtins") != 0); if (def->m_base.m_copy) { /* Somebody already imported the module, likely under a different name. @@ -1899,9 +2028,9 @@ find_frozen(PyObject *nameobj, struct frozen_info *info) } static PyObject * -unmarshal_frozen_code(struct frozen_info *info) +unmarshal_frozen_code(PyInterpreterState *interp, struct frozen_info *info) { - if (info->get_code) { + if (info->get_code && _Py_IsMainInterpreter(interp)) { PyObject *code = info->get_code(); assert(code != NULL); return code; @@ -1948,7 +2077,7 @@ PyImport_ImportFrozenModuleObject(PyObject *name) set_frozen_error(status, name); return -1; } - co = unmarshal_frozen_code(&info); + co = unmarshal_frozen_code(tstate->interp, &info); if (co == NULL) { return -1; } @@ -2324,32 +2453,34 @@ remove_importlib_frames(PyThreadState *tstate) const char *remove_frames = "_call_with_frames_removed"; int always_trim = 0; int in_importlib = 0; - PyObject *exception, *value, *base_tb, *tb; PyObject **prev_link, **outer_link = NULL; + PyObject *base_tb = NULL; /* Synopsis: if it's an ImportError, we trim all importlib chunks from the traceback. We always trim chunks which end with a call to "_call_with_frames_removed". */ - _PyErr_Fetch(tstate, &exception, &value, &base_tb); - if (!exception || _PyInterpreterState_GetConfig(tstate->interp)->verbose) { + PyObject *exc = _PyErr_GetRaisedException(tstate); + if (exc == NULL || _PyInterpreterState_GetConfig(tstate->interp)->verbose) { goto done; } - if (PyType_IsSubtype((PyTypeObject *) exception, - (PyTypeObject *) PyExc_ImportError)) + if (PyType_IsSubtype(Py_TYPE(exc), (PyTypeObject *) PyExc_ImportError)) { always_trim = 1; + } + assert(PyExceptionInstance_Check(exc)); + base_tb = PyException_GetTraceback(exc); prev_link = &base_tb; - tb = base_tb; + PyObject *tb = base_tb; while (tb != NULL) { + assert(PyTraceBack_Check(tb)); PyTracebackObject *traceback = (PyTracebackObject *)tb; PyObject *next = (PyObject *) traceback->tb_next; PyFrameObject *frame = traceback->tb_frame; PyCodeObject *code = PyFrame_GetCode(frame); int now_in_importlib; - assert(PyTraceBack_Check(tb)); now_in_importlib = _PyUnicode_EqualToASCIIString(code->co_filename, importlib_filename) || _PyUnicode_EqualToASCIIString(code->co_filename, external_filename); if (now_in_importlib && !in_importlib) { @@ -2370,15 +2501,14 @@ remove_importlib_frames(PyThreadState *tstate) Py_DECREF(code); tb = next; } - assert(PyExceptionInstance_Check(value)); - assert((PyObject *)Py_TYPE(value) == exception); if (base_tb == NULL) { base_tb = Py_None; Py_INCREF(Py_None); } - PyException_SetTraceback(value, base_tb); + PyException_SetTraceback(exc, base_tb); done: - _PyErr_Restore(tstate, exception, value, base_tb); + Py_XDECREF(base_tb); + _PyErr_SetRaisedException(tstate, exc); } @@ -2941,6 +3071,10 @@ _PyImport_Fini2(void) PyStatus _PyImport_InitCore(PyThreadState *tstate, PyObject *sysmod, int importlib) { + if (_Py_IsMainInterpreter(tstate->interp)) { + _extensions_cache_init(); + } + // XXX 
Initialize here: interp->modules and interp->import_func. // XXX Initialize here: sys.modules and sys.meta_path. @@ -3401,7 +3535,8 @@ _imp_get_frozen_object_impl(PyObject *module, PyObject *name, return NULL; } - PyObject *codeobj = unmarshal_frozen_code(&info); + PyInterpreterState *interp = _PyInterpreterState_GET(); + PyObject *codeobj = unmarshal_frozen_code(interp, &info); if (dataobj != Py_None) { PyBuffer_Release(&buf); } @@ -3659,7 +3794,7 @@ _imp_source_hash_impl(PyObject *module, long key, Py_buffer *source) PyDoc_STRVAR(doc_imp, -"(Extremely) low-level import machinery bits as used by importlib and imp."); +"(Extremely) low-level import machinery bits as used by importlib."); static PyMethodDef imp_methods[] = { _IMP_EXTENSION_SUFFIXES_METHODDEF diff --git a/Python/importlib.h b/Python/importlib.h deleted file mode 100644 index 586f3b21f46246..00000000000000 --- a/Python/importlib.h +++ /dev/null @@ -1,1783 +0,0 @@ -/* Auto-generated by Programs/_freeze_importlib.c */ -const unsigned char _Py_M__importlib_bootstrap[] = { - 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,4,0,0,0,64,0,0,0,115,194,1,0,0,100,0, - 90,0,100,1,97,1,100,2,100,3,132,0,90,2,100,4, - 100,5,132,0,90,3,105,0,90,4,105,0,90,5,71,0, - 100,6,100,7,132,0,100,7,101,6,131,3,90,7,71,0, - 100,8,100,9,132,0,100,9,131,2,90,8,71,0,100,10, - 100,11,132,0,100,11,131,2,90,9,71,0,100,12,100,13, - 132,0,100,13,131,2,90,10,100,14,100,15,132,0,90,11, - 100,16,100,17,132,0,90,12,100,18,100,19,132,0,90,13, - 100,20,100,21,156,1,100,22,100,23,132,2,90,14,100,24, - 100,25,132,0,90,15,100,26,100,27,132,0,90,16,100,28, - 100,29,132,0,90,17,100,30,100,31,132,0,90,18,71,0, - 100,32,100,33,132,0,100,33,131,2,90,19,100,1,100,1, - 100,34,156,2,100,35,100,36,132,2,90,20,100,94,100,37, - 100,38,132,1,90,21,100,39,100,40,156,1,100,41,100,42, - 132,2,90,22,100,43,100,44,132,0,90,23,100,45,100,46, - 132,0,90,24,100,47,100,48,132,0,90,25,100,49,100,50, - 132,0,90,26,100,51,100,52,132,0,90,27,100,53,100,54, - 132,0,90,28,71,0,100,55,100,56,132,0,100,56,131,2, - 90,29,71,0,100,57,100,58,132,0,100,58,131,2,90,30, - 71,0,100,59,100,60,132,0,100,60,131,2,90,31,100,61, - 100,62,132,0,90,32,100,63,100,64,132,0,90,33,100,95, - 100,65,100,66,132,1,90,34,100,67,100,68,132,0,90,35, - 100,69,90,36,101,36,100,70,23,0,90,37,100,71,100,72, - 132,0,90,38,101,39,131,0,90,40,100,73,100,74,132,0, - 90,41,100,96,100,76,100,77,132,1,90,42,100,39,100,78, - 156,1,100,79,100,80,132,2,90,43,100,81,100,82,132,0, - 90,44,100,97,100,84,100,85,132,1,90,45,100,86,100,87, - 132,0,90,46,100,88,100,89,132,0,90,47,100,90,100,91, - 132,0,90,48,100,92,100,93,132,0,90,49,100,1,83,0, - 41,98,97,83,1,0,0,67,111,114,101,32,105,109,112,108, - 101,109,101,110,116,97,116,105,111,110,32,111,102,32,105,109, - 112,111,114,116,46,10,10,84,104,105,115,32,109,111,100,117, - 108,101,32,105,115,32,78,79,84,32,109,101,97,110,116,32, - 116,111,32,98,101,32,100,105,114,101,99,116,108,121,32,105, - 109,112,111,114,116,101,100,33,32,73,116,32,104,97,115,32, - 98,101,101,110,32,100,101,115,105,103,110,101,100,32,115,117, - 99,104,10,116,104,97,116,32,105,116,32,99,97,110,32,98, - 101,32,98,111,111,116,115,116,114,97,112,112,101,100,32,105, - 110,116,111,32,80,121,116,104,111,110,32,97,115,32,116,104, - 101,32,105,109,112,108,101,109,101,110,116,97,116,105,111,110, - 32,111,102,32,105,109,112,111,114,116,46,32,65,115,10,115, - 117,99,104,32,105,116,32,114,101,113,117,105,114,101,115,32, - 116,104,101,32,105,110,106,101,99,116,105,111,110,32,111,102, - 
32,115,112,101,99,105,102,105,99,32,109,111,100,117,108,101, - 115,32,97,110,100,32,97,116,116,114,105,98,117,116,101,115, - 32,105,110,32,111,114,100,101,114,32,116,111,10,119,111,114, - 107,46,32,79,110,101,32,115,104,111,117,108,100,32,117,115, - 101,32,105,109,112,111,114,116,108,105,98,32,97,115,32,116, - 104,101,32,112,117,98,108,105,99,45,102,97,99,105,110,103, - 32,118,101,114,115,105,111,110,32,111,102,32,116,104,105,115, - 32,109,111,100,117,108,101,46,10,10,78,99,2,0,0,0, - 0,0,0,0,0,0,0,0,3,0,0,0,7,0,0,0, - 67,0,0,0,115,56,0,0,0,100,1,68,0,93,32,125, - 2,116,0,124,1,124,2,131,2,114,4,116,1,124,0,124, - 2,116,2,124,1,124,2,131,2,131,3,1,0,113,4,124, - 0,106,3,160,4,124,1,106,3,161,1,1,0,100,2,83, - 0,41,3,122,47,83,105,109,112,108,101,32,115,117,98,115, - 116,105,116,117,116,101,32,102,111,114,32,102,117,110,99,116, - 111,111,108,115,46,117,112,100,97,116,101,95,119,114,97,112, - 112,101,114,46,41,4,218,10,95,95,109,111,100,117,108,101, - 95,95,218,8,95,95,110,97,109,101,95,95,218,12,95,95, - 113,117,97,108,110,97,109,101,95,95,218,7,95,95,100,111, - 99,95,95,78,41,5,218,7,104,97,115,97,116,116,114,218, - 7,115,101,116,97,116,116,114,218,7,103,101,116,97,116,116, - 114,218,8,95,95,100,105,99,116,95,95,218,6,117,112,100, - 97,116,101,41,3,90,3,110,101,119,90,3,111,108,100,218, - 7,114,101,112,108,97,99,101,169,0,114,10,0,0,0,250, - 29,60,102,114,111,122,101,110,32,105,109,112,111,114,116,108, - 105,98,46,95,98,111,111,116,115,116,114,97,112,62,218,5, - 95,119,114,97,112,27,0,0,0,115,8,0,0,0,0,2, - 8,1,10,1,20,1,114,12,0,0,0,99,1,0,0,0, - 0,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0, - 67,0,0,0,115,12,0,0,0,116,0,116,1,131,1,124, - 0,131,1,83,0,169,1,78,41,2,218,4,116,121,112,101, - 218,3,115,121,115,169,1,218,4,110,97,109,101,114,10,0, - 0,0,114,10,0,0,0,114,11,0,0,0,218,11,95,110, - 101,119,95,109,111,100,117,108,101,35,0,0,0,115,2,0, - 0,0,0,1,114,18,0,0,0,99,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,1,0,0,0,64,0, - 0,0,115,12,0,0,0,101,0,90,1,100,0,90,2,100, - 1,83,0,41,2,218,14,95,68,101,97,100,108,111,99,107, - 69,114,114,111,114,78,41,3,114,1,0,0,0,114,0,0, - 0,0,114,2,0,0,0,114,10,0,0,0,114,10,0,0, - 0,114,10,0,0,0,114,11,0,0,0,114,19,0,0,0, - 48,0,0,0,115,2,0,0,0,8,1,114,19,0,0,0, - 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,64,0,0,0,115,56,0,0,0,101,0, - 90,1,100,0,90,2,100,1,90,3,100,2,100,3,132,0, - 90,4,100,4,100,5,132,0,90,5,100,6,100,7,132,0, - 90,6,100,8,100,9,132,0,90,7,100,10,100,11,132,0, - 90,8,100,12,83,0,41,13,218,11,95,77,111,100,117,108, - 101,76,111,99,107,122,169,65,32,114,101,99,117,114,115,105, - 118,101,32,108,111,99,107,32,105,109,112,108,101,109,101,110, - 116,97,116,105,111,110,32,119,104,105,99,104,32,105,115,32, - 97,98,108,101,32,116,111,32,100,101,116,101,99,116,32,100, - 101,97,100,108,111,99,107,115,10,32,32,32,32,40,101,46, - 103,46,32,116,104,114,101,97,100,32,49,32,116,114,121,105, - 110,103,32,116,111,32,116,97,107,101,32,108,111,99,107,115, - 32,65,32,116,104,101,110,32,66,44,32,97,110,100,32,116, - 104,114,101,97,100,32,50,32,116,114,121,105,110,103,32,116, - 111,10,32,32,32,32,116,97,107,101,32,108,111,99,107,115, - 32,66,32,116,104,101,110,32,65,41,46,10,32,32,32,32, - 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, - 0,2,0,0,0,67,0,0,0,115,48,0,0,0,116,0, - 160,1,161,0,124,0,95,2,116,0,160,1,161,0,124,0, - 95,3,124,1,124,0,95,4,100,0,124,0,95,5,100,1, - 124,0,95,6,100,1,124,0,95,7,100,0,83,0,169,2, - 78,233,0,0,0,0,41,8,218,7,95,116,104,114,101,97, - 100,90,13,97,108,108,111,99,97,116,101,95,108,111,99,107, - 
218,4,108,111,99,107,218,6,119,97,107,101,117,112,114,17, - 0,0,0,218,5,111,119,110,101,114,218,5,99,111,117,110, - 116,218,7,119,97,105,116,101,114,115,169,2,218,4,115,101, - 108,102,114,17,0,0,0,114,10,0,0,0,114,10,0,0, - 0,114,11,0,0,0,218,8,95,95,105,110,105,116,95,95, - 58,0,0,0,115,12,0,0,0,0,1,10,1,10,1,6, - 1,6,1,6,1,122,20,95,77,111,100,117,108,101,76,111, - 99,107,46,95,95,105,110,105,116,95,95,99,1,0,0,0, - 0,0,0,0,0,0,0,0,4,0,0,0,3,0,0,0, - 67,0,0,0,115,60,0,0,0,116,0,160,1,161,0,125, - 1,124,0,106,2,125,2,116,3,160,4,124,2,161,1,125, - 3,124,3,100,0,107,8,114,36,100,1,83,0,124,3,106, - 2,125,2,124,2,124,1,107,2,114,14,100,2,83,0,113, - 14,100,0,83,0,41,3,78,70,84,41,5,114,23,0,0, - 0,218,9,103,101,116,95,105,100,101,110,116,114,26,0,0, - 0,218,12,95,98,108,111,99,107,105,110,103,95,111,110,218, - 3,103,101,116,41,4,114,30,0,0,0,90,2,109,101,218, - 3,116,105,100,114,24,0,0,0,114,10,0,0,0,114,10, - 0,0,0,114,11,0,0,0,218,12,104,97,115,95,100,101, - 97,100,108,111,99,107,66,0,0,0,115,16,0,0,0,0, - 2,8,1,6,2,10,1,8,1,4,1,6,1,8,1,122, - 24,95,77,111,100,117,108,101,76,111,99,107,46,104,97,115, - 95,100,101,97,100,108,111,99,107,99,1,0,0,0,0,0, - 0,0,0,0,0,0,2,0,0,0,9,0,0,0,67,0, - 0,0,115,178,0,0,0,116,0,160,1,161,0,125,1,124, - 0,116,2,124,1,60,0,122,148,124,0,106,3,143,110,1, - 0,124,0,106,4,100,1,107,2,115,46,124,0,106,5,124, - 1,107,2,114,84,124,1,124,0,95,5,124,0,4,0,106, - 4,100,2,55,0,2,0,95,4,87,0,53,0,81,0,82, - 0,163,0,87,0,162,86,100,3,83,0,124,0,160,6,161, - 0,114,104,116,7,100,4,124,0,22,0,131,1,130,1,124, - 0,106,8,160,9,100,5,161,1,114,130,124,0,4,0,106, - 10,100,2,55,0,2,0,95,10,87,0,53,0,81,0,82, - 0,88,0,124,0,106,8,160,9,161,0,1,0,124,0,106, - 8,160,11,161,0,1,0,113,18,87,0,53,0,116,2,124, - 1,61,0,88,0,100,6,83,0,41,7,122,185,10,32,32, - 32,32,32,32,32,32,65,99,113,117,105,114,101,32,116,104, - 101,32,109,111,100,117,108,101,32,108,111,99,107,46,32,32, - 73,102,32,97,32,112,111,116,101,110,116,105,97,108,32,100, - 101,97,100,108,111,99,107,32,105,115,32,100,101,116,101,99, - 116,101,100,44,10,32,32,32,32,32,32,32,32,97,32,95, - 68,101,97,100,108,111,99,107,69,114,114,111,114,32,105,115, - 32,114,97,105,115,101,100,46,10,32,32,32,32,32,32,32, - 32,79,116,104,101,114,119,105,115,101,44,32,116,104,101,32, - 108,111,99,107,32,105,115,32,97,108,119,97,121,115,32,97, - 99,113,117,105,114,101,100,32,97,110,100,32,84,114,117,101, - 32,105,115,32,114,101,116,117,114,110,101,100,46,10,32,32, - 32,32,32,32,32,32,114,22,0,0,0,233,1,0,0,0, - 84,122,23,100,101,97,100,108,111,99,107,32,100,101,116,101, - 99,116,101,100,32,98,121,32,37,114,70,78,41,12,114,23, - 0,0,0,114,32,0,0,0,114,33,0,0,0,114,24,0, - 0,0,114,27,0,0,0,114,26,0,0,0,114,36,0,0, - 0,114,19,0,0,0,114,25,0,0,0,218,7,97,99,113, - 117,105,114,101,114,28,0,0,0,218,7,114,101,108,101,97, - 115,101,169,2,114,30,0,0,0,114,35,0,0,0,114,10, - 0,0,0,114,10,0,0,0,114,11,0,0,0,114,38,0, - 0,0,78,0,0,0,115,30,0,0,0,0,6,8,1,8, - 1,2,2,8,1,20,1,6,1,14,1,18,1,8,1,12, - 1,12,1,24,2,10,1,16,2,122,19,95,77,111,100,117, - 108,101,76,111,99,107,46,97,99,113,117,105,114,101,99,1, - 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,9, - 0,0,0,67,0,0,0,115,122,0,0,0,116,0,160,1, - 161,0,125,1,124,0,106,2,143,98,1,0,124,0,106,3, - 124,1,107,3,114,34,116,4,100,1,131,1,130,1,124,0, - 106,5,100,2,107,4,115,48,74,0,130,1,124,0,4,0, - 106,5,100,3,56,0,2,0,95,5,124,0,106,5,100,2, - 107,2,114,108,100,0,124,0,95,3,124,0,106,6,114,108, - 124,0,4,0,106,6,100,3,56,0,2,0,95,6,124,0, - 106,7,160,8,161,0,1,0,87,0,53,0,81,0,82,0, - 
[Removed hunk elided: auto-generated frozen bytecode for importlib._bootstrap, emitted as comma-separated byte values in a generated C header; the deleted lines contain only machine-generated data.]
0,124,4,100,1,107,2,114,18,116,0,124,0,131,1,125, - 5,110,36,124,1,100,2,107,9,114,30,124,1,110,2,105, - 0,125,6,116,1,124,6,131,1,125,7,116,0,124,0,124, - 7,124,4,131,3,125,5,124,3,115,150,124,4,100,1,107, - 2,114,84,116,0,124,0,160,2,100,3,161,1,100,1,25, - 0,131,1,83,0,124,0,115,92,124,5,83,0,116,3,124, - 0,131,1,116,3,124,0,160,2,100,3,161,1,100,1,25, - 0,131,1,24,0,125,8,116,4,106,5,124,5,106,6,100, - 2,116,3,124,5,106,6,131,1,124,8,24,0,133,2,25, - 0,25,0,83,0,110,26,116,7,124,5,100,4,131,2,114, - 172,116,8,124,5,124,3,116,0,131,3,83,0,124,5,83, - 0,100,2,83,0,41,5,97,215,1,0,0,73,109,112,111, - 114,116,32,97,32,109,111,100,117,108,101,46,10,10,32,32, - 32,32,84,104,101,32,39,103,108,111,98,97,108,115,39,32, - 97,114,103,117,109,101,110,116,32,105,115,32,117,115,101,100, - 32,116,111,32,105,110,102,101,114,32,119,104,101,114,101,32, - 116,104,101,32,105,109,112,111,114,116,32,105,115,32,111,99, - 99,117,114,114,105,110,103,32,102,114,111,109,10,32,32,32, - 32,116,111,32,104,97,110,100,108,101,32,114,101,108,97,116, - 105,118,101,32,105,109,112,111,114,116,115,46,32,84,104,101, - 32,39,108,111,99,97,108,115,39,32,97,114,103,117,109,101, - 110,116,32,105,115,32,105,103,110,111,114,101,100,46,32,84, - 104,101,10,32,32,32,32,39,102,114,111,109,108,105,115,116, - 39,32,97,114,103,117,109,101,110,116,32,115,112,101,99,105, - 102,105,101,115,32,119,104,97,116,32,115,104,111,117,108,100, - 32,101,120,105,115,116,32,97,115,32,97,116,116,114,105,98, - 117,116,101,115,32,111,110,32,116,104,101,32,109,111,100,117, - 108,101,10,32,32,32,32,98,101,105,110,103,32,105,109,112, - 111,114,116,101,100,32,40,101,46,103,46,32,96,96,102,114, - 111,109,32,109,111,100,117,108,101,32,105,109,112,111,114,116, - 32,60,102,114,111,109,108,105,115,116,62,96,96,41,46,32, - 32,84,104,101,32,39,108,101,118,101,108,39,10,32,32,32, - 32,97,114,103,117,109,101,110,116,32,114,101,112,114,101,115, - 101,110,116,115,32,116,104,101,32,112,97,99,107,97,103,101, - 32,108,111,99,97,116,105,111,110,32,116,111,32,105,109,112, - 111,114,116,32,102,114,111,109,32,105,110,32,97,32,114,101, - 108,97,116,105,118,101,10,32,32,32,32,105,109,112,111,114, - 116,32,40,101,46,103,46,32,96,96,102,114,111,109,32,46, - 46,112,107,103,32,105,109,112,111,114,116,32,109,111,100,96, - 96,32,119,111,117,108,100,32,104,97,118,101,32,97,32,39, - 108,101,118,101,108,39,32,111,102,32,50,41,46,10,10,32, - 32,32,32,114,22,0,0,0,78,114,132,0,0,0,114,145, - 0,0,0,41,9,114,211,0,0,0,114,221,0,0,0,218, - 9,112,97,114,116,105,116,105,111,110,114,187,0,0,0,114, - 15,0,0,0,114,92,0,0,0,114,1,0,0,0,114,4, - 0,0,0,114,216,0,0,0,41,9,114,17,0,0,0,114, - 220,0,0,0,218,6,108,111,99,97,108,115,114,217,0,0, - 0,114,189,0,0,0,114,96,0,0,0,90,8,103,108,111, - 98,97,108,115,95,114,188,0,0,0,90,7,99,117,116,95, - 111,102,102,114,10,0,0,0,114,10,0,0,0,114,11,0, - 0,0,218,10,95,95,105,109,112,111,114,116,95,95,57,4, - 0,0,115,30,0,0,0,0,11,8,1,10,2,16,1,8, - 1,12,1,4,3,8,1,18,1,4,1,4,4,26,3,32, - 1,10,1,12,2,114,224,0,0,0,99,1,0,0,0,0, - 0,0,0,0,0,0,0,2,0,0,0,3,0,0,0,67, - 0,0,0,115,38,0,0,0,116,0,160,1,124,0,161,1, - 125,1,124,1,100,0,107,8,114,30,116,2,100,1,124,0, - 23,0,131,1,130,1,116,3,124,1,131,1,83,0,41,2, - 78,122,25,110,111,32,98,117,105,108,116,45,105,110,32,109, - 111,100,117,108,101,32,110,97,109,101,100,32,41,4,114,163, - 0,0,0,114,170,0,0,0,114,79,0,0,0,114,162,0, - 0,0,41,2,114,17,0,0,0,114,95,0,0,0,114,10, - 0,0,0,114,10,0,0,0,114,11,0,0,0,218,18,95, - 98,117,105,108,116,105,110,95,102,114,111,109,95,110,97,109, - 101,94,4,0,0,115,8,0,0,0,0,1,10,1,8,1, - 
12,1,114,225,0,0,0,99,2,0,0,0,0,0,0,0, - 0,0,0,0,10,0,0,0,5,0,0,0,67,0,0,0, - 115,166,0,0,0,124,1,97,0,124,0,97,1,116,2,116, - 1,131,1,125,2,116,1,106,3,160,4,161,0,68,0,93, - 72,92,2,125,3,125,4,116,5,124,4,124,2,131,2,114, - 26,124,3,116,1,106,6,107,6,114,60,116,7,125,5,110, - 18,116,0,160,8,124,3,161,1,114,26,116,9,125,5,110, - 2,113,26,116,10,124,4,124,5,131,2,125,6,116,11,124, - 6,124,4,131,2,1,0,113,26,116,1,106,3,116,12,25, - 0,125,7,100,1,68,0,93,46,125,8,124,8,116,1,106, - 3,107,7,114,138,116,13,124,8,131,1,125,9,110,10,116, - 1,106,3,124,8,25,0,125,9,116,14,124,7,124,8,124, - 9,131,3,1,0,113,114,100,2,83,0,41,3,122,250,83, - 101,116,117,112,32,105,109,112,111,114,116,108,105,98,32,98, - 121,32,105,109,112,111,114,116,105,110,103,32,110,101,101,100, - 101,100,32,98,117,105,108,116,45,105,110,32,109,111,100,117, - 108,101,115,32,97,110,100,32,105,110,106,101,99,116,105,110, - 103,32,116,104,101,109,10,32,32,32,32,105,110,116,111,32, - 116,104,101,32,103,108,111,98,97,108,32,110,97,109,101,115, - 112,97,99,101,46,10,10,32,32,32,32,65,115,32,115,121, - 115,32,105,115,32,110,101,101,100,101,100,32,102,111,114,32, - 115,121,115,46,109,111,100,117,108,101,115,32,97,99,99,101, - 115,115,32,97,110,100,32,95,105,109,112,32,105,115,32,110, - 101,101,100,101,100,32,116,111,32,108,111,97,100,32,98,117, - 105,108,116,45,105,110,10,32,32,32,32,109,111,100,117,108, - 101,115,44,32,116,104,111,115,101,32,116,119,111,32,109,111, - 100,117,108,101,115,32,109,117,115,116,32,98,101,32,101,120, - 112,108,105,99,105,116,108,121,32,112,97,115,115,101,100,32, - 105,110,46,10,10,32,32,32,32,41,3,114,23,0,0,0, - 114,194,0,0,0,114,63,0,0,0,78,41,15,114,56,0, - 0,0,114,15,0,0,0,114,14,0,0,0,114,92,0,0, - 0,218,5,105,116,101,109,115,114,198,0,0,0,114,78,0, - 0,0,114,163,0,0,0,114,88,0,0,0,114,177,0,0, - 0,114,146,0,0,0,114,152,0,0,0,114,1,0,0,0, - 114,225,0,0,0,114,5,0,0,0,41,10,218,10,115,121, - 115,95,109,111,100,117,108,101,218,11,95,105,109,112,95,109, - 111,100,117,108,101,90,11,109,111,100,117,108,101,95,116,121, - 112,101,114,17,0,0,0,114,96,0,0,0,114,111,0,0, - 0,114,95,0,0,0,90,11,115,101,108,102,95,109,111,100, - 117,108,101,90,12,98,117,105,108,116,105,110,95,110,97,109, - 101,90,14,98,117,105,108,116,105,110,95,109,111,100,117,108, - 101,114,10,0,0,0,114,10,0,0,0,114,11,0,0,0, - 218,6,95,115,101,116,117,112,101,4,0,0,115,36,0,0, - 0,0,9,4,1,4,3,8,1,18,1,10,1,10,1,6, - 1,10,1,6,2,2,1,10,1,12,3,10,1,8,1,10, - 1,10,2,10,1,114,229,0,0,0,99,2,0,0,0,0, - 0,0,0,0,0,0,0,2,0,0,0,3,0,0,0,67, - 0,0,0,115,38,0,0,0,116,0,124,0,124,1,131,2, - 1,0,116,1,106,2,160,3,116,4,161,1,1,0,116,1, - 106,2,160,3,116,5,161,1,1,0,100,1,83,0,41,2, - 122,48,73,110,115,116,97,108,108,32,105,109,112,111,114,116, - 101,114,115,32,102,111,114,32,98,117,105,108,116,105,110,32, - 97,110,100,32,102,114,111,122,101,110,32,109,111,100,117,108, - 101,115,78,41,6,114,229,0,0,0,114,15,0,0,0,114, - 193,0,0,0,114,123,0,0,0,114,163,0,0,0,114,177, - 0,0,0,41,2,114,227,0,0,0,114,228,0,0,0,114, - 10,0,0,0,114,10,0,0,0,114,11,0,0,0,218,8, - 95,105,110,115,116,97,108,108,136,4,0,0,115,6,0,0, - 0,0,2,10,2,12,1,114,230,0,0,0,99,0,0,0, - 0,0,0,0,0,0,0,0,0,1,0,0,0,4,0,0, - 0,67,0,0,0,115,32,0,0,0,100,1,100,2,108,0, - 125,0,124,0,97,1,124,0,160,2,116,3,106,4,116,5, - 25,0,161,1,1,0,100,2,83,0,41,3,122,57,73,110, - 115,116,97,108,108,32,105,109,112,111,114,116,101,114,115,32, - 116,104,97,116,32,114,101,113,117,105,114,101,32,101,120,116, - 101,114,110,97,108,32,102,105,108,101,115,121,115,116,101,109, - 
32,97,99,99,101,115,115,114,22,0,0,0,78,41,6,218, - 26,95,102,114,111,122,101,110,95,105,109,112,111,114,116,108, - 105,98,95,101,120,116,101,114,110,97,108,114,130,0,0,0, - 114,230,0,0,0,114,15,0,0,0,114,92,0,0,0,114, - 1,0,0,0,41,1,114,231,0,0,0,114,10,0,0,0, - 114,10,0,0,0,114,11,0,0,0,218,27,95,105,110,115, - 116,97,108,108,95,101,120,116,101,114,110,97,108,95,105,109, - 112,111,114,116,101,114,115,144,4,0,0,115,6,0,0,0, - 0,3,8,1,4,1,114,232,0,0,0,41,2,78,78,41, - 1,78,41,2,78,114,22,0,0,0,41,4,78,78,114,10, - 0,0,0,114,22,0,0,0,41,50,114,3,0,0,0,114, - 130,0,0,0,114,12,0,0,0,114,18,0,0,0,114,59, - 0,0,0,114,33,0,0,0,114,42,0,0,0,114,19,0, - 0,0,114,20,0,0,0,114,48,0,0,0,114,49,0,0, - 0,114,52,0,0,0,114,64,0,0,0,114,66,0,0,0, - 114,76,0,0,0,114,86,0,0,0,114,90,0,0,0,114, - 97,0,0,0,114,113,0,0,0,114,114,0,0,0,114,91, - 0,0,0,114,146,0,0,0,114,152,0,0,0,114,156,0, - 0,0,114,109,0,0,0,114,93,0,0,0,114,161,0,0, - 0,114,162,0,0,0,114,94,0,0,0,114,163,0,0,0, - 114,177,0,0,0,114,182,0,0,0,114,190,0,0,0,114, - 192,0,0,0,114,197,0,0,0,114,203,0,0,0,90,15, - 95,69,82,82,95,77,83,71,95,80,82,69,70,73,88,90, - 8,95,69,82,82,95,77,83,71,114,208,0,0,0,218,6, - 111,98,106,101,99,116,114,209,0,0,0,114,210,0,0,0, - 114,211,0,0,0,114,216,0,0,0,114,221,0,0,0,114, - 224,0,0,0,114,225,0,0,0,114,229,0,0,0,114,230, - 0,0,0,114,232,0,0,0,114,10,0,0,0,114,10,0, - 0,0,114,10,0,0,0,114,11,0,0,0,218,8,60,109, - 111,100,117,108,101,62,1,0,0,0,115,94,0,0,0,4, - 24,4,2,8,8,8,8,4,2,4,3,16,4,14,68,14, - 21,14,16,8,37,8,17,8,11,14,8,8,11,8,12,8, - 16,8,36,14,100,16,26,10,45,14,72,8,17,8,17,8, - 30,8,37,8,42,8,15,14,75,14,78,14,13,8,9,8, - 9,10,47,8,16,4,1,8,2,8,27,6,3,8,16,10, - 15,14,37,8,27,10,37,8,7,8,35,8,8, -}; diff --git a/Python/initconfig.c b/Python/initconfig.c index db7f11e17d6662..0d42b7ea082d61 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -2355,13 +2355,13 @@ config_usage(int error, const wchar_t* program) } static void -config_envvars_usage() +config_envvars_usage(void) { printf(usage_envvars, (wint_t)DELIM, (wint_t)DELIM, PYTHONHOMEHELP); } static void -config_xoptions_usage() +config_xoptions_usage(void) { puts(usage_xoptions); } diff --git a/Python/instrumentation.c b/Python/instrumentation.c new file mode 100644 index 00000000000000..c5bbbdacbb851e --- /dev/null +++ b/Python/instrumentation.c @@ -0,0 +1,2030 @@ + + +#include "Python.h" +#include "pycore_call.h" +#include "pycore_frame.h" +#include "pycore_interp.h" +#include "pycore_long.h" +#include "pycore_namespace.h" +#include "pycore_object.h" +#include "pycore_opcode.h" +#include "pycore_pyerrors.h" +#include "pycore_pystate.h" + +/* Uncomment this to dump debugging output when assertions fail */ +// #define INSTRUMENT_DEBUG 1 + +static PyObject DISABLE = +{ + .ob_refcnt = _Py_IMMORTAL_REFCNT, + .ob_type = &PyBaseObject_Type +}; + +PyObject _PyInstrumentation_MISSING = +{ + .ob_refcnt = _Py_IMMORTAL_REFCNT, + .ob_type = &PyBaseObject_Type +}; + +static const int8_t EVENT_FOR_OPCODE[256] = { + [RETURN_CONST] = PY_MONITORING_EVENT_PY_RETURN, + [INSTRUMENTED_RETURN_CONST] = PY_MONITORING_EVENT_PY_RETURN, + [RETURN_VALUE] = PY_MONITORING_EVENT_PY_RETURN, + [INSTRUMENTED_RETURN_VALUE] = PY_MONITORING_EVENT_PY_RETURN, + [CALL] = PY_MONITORING_EVENT_CALL, + [INSTRUMENTED_CALL] = PY_MONITORING_EVENT_CALL, + [CALL_FUNCTION_EX] = PY_MONITORING_EVENT_CALL, + [INSTRUMENTED_CALL_FUNCTION_EX] = PY_MONITORING_EVENT_CALL, + [RESUME] = -1, + [YIELD_VALUE] = PY_MONITORING_EVENT_PY_YIELD, + [INSTRUMENTED_YIELD_VALUE] = 
PY_MONITORING_EVENT_PY_YIELD, + [JUMP_FORWARD] = PY_MONITORING_EVENT_JUMP, + [JUMP_BACKWARD] = PY_MONITORING_EVENT_JUMP, + [POP_JUMP_IF_FALSE] = PY_MONITORING_EVENT_BRANCH, + [POP_JUMP_IF_TRUE] = PY_MONITORING_EVENT_BRANCH, + [POP_JUMP_IF_NONE] = PY_MONITORING_EVENT_BRANCH, + [POP_JUMP_IF_NOT_NONE] = PY_MONITORING_EVENT_BRANCH, + [INSTRUMENTED_JUMP_FORWARD] = PY_MONITORING_EVENT_JUMP, + [INSTRUMENTED_JUMP_BACKWARD] = PY_MONITORING_EVENT_JUMP, + [INSTRUMENTED_POP_JUMP_IF_FALSE] = PY_MONITORING_EVENT_BRANCH, + [INSTRUMENTED_POP_JUMP_IF_TRUE] = PY_MONITORING_EVENT_BRANCH, + [INSTRUMENTED_POP_JUMP_IF_NONE] = PY_MONITORING_EVENT_BRANCH, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = PY_MONITORING_EVENT_BRANCH, + [FOR_ITER] = PY_MONITORING_EVENT_BRANCH, + [INSTRUMENTED_FOR_ITER] = PY_MONITORING_EVENT_BRANCH, + [END_FOR] = PY_MONITORING_EVENT_STOP_ITERATION, + [INSTRUMENTED_END_FOR] = PY_MONITORING_EVENT_STOP_ITERATION, + [END_SEND] = PY_MONITORING_EVENT_STOP_ITERATION, + [INSTRUMENTED_END_SEND] = PY_MONITORING_EVENT_STOP_ITERATION, +}; + +static const uint8_t DE_INSTRUMENT[256] = { + [INSTRUMENTED_RESUME] = RESUME, + [INSTRUMENTED_RETURN_VALUE] = RETURN_VALUE, + [INSTRUMENTED_RETURN_CONST] = RETURN_CONST, + [INSTRUMENTED_CALL] = CALL, + [INSTRUMENTED_CALL_FUNCTION_EX] = CALL_FUNCTION_EX, + [INSTRUMENTED_YIELD_VALUE] = YIELD_VALUE, + [INSTRUMENTED_JUMP_FORWARD] = JUMP_FORWARD, + [INSTRUMENTED_JUMP_BACKWARD] = JUMP_BACKWARD, + [INSTRUMENTED_POP_JUMP_IF_FALSE] = POP_JUMP_IF_FALSE, + [INSTRUMENTED_POP_JUMP_IF_TRUE] = POP_JUMP_IF_TRUE, + [INSTRUMENTED_POP_JUMP_IF_NONE] = POP_JUMP_IF_NONE, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = POP_JUMP_IF_NOT_NONE, + [INSTRUMENTED_FOR_ITER] = FOR_ITER, + [INSTRUMENTED_END_FOR] = END_FOR, + [INSTRUMENTED_END_SEND] = END_SEND, +}; + +static const uint8_t INSTRUMENTED_OPCODES[256] = { + [RETURN_CONST] = INSTRUMENTED_RETURN_CONST, + [INSTRUMENTED_RETURN_CONST] = INSTRUMENTED_RETURN_CONST, + [RETURN_VALUE] = INSTRUMENTED_RETURN_VALUE, + [INSTRUMENTED_RETURN_VALUE] = INSTRUMENTED_RETURN_VALUE, + [CALL] = INSTRUMENTED_CALL, + [INSTRUMENTED_CALL] = INSTRUMENTED_CALL, + [CALL_FUNCTION_EX] = INSTRUMENTED_CALL_FUNCTION_EX, + [INSTRUMENTED_CALL_FUNCTION_EX] = INSTRUMENTED_CALL_FUNCTION_EX, + [YIELD_VALUE] = INSTRUMENTED_YIELD_VALUE, + [INSTRUMENTED_YIELD_VALUE] = INSTRUMENTED_YIELD_VALUE, + [RESUME] = INSTRUMENTED_RESUME, + [INSTRUMENTED_RESUME] = INSTRUMENTED_RESUME, + [JUMP_FORWARD] = INSTRUMENTED_JUMP_FORWARD, + [INSTRUMENTED_JUMP_FORWARD] = INSTRUMENTED_JUMP_FORWARD, + [JUMP_BACKWARD] = INSTRUMENTED_JUMP_BACKWARD, + [INSTRUMENTED_JUMP_BACKWARD] = INSTRUMENTED_JUMP_BACKWARD, + [POP_JUMP_IF_FALSE] = INSTRUMENTED_POP_JUMP_IF_FALSE, + [INSTRUMENTED_POP_JUMP_IF_FALSE] = INSTRUMENTED_POP_JUMP_IF_FALSE, + [POP_JUMP_IF_TRUE] = INSTRUMENTED_POP_JUMP_IF_TRUE, + [INSTRUMENTED_POP_JUMP_IF_TRUE] = INSTRUMENTED_POP_JUMP_IF_TRUE, + [POP_JUMP_IF_NONE] = INSTRUMENTED_POP_JUMP_IF_NONE, + [INSTRUMENTED_POP_JUMP_IF_NONE] = INSTRUMENTED_POP_JUMP_IF_NONE, + [POP_JUMP_IF_NOT_NONE] = INSTRUMENTED_POP_JUMP_IF_NOT_NONE, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = INSTRUMENTED_POP_JUMP_IF_NOT_NONE, + [END_FOR] = INSTRUMENTED_END_FOR, + [INSTRUMENTED_END_FOR] = INSTRUMENTED_END_FOR, + [END_SEND] = INSTRUMENTED_END_SEND, + [INSTRUMENTED_END_SEND] = INSTRUMENTED_END_SEND, + [FOR_ITER] = INSTRUMENTED_FOR_ITER, + [INSTRUMENTED_FOR_ITER] = INSTRUMENTED_FOR_ITER, + + [INSTRUMENTED_LINE] = INSTRUMENTED_LINE, + [INSTRUMENTED_INSTRUCTION] = INSTRUMENTED_INSTRUCTION, +}; + +static inline bool +opcode_has_event(int opcode) 
+{ + return ( + opcode < INSTRUMENTED_LINE && + INSTRUMENTED_OPCODES[opcode] > 0 + ); +} + +static inline bool +is_instrumented(int opcode) +{ + assert(opcode != 0); + assert(opcode != RESERVED); + return opcode >= MIN_INSTRUMENTED_OPCODE; +} + +#ifndef NDEBUG +static inline bool +monitors_equals(_Py_Monitors a, _Py_Monitors b) +{ + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + if (a.tools[i] != b.tools[i]) { + return false; + } + } + return true; +} +#endif + +static inline _Py_Monitors +monitors_sub(_Py_Monitors a, _Py_Monitors b) +{ + _Py_Monitors res; + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + res.tools[i] = a.tools[i] & ~b.tools[i]; + } + return res; +} + +#ifndef NDEBUG +static inline _Py_Monitors +monitors_and(_Py_Monitors a, _Py_Monitors b) +{ + _Py_Monitors res; + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + res.tools[i] = a.tools[i] & b.tools[i]; + } + return res; +} +#endif + +static inline _Py_Monitors +monitors_or(_Py_Monitors a, _Py_Monitors b) +{ + _Py_Monitors res; + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + res.tools[i] = a.tools[i] | b.tools[i]; + } + return res; +} + +static inline bool +monitors_are_empty(_Py_Monitors m) +{ + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + if (m.tools[i]) { + return false; + } + } + return true; +} + +static inline bool +multiple_tools(_Py_Monitors *m) +{ + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + if (_Py_popcount32(m->tools[i]) > 1) { + return true; + } + } + return false; +} + +static inline _PyMonitoringEventSet +get_events(_Py_Monitors *m, int tool_id) +{ + _PyMonitoringEventSet result = 0; + for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + if ((m->tools[e] >> tool_id) & 1) { + result |= (1 << e); + } + } + return result; +} + +/* Line delta. + * 8 bit value. 
+ * if line_delta == -128: + * line = None # represented as -1 + * elif line_delta == -127: + * line = PyCode_Addr2Line(code, offset * sizeof(_Py_CODEUNIT)); + * else: + * line = first_line + (offset >> OFFSET_SHIFT) + line_delta; + */ + +#define NO_LINE -128 +#define COMPUTED_LINE -127 + +#define OFFSET_SHIFT 4 + +static int8_t +compute_line_delta(PyCodeObject *code, int offset, int line) +{ + if (line < 0) { + return NO_LINE; + } + int delta = line - code->co_firstlineno - (offset >> OFFSET_SHIFT); + if (delta <= INT8_MAX && delta > COMPUTED_LINE) { + return delta; + } + return COMPUTED_LINE; +} + +static int +compute_line(PyCodeObject *code, int offset, int8_t line_delta) +{ + if (line_delta > COMPUTED_LINE) { + return code->co_firstlineno + (offset >> OFFSET_SHIFT) + line_delta; + } + if (line_delta == NO_LINE) { + + return -1; + } + assert(line_delta == COMPUTED_LINE); + /* Look it up */ + return PyCode_Addr2Line(code, offset * sizeof(_Py_CODEUNIT)); +} + +static int +instruction_length(PyCodeObject *code, int offset) +{ + int opcode = _PyCode_CODE(code)[offset].op.code; + assert(opcode != 0); + assert(opcode != RESERVED); + if (opcode == INSTRUMENTED_LINE) { + opcode = code->_co_monitoring->lines[offset].original_opcode; + } + if (opcode == INSTRUMENTED_INSTRUCTION) { + opcode = code->_co_monitoring->per_instruction_opcodes[offset]; + } + int deinstrumented = DE_INSTRUMENT[opcode]; + if (deinstrumented) { + opcode = deinstrumented; + } + else { + opcode = _PyOpcode_Deopt[opcode]; + } + assert(opcode != 0); + assert(!is_instrumented(opcode)); + assert(opcode == _PyOpcode_Deopt[opcode]); + return 1 + _PyOpcode_Caches[opcode]; +} + +#ifdef INSTRUMENT_DEBUG + +static void +dump_instrumentation_data_tools(PyCodeObject *code, uint8_t *tools, int i, FILE*out) +{ + if (tools == NULL) { + fprintf(out, "tools = NULL"); + } + else { + fprintf(out, "tools = %d", tools[i]); + } +} + +static void +dump_instrumentation_data_lines(PyCodeObject *code, _PyCoLineInstrumentationData *lines, int i, FILE*out) +{ + if (lines == NULL) { + fprintf(out, ", lines = NULL"); + } + else if (lines[i].original_opcode == 0) { + fprintf(out, ", lines = {original_opcode = No LINE (0), line_delta = %d)", lines[i].line_delta); + } + else { + fprintf(out, ", lines = {original_opcode = %s, line_delta = %d)", _PyOpcode_OpName[lines[i].original_opcode], lines[i].line_delta); + } +} + +static void +dump_instrumentation_data_line_tools(PyCodeObject *code, uint8_t *line_tools, int i, FILE*out) +{ + if (line_tools == NULL) { + fprintf(out, ", line_tools = NULL"); + } + else { + fprintf(out, ", line_tools = %d", line_tools[i]); + } +} + +static void +dump_instrumentation_data_per_instruction(PyCodeObject *code, _PyCoMonitoringData *data, int i, FILE*out) +{ + if (data->per_instruction_opcodes == NULL) { + fprintf(out, ", per-inst opcode = NULL"); + } + else { + fprintf(out, ", per-inst opcode = %s", _PyOpcode_OpName[data->per_instruction_opcodes[i]]); + } + if (data->per_instruction_tools == NULL) { + fprintf(out, ", per-inst tools = NULL"); + } + else { + fprintf(out, ", per-inst tools = %d", data->per_instruction_tools[i]); + } +} + +static void +dump_monitors(const char *prefix, _Py_Monitors monitors, FILE*out) +{ + fprintf(out, "%s monitors:\n", prefix); + for (int event = 0; event < PY_MONITORING_UNGROUPED_EVENTS; event++) { + fprintf(out, " Event %d: Tools %x\n", event, monitors.tools[event]); + } +} + +/* Like _Py_GetBaseOpcode but without asserts. 
+ * Does its best to give the right answer, but won't abort + * if something is wrong */ +static int +get_base_opcode_best_attempt(PyCodeObject *code, int offset) +{ + int opcode = _Py_OPCODE(_PyCode_CODE(code)[offset]); + if (INSTRUMENTED_OPCODES[opcode] != opcode) { + /* Not instrumented */ + return _PyOpcode_Deopt[opcode] == 0 ? opcode : _PyOpcode_Deopt[opcode]; + } + if (opcode == INSTRUMENTED_INSTRUCTION) { + if (code->_co_monitoring->per_instruction_opcodes[offset] == 0) { + return opcode; + } + opcode = code->_co_monitoring->per_instruction_opcodes[offset]; + } + if (opcode == INSTRUMENTED_LINE) { + if (code->_co_monitoring->lines[offset].original_opcode == 0) { + return opcode; + } + opcode = code->_co_monitoring->lines[offset].original_opcode; + } + int deinstrumented = DE_INSTRUMENT[opcode]; + if (deinstrumented) { + return deinstrumented; + } + if (_PyOpcode_Deopt[opcode] == 0) { + return opcode; + } + return _PyOpcode_Deopt[opcode]; +} + +/* No error checking -- Don't use this for anything but experimental debugging */ +static void +dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) +{ + _PyCoMonitoringData *data = code->_co_monitoring; + fprintf(out, "\n"); + PyObject_Print(code->co_name, out, Py_PRINT_RAW); + fprintf(out, "\n"); + if (data == NULL) { + fprintf(out, "NULL\n"); + return; + } + dump_monitors("Global", PyInterpreterState_Get()->monitors, out); + dump_monitors("Code", data->local_monitors, out); + dump_monitors("Active", data->active_monitors, out); + int code_len = (int)Py_SIZE(code); + bool starred = false; + for (int i = 0; i < code_len; i += instruction_length(code, i)) { + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + int opcode = instr->op.code; + if (i == star) { + fprintf(out, "** "); + starred = true; + } + fprintf(out, "Offset: %d, line: %d %s: ", i, PyCode_Addr2Line(code, i*2), _PyOpcode_OpName[opcode]); + dump_instrumentation_data_tools(code, data->tools, i, out); + dump_instrumentation_data_lines(code, data->lines, i, out); + dump_instrumentation_data_line_tools(code, data->line_tools, i, out); + dump_instrumentation_data_per_instruction(code, data, i, out); + fprintf(out, "\n"); + ; + } + if (!starred && star >= 0) { + fprintf(out, "Error offset not at valid instruction offset: %d\n", star); + fprintf(out, " "); + dump_instrumentation_data_tools(code, data->tools, star, out); + dump_instrumentation_data_lines(code, data->lines, star, out); + dump_instrumentation_data_line_tools(code, data->line_tools, star, out); + dump_instrumentation_data_per_instruction(code, data, star, out); + fprintf(out, "\n"); + } +} + +#define CHECK(test) do { \ + if (!(test)) { \ + dump_instrumentation_data(code, i, stderr); \ + } \ + assert(test); \ +} while (0) + +static bool +valid_opcode(int opcode) +{ + if (opcode > 0 && + opcode != RESERVED && + opcode < 255 && + _PyOpcode_OpName[opcode] && + _PyOpcode_OpName[opcode][0] != '<') + { + return true; + } + return false; +} + +static void +sanity_check_instrumentation(PyCodeObject *code) +{ + _PyCoMonitoringData *data = code->_co_monitoring; + if (data == NULL) { + return; + } + _Py_Monitors active_monitors = PyInterpreterState_Get()->monitors; + if (code->_co_monitoring) { + _Py_Monitors local_monitors = code->_co_monitoring->local_monitors; + active_monitors = monitors_or(active_monitors, local_monitors); + } + assert(monitors_equals( + code->_co_monitoring->active_monitors, + active_monitors) + ); + int code_len = (int)Py_SIZE(code); + for (int i = 0; i < code_len;) { + int opcode = 
_PyCode_CODE(code)[i].op.code; + int base_opcode = _Py_GetBaseOpcode(code, i); + CHECK(valid_opcode(opcode)); + CHECK(valid_opcode(base_opcode)); + if (opcode == INSTRUMENTED_INSTRUCTION) { + opcode = data->per_instruction_opcodes[i]; + if (!is_instrumented(opcode)) { + CHECK(_PyOpcode_Deopt[opcode] == opcode); + } + if (data->per_instruction_tools) { + uint8_t tools = active_monitors.tools[PY_MONITORING_EVENT_INSTRUCTION]; + CHECK((tools & data->per_instruction_tools[i]) == data->per_instruction_tools[i]); + } + } + if (opcode == INSTRUMENTED_LINE) { + CHECK(data->lines); + CHECK(valid_opcode(data->lines[i].original_opcode)); + opcode = data->lines[i].original_opcode; + CHECK(opcode != END_FOR); + CHECK(opcode != RESUME); + CHECK(opcode != INSTRUMENTED_RESUME); + if (!is_instrumented(opcode)) { + CHECK(_PyOpcode_Deopt[opcode] == opcode); + } + CHECK(opcode != INSTRUMENTED_LINE); + } + else if (data->lines && !is_instrumented(opcode)) { + CHECK(data->lines[i].original_opcode == 0 || + data->lines[i].original_opcode == base_opcode || + DE_INSTRUMENT[data->lines[i].original_opcode] == base_opcode); + } + if (is_instrumented(opcode)) { + CHECK(DE_INSTRUMENT[opcode] == base_opcode); + int event = EVENT_FOR_OPCODE[DE_INSTRUMENT[opcode]]; + if (event < 0) { + /* RESUME fixup */ + event = _PyCode_CODE(code)[i].op.arg; + } + CHECK(active_monitors.tools[event] != 0); + } + if (data->lines && base_opcode != END_FOR) { + int line1 = compute_line(code, i, data->lines[i].line_delta); + int line2 = PyCode_Addr2Line(code, i*sizeof(_Py_CODEUNIT)); + CHECK(line1 == line2); + } + CHECK(valid_opcode(opcode)); + if (data->tools) { + uint8_t local_tools = data->tools[i]; + if (opcode_has_event(base_opcode)) { + int event = EVENT_FOR_OPCODE[base_opcode]; + if (event == -1) { + /* RESUME fixup */ + event = _PyCode_CODE(code)[i].op.arg; + } + CHECK((active_monitors.tools[event] & local_tools) == local_tools); + } + else { + CHECK(local_tools == 0xff); + } + } + i += instruction_length(code, i); + assert(i <= code_len); + } +} +#else + +#define CHECK(test) assert(test) + +#endif + +/* Get the underlying opcode, stripping instrumentation */ +int _Py_GetBaseOpcode(PyCodeObject *code, int i) +{ + int opcode = _PyCode_CODE(code)[i].op.code; + if (opcode == INSTRUMENTED_LINE) { + opcode = code->_co_monitoring->lines[i].original_opcode; + } + if (opcode == INSTRUMENTED_INSTRUCTION) { + opcode = code->_co_monitoring->per_instruction_opcodes[i]; + } + CHECK(opcode != INSTRUMENTED_INSTRUCTION); + CHECK(opcode != INSTRUMENTED_LINE); + int deinstrumented = DE_INSTRUMENT[opcode]; + if (deinstrumented) { + return deinstrumented; + } + return _PyOpcode_Deopt[opcode]; +} + +static void +de_instrument(PyCodeObject *code, int i, int event) +{ + assert(event != PY_MONITORING_EVENT_INSTRUCTION); + assert(event != PY_MONITORING_EVENT_LINE); + + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + uint8_t *opcode_ptr = &instr->op.code; + int opcode = *opcode_ptr; + if (opcode == INSTRUMENTED_LINE) { + opcode_ptr = &code->_co_monitoring->lines[i].original_opcode; + opcode = *opcode_ptr; + } + if (opcode == INSTRUMENTED_INSTRUCTION) { + opcode_ptr = &code->_co_monitoring->per_instruction_opcodes[i]; + opcode = *opcode_ptr; + } + int deinstrumented = DE_INSTRUMENT[opcode]; + if (deinstrumented == 0) { + return; + } + CHECK(_PyOpcode_Deopt[deinstrumented] == deinstrumented); + *opcode_ptr = deinstrumented; + if (_PyOpcode_Caches[deinstrumented]) { + instr[1].cache = adaptive_counter_warmup(); + } +} + +static void 
+de_instrument_line(PyCodeObject *code, int i) +{ + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + uint8_t *opcode_ptr = &instr->op.code; + int opcode =*opcode_ptr; + if (opcode != INSTRUMENTED_LINE) { + return; + } + _PyCoLineInstrumentationData *lines = &code->_co_monitoring->lines[i]; + int original_opcode = lines->original_opcode; + CHECK(original_opcode != 0); + CHECK(original_opcode == _PyOpcode_Deopt[original_opcode]); + *opcode_ptr = instr->op.code = original_opcode; + if (_PyOpcode_Caches[original_opcode]) { + instr[1].cache = adaptive_counter_warmup(); + } + assert(*opcode_ptr != INSTRUMENTED_LINE); + assert(instr->op.code != INSTRUMENTED_LINE); +} + + +static void +de_instrument_per_instruction(PyCodeObject *code, int i) +{ + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + uint8_t *opcode_ptr = &instr->op.code; + int opcode =*opcode_ptr; + if (opcode == INSTRUMENTED_LINE) { + opcode_ptr = &code->_co_monitoring->lines[i].original_opcode; + opcode = *opcode_ptr; + } + if (opcode != INSTRUMENTED_INSTRUCTION) { + return; + } + int original_opcode = code->_co_monitoring->per_instruction_opcodes[i]; + CHECK(original_opcode != 0); + CHECK(original_opcode == _PyOpcode_Deopt[original_opcode]); + instr->op.code = original_opcode; + if (_PyOpcode_Caches[original_opcode]) { + instr[1].cache = adaptive_counter_warmup(); + } + assert(instr->op.code != INSTRUMENTED_INSTRUCTION); + /* Keep things clean for sanity check */ + code->_co_monitoring->per_instruction_opcodes[i] = 0; +} + + +static void +instrument(PyCodeObject *code, int i) +{ + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + uint8_t *opcode_ptr = &instr->op.code; + int opcode =*opcode_ptr; + if (opcode == INSTRUMENTED_LINE) { + _PyCoLineInstrumentationData *lines = &code->_co_monitoring->lines[i]; + opcode_ptr = &lines->original_opcode; + opcode = *opcode_ptr; + } + if (opcode == INSTRUMENTED_INSTRUCTION) { + opcode_ptr = &code->_co_monitoring->per_instruction_opcodes[i]; + opcode = *opcode_ptr; + CHECK(!is_instrumented(opcode)); + CHECK(opcode == _PyOpcode_Deopt[opcode]); + } + CHECK(opcode != 0); + if (!is_instrumented(opcode)) { + int deopt = _PyOpcode_Deopt[opcode]; + int instrumented = INSTRUMENTED_OPCODES[deopt]; + assert(instrumented); + *opcode_ptr = instrumented; + if (_PyOpcode_Caches[deopt]) { + instr[1].cache = adaptive_counter_warmup(); + } + } +} + +static void +instrument_line(PyCodeObject *code, int i) +{ + uint8_t *opcode_ptr = &_PyCode_CODE(code)[i].op.code; + int opcode =*opcode_ptr; + if (opcode == INSTRUMENTED_LINE) { + return; + } + _PyCoLineInstrumentationData *lines = &code->_co_monitoring->lines[i]; + lines->original_opcode = _PyOpcode_Deopt[opcode]; + CHECK(lines->original_opcode > 0); + *opcode_ptr = INSTRUMENTED_LINE; +} + +static void +instrument_per_instruction(PyCodeObject *code, int i) +{ + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + uint8_t *opcode_ptr = &instr->op.code; + int opcode =*opcode_ptr; + if (opcode == INSTRUMENTED_LINE) { + _PyCoLineInstrumentationData *lines = &code->_co_monitoring->lines[i]; + opcode_ptr = &lines->original_opcode; + opcode = *opcode_ptr; + } + if (opcode == INSTRUMENTED_INSTRUCTION) { + return; + } + CHECK(opcode != 0); + if (is_instrumented(opcode)) { + code->_co_monitoring->per_instruction_opcodes[i] = opcode; + } + else { + assert(opcode != 0); + assert(_PyOpcode_Deopt[opcode] != 0); + assert(_PyOpcode_Deopt[opcode] != RESUME); + code->_co_monitoring->per_instruction_opcodes[i] = _PyOpcode_Deopt[opcode]; + } + 
assert(code->_co_monitoring->per_instruction_opcodes[i] > 0); + *opcode_ptr = INSTRUMENTED_INSTRUCTION; +} + +#ifndef NDEBUG +static bool +instruction_has_event(PyCodeObject *code, int offset) +{ + _Py_CODEUNIT instr = _PyCode_CODE(code)[offset]; + int opcode = instr.op.code; + if (opcode == INSTRUMENTED_LINE) { + opcode = code->_co_monitoring->lines[offset].original_opcode; + } + if (opcode == INSTRUMENTED_INSTRUCTION) { + opcode = code->_co_monitoring->per_instruction_opcodes[offset]; + } + return opcode_has_event(opcode); +} +#endif + +static void +remove_tools(PyCodeObject * code, int offset, int event, int tools) +{ + assert(event != PY_MONITORING_EVENT_LINE); + assert(event != PY_MONITORING_EVENT_INSTRUCTION); + assert(event < PY_MONITORING_INSTRUMENTED_EVENTS); + assert(instruction_has_event(code, offset)); + _PyCoMonitoringData *monitoring = code->_co_monitoring; + if (monitoring && monitoring->tools) { + monitoring->tools[offset] &= ~tools; + if (monitoring->tools[offset] == 0) { + de_instrument(code, offset, event); + } + } + else { + /* Single tool */ + uint8_t single_tool = code->_co_monitoring->active_monitors.tools[event]; + assert(_Py_popcount32(single_tool) <= 1); + if (((single_tool & tools) == single_tool)) { + de_instrument(code, offset, event); + } + } +} + +#ifndef NDEBUG +static bool +tools_is_subset_for_event(PyCodeObject * code, int event, int tools) +{ + int global_tools = PyInterpreterState_Get()->monitors.tools[event]; + int local_tools = code->_co_monitoring->local_monitors.tools[event]; + return tools == ((global_tools | local_tools) & tools); +} +#endif + +static void +remove_line_tools(PyCodeObject * code, int offset, int tools) +{ + assert(code->_co_monitoring); + if (code->_co_monitoring->line_tools) + { + uint8_t *toolsptr = &code->_co_monitoring->line_tools[offset]; + *toolsptr &= ~tools; + if (*toolsptr == 0 ) { + de_instrument_line(code, offset); + } + } + else { + /* Single tool */ + uint8_t single_tool = code->_co_monitoring->active_monitors.tools[PY_MONITORING_EVENT_LINE]; + assert(_Py_popcount32(single_tool) <= 1); + if (((single_tool & tools) == single_tool)) { + de_instrument_line(code, offset); + } + } +} + +static void +add_tools(PyCodeObject * code, int offset, int event, int tools) +{ + assert(event != PY_MONITORING_EVENT_LINE); + assert(event != PY_MONITORING_EVENT_INSTRUCTION); + assert(event < PY_MONITORING_INSTRUMENTED_EVENTS); + assert(code->_co_monitoring); + if (code->_co_monitoring && + code->_co_monitoring->tools + ) { + code->_co_monitoring->tools[offset] |= tools; + } + else { + /* Single tool */ + assert(_Py_popcount32(tools) == 1); + assert(tools_is_subset_for_event(code, event, tools)); + } + instrument(code, offset); +} + +static void +add_line_tools(PyCodeObject * code, int offset, int tools) +{ + assert(tools_is_subset_for_event(code, PY_MONITORING_EVENT_LINE, tools)); + assert(code->_co_monitoring); + if (code->_co_monitoring->line_tools) { + code->_co_monitoring->line_tools[offset] |= tools; + } + else { + /* Single tool */ + assert(_Py_popcount32(tools) == 1); + } + instrument_line(code, offset); +} + + +static void +add_per_instruction_tools(PyCodeObject * code, int offset, int tools) +{ + assert(tools_is_subset_for_event(code, PY_MONITORING_EVENT_INSTRUCTION, tools)); + assert(code->_co_monitoring); + if (code->_co_monitoring->per_instruction_tools) { + code->_co_monitoring->per_instruction_tools[offset] |= tools; + } + else { + /* Single tool */ + assert(_Py_popcount32(tools) == 1); + } + instrument_per_instruction(code, 
offset); +} + + +static void +remove_per_instruction_tools(PyCodeObject * code, int offset, int tools) +{ + assert(code->_co_monitoring); + if (code->_co_monitoring->per_instruction_tools) { + uint8_t *toolsptr = &code->_co_monitoring->per_instruction_tools[offset]; + *toolsptr &= ~tools; + if (*toolsptr == 0) { + de_instrument_per_instruction(code, offset); + } + } + else { + /* Single tool */ + uint8_t single_tool = code->_co_monitoring->active_monitors.tools[PY_MONITORING_EVENT_INSTRUCTION]; + assert(_Py_popcount32(single_tool) <= 1); + if (((single_tool & tools) == single_tool)) { + de_instrument_per_instruction(code, offset); + } + } +} + + +/* Return 1 if DISABLE returned, -1 if error, 0 otherwise */ +static int +call_one_instrument( + PyInterpreterState *interp, PyThreadState *tstate, PyObject **args, + Py_ssize_t nargsf, int8_t tool, int event) +{ + assert(0 <= tool && tool < 8); + assert(tstate->tracing == 0); + PyObject *instrument = interp->monitoring_callables[tool][event]; + if (instrument == NULL) { + return 0; + } + int old_what = tstate->what_event; + tstate->what_event = event; + tstate->tracing++; + PyObject *res = _PyObject_VectorcallTstate(tstate, instrument, args, nargsf, NULL); + tstate->tracing--; + tstate->what_event = old_what; + if (res == NULL) { + return -1; + } + Py_DECREF(res); + return (res == &DISABLE); +} + +static const int8_t MOST_SIGNIFICANT_BITS[16] = { + -1, 0, 1, 1, + 2, 2, 2, 2, + 3, 3, 3, 3, + 3, 3, 3, 3, +}; + +/* We could use _Py_bit_length here, but that is designed for larger (32/64) + * bit ints, and can perform relatively poorly on platforms without the + * necessary intrinsics. */ +static inline int most_significant_bit(uint8_t bits) { + assert(bits != 0); + if (bits > 15) { + return MOST_SIGNIFICANT_BITS[bits>>4]+4; + } + return MOST_SIGNIFICANT_BITS[bits]; +} + +static bool +is_version_up_to_date(PyCodeObject *code, PyInterpreterState *interp) +{ + return interp->monitoring_version == code->_co_instrumentation_version; +} + +#ifndef NDEBUG +static bool +instrumentation_cross_checks(PyInterpreterState *interp, PyCodeObject *code) +{ + _Py_Monitors expected = monitors_or( + interp->monitors, + code->_co_monitoring->local_monitors); + return monitors_equals(code->_co_monitoring->active_monitors, expected); +} +#endif + +static inline uint8_t +get_tools_for_instruction(PyCodeObject * code, int i, int event) +{ + uint8_t tools; + assert(event != PY_MONITORING_EVENT_LINE); + assert(event != PY_MONITORING_EVENT_INSTRUCTION); + assert(instrumentation_cross_checks(PyThreadState_GET()->interp, code)); + _PyCoMonitoringData *monitoring = code->_co_monitoring; + if (event >= PY_MONITORING_UNGROUPED_EVENTS) { + assert(event == PY_MONITORING_EVENT_C_RAISE || + event == PY_MONITORING_EVENT_C_RETURN); + event = PY_MONITORING_EVENT_CALL; + } + if (event < PY_MONITORING_INSTRUMENTED_EVENTS && monitoring->tools) { + tools = monitoring->tools[i]; + } + else { + tools = code->_co_monitoring->active_monitors.tools[event]; + } + CHECK(tools_is_subset_for_event(code, event, tools)); + CHECK((tools & code->_co_monitoring->active_monitors.tools[event]) == tools); + return tools; +} + +static int +call_instrumentation_vector( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, Py_ssize_t nargs, PyObject *args[]) +{ + if (tstate->tracing) { + return 0; + } + assert(!_PyErr_Occurred(tstate)); + assert(args[0] == NULL); + PyCodeObject *code = frame->f_code; + assert(code->_co_instrumentation_version == 
tstate->interp->monitoring_version); + assert(is_version_up_to_date(code, tstate->interp)); + assert(instrumentation_cross_checks(tstate->interp, code)); + assert(args[1] == NULL); + args[1] = (PyObject *)code; + int offset = (int)(instr - _PyCode_CODE(code)); + /* Offset visible to user should be the offset in bytes, as that is the + * convention for APIs involving code offsets. */ + int bytes_offset = offset * (int)sizeof(_Py_CODEUNIT); + PyObject *offset_obj = PyLong_FromSsize_t(bytes_offset); + if (offset_obj == NULL) { + return -1; + } + assert(args[2] == NULL); + args[2] = offset_obj; + uint8_t tools = get_tools_for_instruction(code, offset, event); + Py_ssize_t nargsf = nargs | PY_VECTORCALL_ARGUMENTS_OFFSET; + PyObject **callargs = &args[1]; + int err = 0; + PyInterpreterState *interp = tstate->interp; + while (tools) { + int tool = most_significant_bit(tools); + assert(tool >= 0 && tool < 8); + assert(tools & (1 << tool)); + tools ^= (1 << tool); + int res = call_one_instrument(interp, tstate, callargs, nargsf, tool, event); + if (res == 0) { + /* Nothing to do */ + } + else if (res < 0) { + /* error */ + err = -1; + break; + } + else { + /* DISABLE */ + remove_tools(code, offset, event, 1 << tool); + } + } + Py_DECREF(offset_obj); + return err; +} + +int +_Py_call_instrumentation( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr) +{ + PyObject *args[3] = { NULL, NULL, NULL }; + return call_instrumentation_vector(tstate, event, frame, instr, 2, args); +} + +int +_Py_call_instrumentation_arg( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg) +{ + PyObject *args[4] = { NULL, NULL, NULL, arg }; + return call_instrumentation_vector(tstate, event, frame, instr, 3, args); +} + +int +_Py_call_instrumentation_2args( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1) +{ + PyObject *args[5] = { NULL, NULL, NULL, arg0, arg1 }; + return call_instrumentation_vector(tstate, event, frame, instr, 4, args); +} + +int +_Py_call_instrumentation_jump( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target) +{ + assert(event == PY_MONITORING_EVENT_JUMP || + event == PY_MONITORING_EVENT_BRANCH); + assert(frame->prev_instr == instr); + frame->prev_instr = target; + PyCodeObject *code = frame->f_code; + int to = (int)(target - _PyCode_CODE(code)); + PyObject *to_obj = PyLong_FromLong(to * (int)sizeof(_Py_CODEUNIT)); + if (to_obj == NULL) { + return -1; + } + PyObject *args[4] = { NULL, NULL, NULL, to_obj }; + int err = call_instrumentation_vector(tstate, event, frame, instr, 3, args); + Py_DECREF(to_obj); + return err; +} + +static void +call_instrumentation_vector_protected( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, Py_ssize_t nargs, PyObject *args[]) +{ + assert(_PyErr_Occurred(tstate)); + PyObject *exc = _PyErr_GetRaisedException(tstate); + int err = call_instrumentation_vector(tstate, event, frame, instr, nargs, args); + if (err) { + Py_XDECREF(exc); + } + else { + _PyErr_SetRaisedException(tstate, exc); + } + assert(_PyErr_Occurred(tstate)); +} + +void +_Py_call_instrumentation_exc0( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr) +{ + assert(_PyErr_Occurred(tstate)); + PyObject *args[3] = { NULL, NULL, NULL }; + call_instrumentation_vector_protected(tstate, event, frame, instr, 2, args); +} + +void 
+_Py_call_instrumentation_exc2( + PyThreadState *tstate, int event, + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1) +{ + assert(_PyErr_Occurred(tstate)); + PyObject *args[5] = { NULL, NULL, NULL, arg0, arg1 }; + call_instrumentation_vector_protected(tstate, event, frame, instr, 4, args); +} + + +int +_Py_Instrumentation_GetLine(PyCodeObject *code, int index) +{ + _PyCoMonitoringData *monitoring = code->_co_monitoring; + assert(monitoring != NULL); + assert(monitoring->lines != NULL); + assert(index >= code->_co_firsttraceable); + assert(index < Py_SIZE(code)); + _PyCoLineInstrumentationData *line_data = &monitoring->lines[index]; + int8_t line_delta = line_data->line_delta; + int line = compute_line(code, index, line_delta); + return line; +} + +int +_Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, _Py_CODEUNIT *instr) +{ + frame->prev_instr = instr; + PyCodeObject *code = frame->f_code; + assert(is_version_up_to_date(code, tstate->interp)); + assert(instrumentation_cross_checks(tstate->interp, code)); + int i = (int)(instr - _PyCode_CODE(code)); + _PyCoMonitoringData *monitoring = code->_co_monitoring; + _PyCoLineInstrumentationData *line_data = &monitoring->lines[i]; + uint8_t original_opcode = line_data->original_opcode; + if (tstate->tracing) { + goto done; + } + PyInterpreterState *interp = tstate->interp; + int8_t line_delta = line_data->line_delta; + int line = compute_line(code, i, line_delta); + uint8_t tools = code->_co_monitoring->line_tools != NULL ? + code->_co_monitoring->line_tools[i] : + (interp->monitors.tools[PY_MONITORING_EVENT_LINE] | + code->_co_monitoring->local_monitors.tools[PY_MONITORING_EVENT_LINE] + ); + PyObject *line_obj = PyLong_FromSsize_t(line); + if (line_obj == NULL) { + return -1; + } + PyObject *args[3] = { NULL, (PyObject *)code, line_obj }; + while (tools) { + int tool = most_significant_bit(tools); + assert(tool >= 0 && tool < 8); + assert(tools & (1 << tool)); + tools &= ~(1 << tool); + int res = call_one_instrument(interp, tstate, &args[1], + 2 | PY_VECTORCALL_ARGUMENTS_OFFSET, + tool, PY_MONITORING_EVENT_LINE); + if (res == 0) { + /* Nothing to do */ + } + else if (res < 0) { + /* error */ + Py_DECREF(line_obj); + return -1; + } + else { + /* DISABLE */ + remove_line_tools(code, i, 1 << tool); + } + } + Py_DECREF(line_obj); +done: + assert(original_opcode != 0); + assert(original_opcode < INSTRUMENTED_LINE); + assert(_PyOpcode_Deopt[original_opcode] == original_opcode); + return original_opcode; +} + +int +_Py_call_instrumentation_instruction(PyThreadState *tstate, _PyInterpreterFrame* frame, _Py_CODEUNIT *instr) +{ + PyCodeObject *code = frame->f_code; + assert(is_version_up_to_date(code, tstate->interp)); + assert(instrumentation_cross_checks(tstate->interp, code)); + int offset = (int)(instr - _PyCode_CODE(code)); + _PyCoMonitoringData *instrumentation_data = code->_co_monitoring; + assert(instrumentation_data->per_instruction_opcodes); + int next_opcode = instrumentation_data->per_instruction_opcodes[offset]; + if (tstate->tracing) { + return next_opcode; + } + PyInterpreterState *interp = tstate->interp; + uint8_t tools = instrumentation_data->per_instruction_tools != NULL ? 
+ instrumentation_data->per_instruction_tools[offset] : + (interp->monitors.tools[PY_MONITORING_EVENT_INSTRUCTION] | + code->_co_monitoring->local_monitors.tools[PY_MONITORING_EVENT_INSTRUCTION] + ); + int bytes_offset = offset * (int)sizeof(_Py_CODEUNIT); + PyObject *offset_obj = PyLong_FromSsize_t(bytes_offset); + if (offset_obj == NULL) { + return -1; + } + PyObject *args[3] = { NULL, (PyObject *)code, offset_obj }; + while (tools) { + int tool = most_significant_bit(tools); + assert(tool >= 0 && tool < 8); + assert(tools & (1 << tool)); + tools &= ~(1 << tool); + int res = call_one_instrument(interp, tstate, &args[1], + 2 | PY_VECTORCALL_ARGUMENTS_OFFSET, + tool, PY_MONITORING_EVENT_INSTRUCTION); + if (res == 0) { + /* Nothing to do */ + } + else if (res < 0) { + /* error */ + Py_DECREF(offset_obj); + return -1; + } + else { + /* DISABLE */ + remove_per_instruction_tools(code, offset, 1 << tool); + } + } + Py_DECREF(offset_obj); + assert(next_opcode != 0); + return next_opcode; +} + + +PyObject * +_PyMonitoring_RegisterCallback(int tool_id, int event_id, PyObject *obj) +{ + PyInterpreterState *is = _PyInterpreterState_Get(); + assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); + assert(0 <= event_id && event_id < PY_MONITORING_EVENTS); + PyObject *callback = is->monitoring_callables[tool_id][event_id]; + is->monitoring_callables[tool_id][event_id] = Py_XNewRef(obj); + return callback; +} + +static void +initialize_tools(PyCodeObject *code) +{ + uint8_t* tools = code->_co_monitoring->tools; + assert(tools != NULL); + int code_len = (int)Py_SIZE(code); + for (int i = 0; i < code_len; i++) { + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + int opcode = instr->op.code; + if (opcode == INSTRUMENTED_LINE) { + opcode = code->_co_monitoring->lines[i].original_opcode; + } + bool instrumented = is_instrumented(opcode); + if (instrumented) { + opcode = DE_INSTRUMENT[opcode]; + assert(opcode != 0); + } + opcode = _PyOpcode_Deopt[opcode]; + if (opcode_has_event(opcode)) { + if (instrumented) { + int8_t event; + if (opcode == RESUME) { + event = instr->op.arg != 0; + } + else { + event = EVENT_FOR_OPCODE[opcode]; + assert(event > 0); + } + assert(event >= 0); + assert(event < PY_MONITORING_INSTRUMENTED_EVENTS); + tools[i] = code->_co_monitoring->active_monitors.tools[event]; + CHECK(tools[i] != 0); + } + else { + tools[i] = 0; + } + } +#ifdef Py_DEBUG + /* Initialize tools for invalid locations to all ones to try to catch errors */ + else { + tools[i] = 0xff; + } + for (int j = 1; j <= _PyOpcode_Caches[opcode]; j++) { + tools[i+j] = 0xff; + } +#endif + i += _PyOpcode_Caches[opcode]; + } +} + +#define NO_LINE -128 + +static void +initialize_lines(PyCodeObject *code) +{ + _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; + assert(line_data != NULL); + int code_len = (int)Py_SIZE(code); + PyCodeAddressRange range; + _PyCode_InitAddressRange(code, &range); + for (int i = 0; i < code->_co_firsttraceable && i < code_len; i++) { + line_data[i].original_opcode = 0; + line_data[i].line_delta = -127; + } + int current_line = -1; + for (int i = code->_co_firsttraceable; i < code_len; ) { + int opcode = _Py_GetBaseOpcode(code, i); + int line = _PyCode_CheckLineNumber(i*(int)sizeof(_Py_CODEUNIT), &range); + line_data[i].line_delta = compute_line_delta(code, i, line); + int length = instruction_length(code, i); + switch (opcode) { + case END_ASYNC_FOR: + case END_FOR: + case END_SEND: + case RESUME: + /* END_FOR cannot start a line, as it is skipped by FOR_ITER + * END_SEND cannot 
start a line, as it is skipped by SEND + * RESUME must not be instrumented with INSTRUMENT_LINE */ + line_data[i].original_opcode = 0; + break; + default: + if (line != current_line && line >= 0) { + line_data[i].original_opcode = opcode; + } + else { + line_data[i].original_opcode = 0; + } + if (line >= 0) { + current_line = line; + } + } + for (int j = 1; j < length; j++) { + line_data[i+j].original_opcode = 0; + line_data[i+j].line_delta = NO_LINE; + } + switch (opcode) { + case RETURN_VALUE: + case RAISE_VARARGS: + case RERAISE: + /* Blocks of code after these terminators + * should be treated as different lines */ + current_line = -1; + } + i += length; + } +} + +static void +initialize_line_tools(PyCodeObject *code, _Py_Monitors *all_events) +{ + uint8_t *line_tools = code->_co_monitoring->line_tools; + assert(line_tools != NULL); + int code_len = (int)Py_SIZE(code); + for (int i = 0; i < code_len; i++) { + line_tools[i] = all_events->tools[PY_MONITORING_EVENT_LINE]; + } +} + +static int +allocate_instrumentation_data(PyCodeObject *code) +{ + + if (code->_co_monitoring == NULL) { + code->_co_monitoring = PyMem_Malloc(sizeof(_PyCoMonitoringData)); + if (code->_co_monitoring == NULL) { + PyErr_NoMemory(); + return -1; + } + code->_co_monitoring->local_monitors = (_Py_Monitors){ 0 }; + code->_co_monitoring->active_monitors = (_Py_Monitors){ 0 }; + code->_co_monitoring->tools = NULL; + code->_co_monitoring->lines = NULL; + code->_co_monitoring->line_tools = NULL; + code->_co_monitoring->per_instruction_opcodes = NULL; + code->_co_monitoring->per_instruction_tools = NULL; + } + return 0; +} + +static int +update_instrumentation_data(PyCodeObject *code, PyInterpreterState *interp) +{ + int code_len = (int)Py_SIZE(code); + if (allocate_instrumentation_data(code)) { + return -1; + } + _Py_Monitors all_events = monitors_or( + interp->monitors, + code->_co_monitoring->local_monitors); + bool multitools = multiple_tools(&all_events); + if (code->_co_monitoring->tools == NULL && multitools) { + code->_co_monitoring->tools = PyMem_Malloc(code_len); + if (code->_co_monitoring->tools == NULL) { + PyErr_NoMemory(); + return -1; + } + initialize_tools(code); + } + if (all_events.tools[PY_MONITORING_EVENT_LINE]) { + if (code->_co_monitoring->lines == NULL) { + code->_co_monitoring->lines = PyMem_Malloc(code_len * sizeof(_PyCoLineInstrumentationData)); + if (code->_co_monitoring->lines == NULL) { + PyErr_NoMemory(); + return -1; + } + initialize_lines(code); + } + if (multitools && code->_co_monitoring->line_tools == NULL) { + code->_co_monitoring->line_tools = PyMem_Malloc(code_len); + if (code->_co_monitoring->line_tools == NULL) { + PyErr_NoMemory(); + return -1; + } + initialize_line_tools(code, &all_events); + } + } + if (all_events.tools[PY_MONITORING_EVENT_INSTRUCTION]) { + if (code->_co_monitoring->per_instruction_opcodes == NULL) { + code->_co_monitoring->per_instruction_opcodes = PyMem_Malloc(code_len * sizeof(_PyCoLineInstrumentationData)); + if (code->_co_monitoring->per_instruction_opcodes == NULL) { + PyErr_NoMemory(); + return -1; + } + /* This may not be necessary, as we can initialize this memory lazily, but it helps catch errors. 
*/ + for (int i = 0; i < code_len; i++) { + code->_co_monitoring->per_instruction_opcodes[i] = 0; + } + } + if (multitools && code->_co_monitoring->per_instruction_tools == NULL) { + code->_co_monitoring->per_instruction_tools = PyMem_Malloc(code_len); + if (code->_co_monitoring->per_instruction_tools == NULL) { + PyErr_NoMemory(); + return -1; + } + /* This may not be necessary, as we can initialize this memory lazily, but it helps catch errors. */ + for (int i = 0; i < code_len; i++) { + code->_co_monitoring->per_instruction_tools[i] = 0; + } + } + } + return 0; +} + +static const uint8_t super_instructions[256] = { + [LOAD_FAST__LOAD_FAST] = 1, + [LOAD_FAST__LOAD_CONST] = 1, + [STORE_FAST__LOAD_FAST] = 1, + [STORE_FAST__STORE_FAST] = 1, + [LOAD_CONST__LOAD_FAST] = 1, +}; + +/* Should use instruction metadata for this */ +static bool +is_super_instruction(uint8_t opcode) { + return super_instructions[opcode] != 0; +} + +int +_Py_Instrument(PyCodeObject *code, PyInterpreterState *interp) +{ + + if (is_version_up_to_date(code, interp)) { + assert( + interp->monitoring_version == 0 || + instrumentation_cross_checks(interp, code) + ); + return 0; + } + int code_len = (int)Py_SIZE(code); + if (update_instrumentation_data(code, interp)) { + return -1; + } + _Py_Monitors active_events = monitors_or( + interp->monitors, + code->_co_monitoring->local_monitors); + _Py_Monitors new_events; + _Py_Monitors removed_events; + + bool restarted = interp->last_restart_version > code->_co_instrumentation_version; + if (restarted) { + removed_events = code->_co_monitoring->active_monitors; + new_events = active_events; + } + else { + removed_events = monitors_sub(code->_co_monitoring->active_monitors, active_events); + new_events = monitors_sub(active_events, code->_co_monitoring->active_monitors); + assert(monitors_are_empty(monitors_and(new_events, removed_events))); + } + code->_co_monitoring->active_monitors = active_events; + code->_co_instrumentation_version = interp->monitoring_version; + if (monitors_are_empty(new_events) && monitors_are_empty(removed_events)) { +#ifdef INSTRUMENT_DEBUG + sanity_check_instrumentation(code); +#endif + return 0; + } + /* Insert instrumentation */ + for (int i = 0; i < code_len; i+= instruction_length(code, i)) { + _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; + if (is_super_instruction(instr->op.code)) { + instr->op.code = _PyOpcode_Deopt[instr->op.code]; + } + CHECK(instr->op.code != 0); + int base_opcode = _Py_GetBaseOpcode(code, i); + if (opcode_has_event(base_opcode)) { + int8_t event; + if (base_opcode == RESUME) { + event = instr->op.arg > 0; + } + else { + event = EVENT_FOR_OPCODE[base_opcode]; + assert(event > 0); + } + uint8_t removed_tools = removed_events.tools[event]; + if (removed_tools) { + remove_tools(code, i, event, removed_tools); + } + uint8_t new_tools = new_events.tools[event]; + if (new_tools) { + add_tools(code, i, event, new_tools); + } + } + } + uint8_t new_line_tools = new_events.tools[PY_MONITORING_EVENT_LINE]; + uint8_t removed_line_tools = removed_events.tools[PY_MONITORING_EVENT_LINE]; + if (new_line_tools | removed_line_tools) { + _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; + for (int i = code->_co_firsttraceable; i < code_len;) { + if (line_data[i].original_opcode) { + if (removed_line_tools) { + remove_line_tools(code, i, removed_line_tools); + } + if (new_line_tools) { + add_line_tools(code, i, new_line_tools); + } + } + i += instruction_length(code, i); + } + } + uint8_t new_per_instruction_tools = 
new_events.tools[PY_MONITORING_EVENT_INSTRUCTION]; + uint8_t removed_per_instruction_tools = removed_events.tools[PY_MONITORING_EVENT_INSTRUCTION]; + if (new_per_instruction_tools | removed_per_instruction_tools) { + for (int i = code->_co_firsttraceable; i < code_len;) { + int opcode = _Py_GetBaseOpcode(code, i); + if (opcode == RESUME || opcode == END_FOR) { + i += instruction_length(code, i); + continue; + } + if (removed_per_instruction_tools) { + remove_per_instruction_tools(code, i, removed_per_instruction_tools); + } + if (new_per_instruction_tools) { + add_per_instruction_tools(code, i, new_per_instruction_tools); + } + i += instruction_length(code, i); + } + } +#ifdef INSTRUMENT_DEBUG + sanity_check_instrumentation(code); +#endif + return 0; +} + +#define C_RETURN_EVENTS \ + ((1 << PY_MONITORING_EVENT_C_RETURN) | \ + (1 << PY_MONITORING_EVENT_C_RAISE)) + +#define C_CALL_EVENTS \ + (C_RETURN_EVENTS | (1 << PY_MONITORING_EVENT_CALL)) + + +static int +instrument_all_executing_code_objects(PyInterpreterState *interp) { + _PyRuntimeState *runtime = &_PyRuntime; + HEAD_LOCK(runtime); + PyThreadState* ts = PyInterpreterState_ThreadHead(interp); + HEAD_UNLOCK(runtime); + while (ts) { + _PyInterpreterFrame *frame = ts->cframe->current_frame; + while (frame) { + if (frame->owner != FRAME_OWNED_BY_CSTACK) { + if (_Py_Instrument(frame->f_code, interp)) { + return -1; + } + } + frame = frame->previous; + } + HEAD_LOCK(runtime); + ts = PyThreadState_Next(ts); + HEAD_UNLOCK(runtime); + } + return 0; +} + +static void +set_events(_Py_Monitors *m, int tool_id, _PyMonitoringEventSet events) +{ + assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); + for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + uint8_t *tools = &m->tools[e]; + int val = (events >> e) & 1; + *tools &= ~(1 << tool_id); + *tools |= (val << tool_id); + } +} + +static int +check_tool(PyInterpreterState *interp, int tool_id) +{ + if (tool_id < PY_MONITORING_SYS_PROFILE_ID && + interp->monitoring_tool_names[tool_id] == NULL) + { + PyErr_Format(PyExc_ValueError, "tool %d is not in use", tool_id); + return -1; + } + return 0; +} + +int +_PyMonitoring_SetEvents(int tool_id, _PyMonitoringEventSet events) +{ + assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); + PyInterpreterState *interp = _PyInterpreterState_Get(); + assert(events < (1 << PY_MONITORING_UNGROUPED_EVENTS)); + if (check_tool(interp, tool_id)) { + return -1; + } + uint32_t existing_events = get_events(&interp->monitors, tool_id); + if (existing_events == events) { + return 0; + } + set_events(&interp->monitors, tool_id, events); + interp->monitoring_version++; + return instrument_all_executing_code_objects(interp); +} + +int +_PyMonitoring_SetLocalEvents(PyCodeObject *code, int tool_id, _PyMonitoringEventSet events) +{ + assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); + PyInterpreterState *interp = _PyInterpreterState_Get(); + assert(events < (1 << PY_MONITORING_UNGROUPED_EVENTS)); + if (check_tool(interp, tool_id)) { + return -1; + } + if (allocate_instrumentation_data(code)) { + return -1; + } + _Py_Monitors *local = &code->_co_monitoring->local_monitors; + uint32_t existing_events = get_events(local, tool_id); + if (existing_events == events) { + return 0; + } + set_events(local, tool_id, events); + if (is_version_up_to_date(code, interp)) { + /* Force instrumentation update */ + code->_co_instrumentation_version = UINT64_MAX; + } + if (_Py_Instrument(code, interp)) { + return -1; + } + return 0; +} + +/*[clinic input] +module monitoring 
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=37257f5987a360cf]*/
+
+#include "clinic/instrumentation.c.h"
+
+static int
+check_valid_tool(int tool_id)
+{
+    if (tool_id < 0 || tool_id >= PY_MONITORING_SYS_PROFILE_ID) {
+        PyErr_Format(PyExc_ValueError, "invalid tool %d (must be between 0 and 5)", tool_id);
+        return -1;
+    }
+    return 0;
+}
+
+/*[clinic input]
+monitoring.use_tool_id
+
+    tool_id: int
+    name: object
+    /
+
+[clinic start generated code]*/
+
+static PyObject *
+monitoring_use_tool_id_impl(PyObject *module, int tool_id, PyObject *name)
+/*[clinic end generated code: output=30d76dc92b7cd653 input=ebc453761c621be1]*/
+{
+    if (check_valid_tool(tool_id)) {
+        return NULL;
+    }
+    if (!PyUnicode_Check(name)) {
+        PyErr_SetString(PyExc_ValueError, "tool name must be a str");
+        return NULL;
+    }
+    PyInterpreterState *interp = _PyInterpreterState_Get();
+    if (interp->monitoring_tool_names[tool_id] != NULL) {
+        PyErr_Format(PyExc_ValueError, "tool %d is already in use", tool_id);
+        return NULL;
+    }
+    interp->monitoring_tool_names[tool_id] = Py_NewRef(name);
+    Py_RETURN_NONE;
+}
+
+/*[clinic input]
+monitoring.free_tool_id
+
+    tool_id: int
+    /
+
+[clinic start generated code]*/
+
+static PyObject *
+monitoring_free_tool_id_impl(PyObject *module, int tool_id)
+/*[clinic end generated code: output=86c2d2a1219a8591 input=a23fb6be3a8618e9]*/
+{
+    if (check_valid_tool(tool_id)) {
+        return NULL;
+    }
+    PyInterpreterState *interp = _PyInterpreterState_Get();
+    Py_CLEAR(interp->monitoring_tool_names[tool_id]);
+    Py_RETURN_NONE;
+}
+
+/*[clinic input]
+monitoring.get_tool
+
+    tool_id: int
+    /
+
+[clinic start generated code]*/
+
+static PyObject *
+monitoring_get_tool_impl(PyObject *module, int tool_id)
+/*[clinic end generated code: output=1c05a98b404a9a16 input=eeee9bebd0bcae9d]*/
+{
+    if (check_valid_tool(tool_id)) {
+        return NULL;
+    }
+    PyInterpreterState *interp = _PyInterpreterState_Get();
+    PyObject *name = interp->monitoring_tool_names[tool_id];
+    if (name == NULL) {
+        Py_RETURN_NONE;
+    }
+    return Py_NewRef(name);
+}
+
+/*[clinic input]
+monitoring.register_callback
+
+
+    tool_id: int
+    event: int
+    func: object
+    /
+
+[clinic start generated code]*/
+
+static PyObject *
+monitoring_register_callback_impl(PyObject *module, int tool_id, int event,
+                                  PyObject *func)
+/*[clinic end generated code: output=e64daa363004030c input=df6d70ea4cf81007]*/
+{
+    if (check_valid_tool(tool_id)) {
+        return NULL;
+    }
+    if (_Py_popcount32(event) != 1) {
+        PyErr_SetString(PyExc_ValueError, "The callback can only be set for one event at a time");
+        return NULL;
+    }
+    int event_id = _Py_bit_length(event)-1;
+    if (event_id < 0 || event_id >= PY_MONITORING_EVENTS) {
+        PyErr_Format(PyExc_ValueError, "invalid event %d", event);
+        return NULL;
+    }
+    if (func == Py_None) {
+        func = NULL;
+    }
+    func = _PyMonitoring_RegisterCallback(tool_id, event_id, func);
+    if (func == NULL) {
+        Py_RETURN_NONE;
+    }
+    return func;
+}
+
+/*[clinic input]
+monitoring.get_events -> int
+
+    tool_id: int
+    /
+
+[clinic start generated code]*/
+
+static int
+monitoring_get_events_impl(PyObject *module, int tool_id)
+/*[clinic end generated code: output=4450cc13f826c8c0 input=a64b238f76c4b2f7]*/
+{
+    if (check_valid_tool(tool_id)) {
+        return -1;
+    }
+    _Py_Monitors *m = &_PyInterpreterState_Get()->monitors;
+    _PyMonitoringEventSet event_set = get_events(m, tool_id);
+    return event_set;
+}
+
+/*[clinic input]
+monitoring.set_events
+
+    
tool_id: int + event_set: int + / + +[clinic start generated code]*/ + +static PyObject * +monitoring_set_events_impl(PyObject *module, int tool_id, int event_set) +/*[clinic end generated code: output=1916c1e49cfb5bdb input=a77ba729a242142b]*/ +{ + if (check_valid_tool(tool_id)) { + return NULL; + } + if (event_set < 0 || event_set >= (1 << PY_MONITORING_EVENTS)) { + PyErr_Format(PyExc_ValueError, "invalid event set 0x%x", event_set); + return NULL; + } + if ((event_set & C_RETURN_EVENTS) && (event_set & C_CALL_EVENTS) != C_CALL_EVENTS) { + PyErr_Format(PyExc_ValueError, "cannot set C_RETURN or C_RAISE events independently"); + return NULL; + } + event_set &= ~C_RETURN_EVENTS; + if (_PyMonitoring_SetEvents(tool_id, event_set)) { + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +monitoring.get_local_events -> int + + tool_id: int + code: object + / + +[clinic start generated code]*/ + +static int +monitoring_get_local_events_impl(PyObject *module, int tool_id, + PyObject *code) +/*[clinic end generated code: output=d3e92c1c9c1de8f9 input=bb0f927530386a94]*/ +{ + if (!PyCode_Check(code)) { + PyErr_Format( + PyExc_TypeError, + "code must be a code object" + ); + return -1; + } + if (check_valid_tool(tool_id)) { + return -1; + } + _PyMonitoringEventSet event_set = 0; + _PyCoMonitoringData *data = ((PyCodeObject *)code)->_co_monitoring; + if (data != NULL) { + for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + if ((data->local_monitors.tools[e] >> tool_id) & 1) { + event_set |= (1 << e); + } + } + } + return event_set; +} + +/*[clinic input] +monitoring.set_local_events + + tool_id: int + code: object + event_set: int + / + +[clinic start generated code]*/ + +static PyObject * +monitoring_set_local_events_impl(PyObject *module, int tool_id, + PyObject *code, int event_set) +/*[clinic end generated code: output=68cc755a65dfea99 input=5655ecd78d937a29]*/ +{ + if (!PyCode_Check(code)) { + PyErr_Format( + PyExc_TypeError, + "code must be a code object" + ); + return NULL; + } + if (check_valid_tool(tool_id)) { + return NULL; + } + if (event_set < 0 || event_set >= (1 << PY_MONITORING_EVENTS)) { + PyErr_Format(PyExc_ValueError, "invalid event set 0x%x", event_set); + return NULL; + } + if ((event_set & C_RETURN_EVENTS) && (event_set & C_CALL_EVENTS) != C_CALL_EVENTS) { + PyErr_Format(PyExc_ValueError, "cannot set C_RETURN or C_RAISE events independently"); + return NULL; + } + event_set &= ~C_RETURN_EVENTS; + if (_PyMonitoring_SetLocalEvents((PyCodeObject*)code, tool_id, event_set)) { + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +monitoring.restart_events + +[clinic start generated code]*/ + +static PyObject * +monitoring_restart_events_impl(PyObject *module) +/*[clinic end generated code: output=e025dd5ba33314c4 input=add8a855063c8008]*/ +{ + /* We want to ensure that: + * last restart version > instrumented version for all code objects + * last restart version < current version + */ + PyInterpreterState *interp = _PyInterpreterState_Get(); + interp->last_restart_version = interp->monitoring_version + 1; + interp->monitoring_version = interp->last_restart_version + 1; + if (instrument_all_executing_code_objects(interp)) { + return NULL; + } + Py_RETURN_NONE; +} + +static int +add_power2_constant(PyObject *obj, const char *name, int i) +{ + PyObject *val = PyLong_FromLong(1<<i); + if (val == NULL) { + return -1; + } + int err = PyObject_SetAttrString(obj, name, val); + Py_DECREF(val); + return err; +} + +static const char *const event_names [] = { + 
[PY_MONITORING_EVENT_PY_START] = "PY_START", + [PY_MONITORING_EVENT_PY_RESUME] = "PY_RESUME", + [PY_MONITORING_EVENT_PY_RETURN] = "PY_RETURN", + [PY_MONITORING_EVENT_PY_YIELD] = "PY_YIELD", + [PY_MONITORING_EVENT_CALL] = "CALL", + [PY_MONITORING_EVENT_LINE] = "LINE", + [PY_MONITORING_EVENT_INSTRUCTION] = "INSTRUCTION", + [PY_MONITORING_EVENT_JUMP] = "JUMP", + [PY_MONITORING_EVENT_BRANCH] = "BRANCH", + [PY_MONITORING_EVENT_C_RETURN] = "C_RETURN", + [PY_MONITORING_EVENT_PY_THROW] = "PY_THROW", + [PY_MONITORING_EVENT_RAISE] = "RAISE", + [PY_MONITORING_EVENT_EXCEPTION_HANDLED] = "EXCEPTION_HANDLED", + [PY_MONITORING_EVENT_C_RAISE] = "C_RAISE", + [PY_MONITORING_EVENT_PY_UNWIND] = "PY_UNWIND", + [PY_MONITORING_EVENT_STOP_ITERATION] = "STOP_ITERATION", +}; + +/*[clinic input] +monitoring._all_events +[clinic start generated code]*/ + +static PyObject * +monitoring__all_events_impl(PyObject *module) +/*[clinic end generated code: output=6b7581e2dbb690f6 input=62ee9672c17b7f0e]*/ +{ + PyInterpreterState *interp = _PyInterpreterState_Get(); + PyObject *res = PyDict_New(); + if (res == NULL) { + return NULL; + } + for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + uint8_t tools = interp->monitors.tools[e]; + if (tools == 0) { + continue; + } + PyObject *tools_obj = PyLong_FromLong(tools); + assert(tools_obj != NULL); + int err = PyDict_SetItemString(res, event_names[e], tools_obj); + Py_DECREF(tools_obj); + if (err < 0) { + Py_DECREF(res); + return NULL; + } + } + return res; +} + +static PyMethodDef methods[] = { + MONITORING_USE_TOOL_ID_METHODDEF + MONITORING_FREE_TOOL_ID_METHODDEF + MONITORING_GET_TOOL_METHODDEF + MONITORING_REGISTER_CALLBACK_METHODDEF + MONITORING_GET_EVENTS_METHODDEF + MONITORING_SET_EVENTS_METHODDEF + MONITORING_GET_LOCAL_EVENTS_METHODDEF + MONITORING_SET_LOCAL_EVENTS_METHODDEF + MONITORING_RESTART_EVENTS_METHODDEF + MONITORING__ALL_EVENTS_METHODDEF + {NULL, NULL} // sentinel +}; + +static struct PyModuleDef monitoring_module = { + PyModuleDef_HEAD_INIT, + .m_name = "sys.monitoring", + .m_size = -1, /* multiple "initialization" just copies the module dict. 
*/ + .m_methods = methods, +}; + +PyObject *_Py_CreateMonitoringObject(void) +{ + PyObject *mod = _PyModule_CreateInitialized(&monitoring_module, PYTHON_API_VERSION); + if (mod == NULL) { + return NULL; + } + if (PyObject_SetAttrString(mod, "DISABLE", &DISABLE)) { + goto error; + } + if (PyObject_SetAttrString(mod, "MISSING", &_PyInstrumentation_MISSING)) { + goto error; + } + PyObject *events = _PyNamespace_New(NULL); + if (events == NULL) { + goto error; + } + int err = PyObject_SetAttrString(mod, "events", events); + Py_DECREF(events); + if (err) { + goto error; + } + for (int i = 0; i < PY_MONITORING_EVENTS; i++) { + if (add_power2_constant(events, event_names[i], i)) { + goto error; + } + } + err = PyObject_SetAttrString(events, "NO_EVENTS", _PyLong_GetZero()); + if (err) goto error; + PyObject *val = PyLong_FromLong(PY_MONITORING_DEBUGGER_ID); + err = PyObject_SetAttrString(mod, "DEBUGGER_ID", val); + Py_DECREF(val); + if (err) goto error; + val = PyLong_FromLong(PY_MONITORING_COVERAGE_ID); + err = PyObject_SetAttrString(mod, "COVERAGE_ID", val); + Py_DECREF(val); + if (err) goto error; + val = PyLong_FromLong(PY_MONITORING_PROFILER_ID); + err = PyObject_SetAttrString(mod, "PROFILER_ID", val); + Py_DECREF(val); + if (err) goto error; + val = PyLong_FromLong(PY_MONITORING_OPTIMIZER_ID); + err = PyObject_SetAttrString(mod, "OPTIMIZER_ID", val); + Py_DECREF(val); + if (err) goto error; + return mod; +error: + Py_DECREF(mod); + return NULL; +} diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c new file mode 100644 index 00000000000000..e509e63a087a52 --- /dev/null +++ b/Python/legacy_tracing.c @@ -0,0 +1,528 @@ +/* Support for legacy tracing on top of PEP 669 instrumentation + * Provides callables to forward PEP 669 events to legacy events. 
+ */ + +#include <stddef.h> +#include "Python.h" +#include "pycore_ceval.h" +#include "pycore_object.h" +#include "pycore_sysmodule.h" + +typedef struct _PyLegacyEventHandler { + PyObject_HEAD + vectorcallfunc vectorcall; + int event; +} _PyLegacyEventHandler; + +/* The Py_tracefunc function expects the following arguments: + * obj: the trace object (PyObject *) + * frame: the current frame (PyFrameObject *) + * kind: the kind of event, see PyTrace_XXX #defines (int) + * arg: The arg (a PyObject *) + */ + +static PyObject * +call_profile_func(_PyLegacyEventHandler *self, PyObject *arg) +{ + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate->c_profilefunc == NULL) { + Py_RETURN_NONE; + } + PyFrameObject *frame = PyEval_GetFrame(); + if (frame == NULL) { + PyErr_SetString(PyExc_SystemError, + "Missing frame when calling profile function."); + return NULL; + } + Py_INCREF(frame); + int err = tstate->c_profilefunc(tstate->c_profileobj, frame, self->event, arg); + Py_DECREF(frame); + if (err) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +sys_profile_func2( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 2); + return call_profile_func(self, Py_None); +} + +static PyObject * +sys_profile_func3( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 3); + return call_profile_func(self, args[2]); +} + +static PyObject * +sys_profile_call_or_return( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 4); + PyObject *callable = args[2]; + if (PyCFunction_Check(callable)) { + return call_profile_func(self, callable); + } + if (Py_TYPE(callable) == &PyMethodDescr_Type) { + PyObject *self_arg = args[3]; + /* For backwards compatibility need to + * convert to builtin method */ + + /* If no arg, skip */ + if (self_arg == &_PyInstrumentation_MISSING) { + Py_RETURN_NONE; + } + PyObject *meth = Py_TYPE(callable)->tp_descr_get( + callable, self_arg, (PyObject*)Py_TYPE(self_arg)); + if (meth == NULL) { + return NULL; + } + PyObject *res = call_profile_func(self, meth); + Py_DECREF(meth); + return res; + } + Py_RETURN_NONE; +} + +static PyObject * +call_trace_func(_PyLegacyEventHandler *self, PyObject *arg) +{ + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate->c_tracefunc == NULL) { + Py_RETURN_NONE; + } + PyFrameObject *frame = PyEval_GetFrame(); + if (frame == NULL) { + PyErr_SetString(PyExc_SystemError, + "Missing frame when calling trace function."); + return NULL; + } + Py_INCREF(frame); + int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, arg); + Py_DECREF(frame); + if (err) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +sys_trace_exception_func( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 3); + PyObject *exc = args[2]; + assert(PyExceptionInstance_Check(exc)); + PyObject *type = (PyObject *)Py_TYPE(exc); + PyObject *tb = PyException_GetTraceback(exc); + if (tb == NULL) { + tb = Py_NewRef(Py_None); + } + PyObject *tuple = PyTuple_Pack(3, type, exc, tb); + Py_DECREF(tb); + if (tuple == NULL) { + return NULL; + } + PyObject *res = call_trace_func(self, tuple); + Py_DECREF(tuple); + return res; +} + 
+static PyObject * +sys_trace_func2( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 2); + return call_trace_func(self, Py_None); +} + +static PyObject * +sys_trace_return( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(!PyErr_Occurred()); + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 3); + assert(PyCode_Check(args[0])); + PyObject *val = args[2]; + PyObject *res = call_trace_func(self, val); + return res; +} + +static PyObject * +sys_trace_yield( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 3); + return call_trace_func(self, args[2]); +} + +static PyObject * +sys_trace_instruction_func( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 2); + PyFrameObject *frame = PyEval_GetFrame(); + if (frame == NULL) { + PyErr_SetString(PyExc_SystemError, + "Missing frame when calling trace function."); + return NULL; + } + if (!frame->f_trace_opcodes) { + Py_RETURN_NONE; + } + Py_INCREF(frame); + PyThreadState *tstate = _PyThreadState_GET(); + int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, Py_None); + frame->f_lineno = 0; + Py_DECREF(frame); + if (err) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +trace_line( + PyThreadState *tstate, _PyLegacyEventHandler *self, + PyFrameObject *frame, int line +) { + if (!frame->f_trace_lines) { + Py_RETURN_NONE; + } + if (line < 0) { + Py_RETURN_NONE; + } + frame ->f_last_traced_line = line; + Py_INCREF(frame); + frame->f_lineno = line; + int err = tstate->c_tracefunc(tstate->c_traceobj, frame, self->event, Py_None); + frame->f_lineno = 0; + Py_DECREF(frame); + if (err) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +sys_trace_line_func( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate->c_tracefunc == NULL) { + Py_RETURN_NONE; + } + assert(PyVectorcall_NARGS(nargsf) == 2); + int line = _PyLong_AsInt(args[1]); + assert(line >= 0); + PyFrameObject *frame = PyEval_GetFrame(); + if (frame == NULL) { + PyErr_SetString(PyExc_SystemError, + "Missing frame when calling trace function."); + return NULL; + } + assert(args[0] == (PyObject *)frame->f_frame->f_code); + if (frame ->f_last_traced_line == line) { + /* Already traced this line */ + Py_RETURN_NONE; + } + return trace_line(tstate, self, frame, line); +} + + +static PyObject * +sys_trace_jump_func( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate->c_tracefunc == NULL) { + Py_RETURN_NONE; + } + assert(PyVectorcall_NARGS(nargsf) == 3); + int from = _PyLong_AsInt(args[1])/sizeof(_Py_CODEUNIT); + assert(from >= 0); + int to = _PyLong_AsInt(args[2])/sizeof(_Py_CODEUNIT); + assert(to >= 0); + PyFrameObject *frame = PyEval_GetFrame(); + if (frame == NULL) { + PyErr_SetString(PyExc_SystemError, + "Missing frame when calling trace function."); + return NULL; + } + if (!frame->f_trace_lines) { + Py_RETURN_NONE; + } + PyCodeObject *code = (PyCodeObject *)args[0]; + assert(PyCode_Check(code)); + assert(code 
== frame->f_frame->f_code); + /* We can call _Py_Instrumentation_GetLine because we always set + * line events for tracing */ + int to_line = _Py_Instrumentation_GetLine(code, to); + /* Backward jump: Always generate event + * Forward jump: Only generate event if jumping to different line. */ + if (to > from && frame->f_last_traced_line == to_line) { + /* Already traced this line */ + Py_RETURN_NONE; + } + return trace_line(tstate, self, frame, to_line); +} + +/* We don't care about the exception here, + * we just treat it as a possible new line + */ +static PyObject * +sys_trace_exception_handled( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate->c_tracefunc == NULL) { + Py_RETURN_NONE; + } + assert(PyVectorcall_NARGS(nargsf) == 3); + PyFrameObject *frame = PyEval_GetFrame(); + PyCodeObject *code = (PyCodeObject *)args[0]; + assert(PyCode_Check(code)); + assert(code == frame->f_frame->f_code); + assert(PyLong_Check(args[1])); + int offset = _PyLong_AsInt(args[1])/sizeof(_Py_CODEUNIT); + /* We can call _Py_Instrumentation_GetLine because we always set + * line events for tracing */ + int line = _Py_Instrumentation_GetLine(code, offset); + if (frame->f_last_traced_line == line) { + /* Already traced this line */ + Py_RETURN_NONE; + } + return trace_line(tstate, self, frame, line); +} + + +PyTypeObject _PyLegacyEventHandler_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + "sys.legacy_event_handler", + sizeof(_PyLegacyEventHandler), + .tp_dealloc = (destructor)PyObject_Free, + .tp_vectorcall_offset = offsetof(_PyLegacyEventHandler, vectorcall), + .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_VECTORCALL | Py_TPFLAGS_DISALLOW_INSTANTIATION, + .tp_call = PyVectorcall_Call, +}; + +static int +set_callbacks(int tool, vectorcallfunc vectorcall, int legacy_event, int event1, int event2) +{ + _PyLegacyEventHandler *callback = + PyObject_NEW(_PyLegacyEventHandler, &_PyLegacyEventHandler_Type); + if (callback == NULL) { + return -1; + } + callback->vectorcall = vectorcall; + callback->event = legacy_event; + Py_XDECREF(_PyMonitoring_RegisterCallback(tool, event1, (PyObject *)callback)); + if (event2 >= 0) { + Py_XDECREF(_PyMonitoring_RegisterCallback(tool, event2, (PyObject *)callback)); + } + Py_DECREF(callback); + return 0; +} + +#ifndef NDEBUG +/* Ensure that tstate is valid: sanity check for PyEval_AcquireThread() and + PyEval_RestoreThread(). Detect if tstate memory was freed. It can happen + when a thread continues to run after Python finalization, especially + daemon threads. */ +static int +is_tstate_valid(PyThreadState *tstate) +{ + assert(!_PyMem_IsPtrFreed(tstate)); + assert(!_PyMem_IsPtrFreed(tstate->interp)); + return 1; +} +#endif + +int +_PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) +{ + assert(is_tstate_valid(tstate)); + /* The caller must hold the GIL */ + assert(PyGILState_Check()); + + /* Call _PySys_Audit() in the context of the current thread state, + even if tstate is not the current thread state. */ + PyThreadState *current_tstate = _PyThreadState_GET(); + if (_PySys_Audit(current_tstate, "sys.setprofile", NULL) < 0) { + return -1; + } + /* Setup PEP 669 monitoring callbacks and events. 
*/ + if (!tstate->interp->sys_profile_initialized) { + tstate->interp->sys_profile_initialized = true; + if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID, + (vectorcallfunc)sys_profile_func2, PyTrace_CALL, + PY_MONITORING_EVENT_PY_START, PY_MONITORING_EVENT_PY_RESUME)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID, + (vectorcallfunc)sys_profile_func3, PyTrace_RETURN, + PY_MONITORING_EVENT_PY_RETURN, PY_MONITORING_EVENT_PY_YIELD)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID, + (vectorcallfunc)sys_profile_func2, PyTrace_RETURN, + PY_MONITORING_EVENT_PY_UNWIND, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID, + (vectorcallfunc)sys_profile_call_or_return, PyTrace_C_CALL, + PY_MONITORING_EVENT_CALL, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID, + (vectorcallfunc)sys_profile_call_or_return, PyTrace_C_RETURN, + PY_MONITORING_EVENT_C_RETURN, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID, + (vectorcallfunc)sys_profile_call_or_return, PyTrace_C_EXCEPTION, + PY_MONITORING_EVENT_C_RAISE, -1)) { + return -1; + } + } + + int delta = (func != NULL) - (tstate->c_profilefunc != NULL); + tstate->c_profilefunc = func; + PyObject *old_profileobj = tstate->c_profileobj; + tstate->c_profileobj = Py_XNewRef(arg); + Py_XDECREF(old_profileobj); + tstate->interp->sys_profiling_threads += delta; + assert(tstate->interp->sys_profiling_threads >= 0); + + uint32_t events = 0; + if (tstate->interp->sys_profiling_threads) { + events = + (1 << PY_MONITORING_EVENT_PY_START) | (1 << PY_MONITORING_EVENT_PY_RESUME) | + (1 << PY_MONITORING_EVENT_PY_RETURN) | (1 << PY_MONITORING_EVENT_PY_YIELD) | + (1 << PY_MONITORING_EVENT_CALL) | (1 << PY_MONITORING_EVENT_PY_UNWIND); + } + return _PyMonitoring_SetEvents(PY_MONITORING_SYS_PROFILE_ID, events); +} + +int +_PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) +{ + assert(is_tstate_valid(tstate)); + /* The caller must hold the GIL */ + assert(PyGILState_Check()); + + /* Call _PySys_Audit() in the context of the current thread state, + even if tstate is not the current thread state. */ + PyThreadState *current_tstate = _PyThreadState_GET(); + if (_PySys_Audit(current_tstate, "sys.settrace", NULL) < 0) { + return -1; + } + + assert(tstate->interp->sys_tracing_threads >= 0); + /* Setup PEP 669 monitoring callbacks and events. 
*/ + if (!tstate->interp->sys_trace_initialized) { + tstate->interp->sys_trace_initialized = true; + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_func2, PyTrace_CALL, + PY_MONITORING_EVENT_PY_START, PY_MONITORING_EVENT_PY_RESUME)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_func2, PyTrace_CALL, + PY_MONITORING_EVENT_PY_THROW, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_return, PyTrace_RETURN, + PY_MONITORING_EVENT_PY_RETURN, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_yield, PyTrace_RETURN, + PY_MONITORING_EVENT_PY_YIELD, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_exception_func, PyTrace_EXCEPTION, + PY_MONITORING_EVENT_RAISE, PY_MONITORING_EVENT_STOP_ITERATION)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_line_func, PyTrace_LINE, + PY_MONITORING_EVENT_LINE, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_func2, PyTrace_RETURN, + PY_MONITORING_EVENT_PY_UNWIND, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_jump_func, PyTrace_LINE, + PY_MONITORING_EVENT_JUMP, PY_MONITORING_EVENT_BRANCH)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_instruction_func, PyTrace_OPCODE, + PY_MONITORING_EVENT_INSTRUCTION, -1)) { + return -1; + } + if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, + (vectorcallfunc)sys_trace_exception_handled, PyTrace_LINE, + PY_MONITORING_EVENT_EXCEPTION_HANDLED, -1)) { + return -1; + } + } + + int delta = (func != NULL) - (tstate->c_tracefunc != NULL); + tstate->c_tracefunc = func; + PyObject *old_traceobj = tstate->c_traceobj; + tstate->c_traceobj = Py_XNewRef(arg); + Py_XDECREF(old_traceobj); + tstate->interp->sys_tracing_threads += delta; + assert(tstate->interp->sys_tracing_threads >= 0); + + uint32_t events = 0; + if (tstate->interp->sys_tracing_threads) { + events = + (1 << PY_MONITORING_EVENT_PY_START) | (1 << PY_MONITORING_EVENT_PY_RESUME) | + (1 << PY_MONITORING_EVENT_PY_RETURN) | (1 << PY_MONITORING_EVENT_PY_YIELD) | + (1 << PY_MONITORING_EVENT_RAISE) | (1 << PY_MONITORING_EVENT_LINE) | + (1 << PY_MONITORING_EVENT_JUMP) | (1 << PY_MONITORING_EVENT_BRANCH) | + (1 << PY_MONITORING_EVENT_PY_UNWIND) | (1 << PY_MONITORING_EVENT_PY_THROW) | + (1 << PY_MONITORING_EVENT_STOP_ITERATION) | + (1 << PY_MONITORING_EVENT_EXCEPTION_HANDLED); + if (tstate->interp->f_opcode_trace_set) { + events |= (1 << PY_MONITORING_EVENT_INSTRUCTION); + } + } + return _PyMonitoring_SetEvents(PY_MONITORING_SYS_TRACE_ID, events); +} diff --git a/Python/makeopcodetargets.py b/Python/makeopcodetargets.py index 33a4b4a76a1253..2b402ae0b6a031 100755 --- a/Python/makeopcodetargets.py +++ b/Python/makeopcodetargets.py @@ -7,24 +7,18 @@ import sys -try: - from importlib.machinery import SourceFileLoader -except ImportError: - import imp - - def find_module(modname): - """Finds and returns a module in the local dist/checkout. - """ - modpath = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "Lib") - return imp.load_module(modname, *imp.find_module(modname, [modpath])) -else: - def find_module(modname): - """Finds and returns a module in the local dist/checkout. 
- """ - modpath = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "Lib", modname + ".py") - return SourceFileLoader(modname, modpath).load_module() +# 2023-04-27(warsaw): Pre-Python 3.12, this would catch ImportErrors and try to +# import imp, and then use imp.load_module(). The imp module was removed in +# Python 3.12 (and long deprecated before that), and it's unclear under what +# conditions this import will now fail, so the fallback was simply removed. +from importlib.machinery import SourceFileLoader + +def find_module(modname): + """Finds and returns a module in the local dist/checkout. + """ + modpath = os.path.join( + os.path.dirname(os.path.dirname(__file__)), "Lib", modname + ".py") + return SourceFileLoader(modname, modpath).load_module() def write_contents(f): @@ -32,7 +26,6 @@ def write_contents(f): """ opcode = find_module('opcode') targets = ['_unknown_opcode'] * 256 - targets[255] = "TARGET_DO_TRACING" for opname, op in opcode.opmap.items(): if not opcode.is_pseudo(op): targets[op] = "TARGET_%s" % opname diff --git a/Python/marshal.c b/Python/marshal.c index 94e79d4392ae6d..2966139cec9ae9 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -11,6 +11,7 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_code.h" // _PyCode_New() +#include "pycore_long.h" // _PyLong_DigitCount #include "pycore_hashtable.h" // _Py_hashtable_t #include "marshal.h" // Py_MARSHAL_VERSION @@ -232,13 +233,13 @@ w_PyLong(const PyLongObject *ob, char flag, WFILE *p) digit d; W_TYPE(TYPE_LONG, p); - if (Py_SIZE(ob) == 0) { + if (_PyLong_IsZero(ob)) { w_long((long)0, p); return; } /* set l to number of base PyLong_MARSHAL_BASE digits */ - n = Py_ABS(Py_SIZE(ob)); + n = _PyLong_DigitCount(ob); l = (n-1) * PyLong_MARSHAL_RATIO; d = ob->long_value.ob_digit[n-1]; assert(d != 0); /* a PyLong is always normalized */ @@ -251,7 +252,7 @@ w_PyLong(const PyLongObject *ob, char flag, WFILE *p) p->error = WFERR_UNMARSHALLABLE; return; } - w_long((long)(Py_SIZE(ob) > 0 ? l : -l), p); + w_long((long)(_PyLong_IsNegative(ob) ? -l : l), p); for (i=0; i < n-1; i++) { d = ob->long_value.ob_digit[i]; @@ -839,7 +840,7 @@ r_PyLong(RFILE *p) if (ob == NULL) return NULL; - Py_SET_SIZE(ob, n > 0 ? size : -size); + _PyLong_SetSignAndDigitCount(ob, n < 0 ? -1 : 1, size); for (i = 0; i < size-1; i++) { d = 0; diff --git a/Python/modsupport.c b/Python/modsupport.c index 75698455c88166..be229c987b8a78 100644 --- a/Python/modsupport.c +++ b/Python/modsupport.c @@ -3,6 +3,7 @@ #include "Python.h" #include "pycore_abstract.h" // _PyIndex_Check() +#include "pycore_object.h" // _PyType_IsReady() #define FLAG_SIZE_T 1 typedef double va_double; @@ -693,7 +694,7 @@ PyModule_AddStringConstant(PyObject *m, const char *name, const char *value) int PyModule_AddType(PyObject *module, PyTypeObject *type) { - if (PyType_Ready(type) < 0) { + if (!_PyType_IsReady(type) && PyType_Ready(type) < 0) { return -1; } diff --git a/Python/opcode_metadata.h b/Python/opcode_metadata.h index fbd50967dd0aed..c1a6ed4c18ab0a 100644 --- a/Python/opcode_metadata.h +++ b/Python/opcode_metadata.h @@ -3,7 +3,7 @@ // Python/bytecodes.c // Do not edit! 
-#ifndef NEED_OPCODE_TABLES +#ifndef NEED_OPCODE_METADATA extern int _PyOpcode_num_popped(int opcode, int oparg, bool jump); #else int @@ -13,6 +13,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 0; case RESUME: return 0; + case INSTRUMENTED_RESUME: + return 0; case LOAD_CLOSURE: return 0; case LOAD_FAST_CHECK: @@ -41,6 +43,12 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 0; case END_FOR: return 1+1; + case INSTRUMENTED_END_FOR: + return 2; + case END_SEND: + return 2; + case INSTRUMENTED_END_SEND: + return 2; case UNARY_NEGATIVE: return 1; case UNARY_NOT: @@ -99,8 +107,12 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case RETURN_VALUE: return 1; + case INSTRUMENTED_RETURN_VALUE: + return 1; case RETURN_CONST: return 0; + case INSTRUMENTED_RETURN_CONST: + return 0; case GET_AITER: return 1; case GET_ANEXT: @@ -111,6 +123,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 2; case SEND_GEN: return 2; + case INSTRUMENTED_YIELD_VALUE: + return 1; case YIELD_VALUE: return 1; case POP_EXCEPT: @@ -193,6 +207,10 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case MAP_ADD: return 2; + case LOAD_SUPER_ATTR: + return 3; + case LOAD_SUPER_ATTR_METHOD: + return 3; case LOAD_ATTR: return 1; case LOAD_ATTR_INSTANCE_VALUE: @@ -217,13 +235,11 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 2; case COMPARE_OP: return 2; - case COMPARE_AND_BRANCH: - return 2; - case COMPARE_AND_BRANCH_FLOAT: + case COMPARE_OP_FLOAT: return 2; - case COMPARE_AND_BRANCH_INT: + case COMPARE_OP_INT: return 2; - case COMPARE_AND_BRANCH_STR: + case COMPARE_OP_STR: return 2; case IS_OP: return 2; @@ -249,10 +265,6 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case POP_JUMP_IF_NONE: return 1; - case JUMP_IF_FALSE_OR_POP: - return 1; - case JUMP_IF_TRUE_OR_POP: - return 1; case JUMP_BACKWARD_NO_INTERRUPT: return 0; case GET_LEN: @@ -271,6 +283,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case FOR_ITER: return 1; + case INSTRUMENTED_FOR_ITER: + return 0; case FOR_ITER_LIST: return 1; case FOR_ITER_TUPLE: @@ -295,6 +309,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case KW_NAMES: return 0; + case INSTRUMENTED_CALL: + return 0; case CALL: return oparg + 2; case CALL_BOUND_METHOD_EXACT_ARGS: @@ -331,6 +347,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return oparg + 2; case CALL_NO_KW_METHOD_DESCRIPTOR_FAST: return oparg + 2; + case INSTRUMENTED_CALL_FUNCTION_EX: + return 0; case CALL_FUNCTION_EX: return ((oparg & 1) ? 
1 : 0) + 3; case MAKE_FUNCTION: @@ -347,17 +365,35 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 2; case SWAP: return (oparg-2) + 2; + case INSTRUMENTED_LINE: + return 0; + case INSTRUMENTED_INSTRUCTION: + return 0; + case INSTRUMENTED_JUMP_FORWARD: + return 0; + case INSTRUMENTED_JUMP_BACKWARD: + return 0; + case INSTRUMENTED_POP_JUMP_IF_TRUE: + return 0; + case INSTRUMENTED_POP_JUMP_IF_FALSE: + return 0; + case INSTRUMENTED_POP_JUMP_IF_NONE: + return 0; + case INSTRUMENTED_POP_JUMP_IF_NOT_NONE: + return 0; case EXTENDED_ARG: return 0; case CACHE: return 0; + case RESERVED: + return 0; default: return -1; } } #endif -#ifndef NEED_OPCODE_TABLES +#ifndef NEED_OPCODE_METADATA extern int _PyOpcode_num_pushed(int opcode, int oparg, bool jump); #else int @@ -367,6 +403,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case RESUME: return 0; + case INSTRUMENTED_RESUME: + return 0; case LOAD_CLOSURE: return 1; case LOAD_FAST_CHECK: @@ -395,6 +433,12 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 1; case END_FOR: return 0+0; + case INSTRUMENTED_END_FOR: + return 0; + case END_SEND: + return 1; + case INSTRUMENTED_END_SEND: + return 1; case UNARY_NEGATIVE: return 1; case UNARY_NOT: @@ -453,8 +497,12 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case RETURN_VALUE: return 0; + case INSTRUMENTED_RETURN_VALUE: + return 0; case RETURN_CONST: return 0; + case INSTRUMENTED_RETURN_CONST: + return 0; case GET_AITER: return 1; case GET_ANEXT: @@ -465,6 +513,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 2; case SEND_GEN: return 1; + case INSTRUMENTED_YIELD_VALUE: + return 1; case YIELD_VALUE: return 1; case POP_EXCEPT: @@ -547,6 +597,10 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case MAP_ADD: return 0; + case LOAD_SUPER_ATTR: + return ((oparg & 1) ? 1 : 0) + 1; + case LOAD_SUPER_ATTR_METHOD: + return 2; case LOAD_ATTR: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_ATTR_INSTANCE_VALUE: @@ -571,14 +625,12 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case COMPARE_OP: return 1; - case COMPARE_AND_BRANCH: - return 0; - case COMPARE_AND_BRANCH_FLOAT: - return 0; - case COMPARE_AND_BRANCH_INT: - return 0; - case COMPARE_AND_BRANCH_STR: - return 0; + case COMPARE_OP_FLOAT: + return 1; + case COMPARE_OP_INT: + return 1; + case COMPARE_OP_STR: + return 1; case IS_OP: return 1; case CONTAINS_OP: @@ -603,10 +655,6 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case POP_JUMP_IF_NONE: return 0; - case JUMP_IF_FALSE_OR_POP: - return (jump ? 1 : 0); - case JUMP_IF_TRUE_OR_POP: - return (jump ? 1 : 0); case JUMP_BACKWARD_NO_INTERRUPT: return 0; case GET_LEN: @@ -625,6 +673,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 1; case FOR_ITER: return 2; + case INSTRUMENTED_FOR_ITER: + return 0; case FOR_ITER_LIST: return 2; case FOR_ITER_TUPLE: @@ -649,6 +699,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return ((oparg & 1) ? 
1 : 0) + 1; case KW_NAMES: return 0; + case INSTRUMENTED_CALL: + return 0; case CALL: return 1; case CALL_BOUND_METHOD_EXACT_ARGS: @@ -685,6 +737,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 1; case CALL_NO_KW_METHOD_DESCRIPTOR_FAST: return 1; + case INSTRUMENTED_CALL_FUNCTION_EX: + return 0; case CALL_FUNCTION_EX: return 1; case MAKE_FUNCTION: @@ -701,28 +755,47 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 1; case SWAP: return (oparg-2) + 2; + case INSTRUMENTED_LINE: + return 0; + case INSTRUMENTED_INSTRUCTION: + return 0; + case INSTRUMENTED_JUMP_FORWARD: + return 0; + case INSTRUMENTED_JUMP_BACKWARD: + return 0; + case INSTRUMENTED_POP_JUMP_IF_TRUE: + return 0; + case INSTRUMENTED_POP_JUMP_IF_FALSE: + return 0; + case INSTRUMENTED_POP_JUMP_IF_NONE: + return 0; + case INSTRUMENTED_POP_JUMP_IF_NOT_NONE: + return 0; case EXTENDED_ARG: return 0; case CACHE: return 0; + case RESERVED: + return 0; default: return -1; } } #endif -enum InstructionFormat { INSTR_FMT_IB, INSTR_FMT_IBC, INSTR_FMT_IBC0, INSTR_FMT_IBC000, INSTR_FMT_IBC00000000, INSTR_FMT_IBIB, INSTR_FMT_IX, INSTR_FMT_IXC, INSTR_FMT_IXC000 }; +enum InstructionFormat { INSTR_FMT_IB, INSTR_FMT_IBC, INSTR_FMT_IBC00, INSTR_FMT_IBC000, INSTR_FMT_IBC00000000, INSTR_FMT_IBIB, INSTR_FMT_IX, INSTR_FMT_IXC, INSTR_FMT_IXC000, INSTR_FMT_IXC00000000 }; struct opcode_metadata { bool valid_entry; enum InstructionFormat instr_format; }; -#ifndef NEED_OPCODE_TABLES +#ifndef NEED_OPCODE_METADATA extern const struct opcode_metadata _PyOpcode_opcode_metadata[256]; #else const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [NOP] = { true, INSTR_FMT_IX }, [RESUME] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB }, [LOAD_CLOSURE] = { true, INSTR_FMT_IB }, [LOAD_FAST_CHECK] = { true, INSTR_FMT_IB }, [LOAD_FAST] = { true, INSTR_FMT_IB }, @@ -737,6 +810,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [POP_TOP] = { true, INSTR_FMT_IX }, [PUSH_NULL] = { true, INSTR_FMT_IX }, [END_FOR] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX }, + [END_SEND] = { true, INSTR_FMT_IX }, + [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX }, [UNARY_NEGATIVE] = { true, INSTR_FMT_IX }, [UNARY_NOT] = { true, INSTR_FMT_IX }, [UNARY_INVERT] = { true, INSTR_FMT_IX }, @@ -748,13 +824,13 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IX }, [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC }, [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC }, - [BINARY_SUBSCR] = { true, INSTR_FMT_IXC000 }, + [BINARY_SUBSCR] = { true, INSTR_FMT_IXC }, [BINARY_SLICE] = { true, INSTR_FMT_IX }, [STORE_SLICE] = { true, INSTR_FMT_IX }, - [BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC000 }, - [BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC000 }, - [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC000 }, - [BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC000 }, + [BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC }, + [BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC }, + [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC }, + [BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC }, [LIST_APPEND] = { true, INSTR_FMT_IB }, [SET_ADD] = { true, INSTR_FMT_IB }, [STORE_SUBSCR] = { true, INSTR_FMT_IXC }, @@ -766,12 +842,15 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [RAISE_VARARGS] = { true, INSTR_FMT_IB }, [INTERPRETER_EXIT] = { true, INSTR_FMT_IX }, [RETURN_VALUE] = { true, INSTR_FMT_IX }, + [INSTRUMENTED_RETURN_VALUE] = 
{ true, INSTR_FMT_IX }, [RETURN_CONST] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB }, [GET_AITER] = { true, INSTR_FMT_IX }, [GET_ANEXT] = { true, INSTR_FMT_IX }, [GET_AWAITABLE] = { true, INSTR_FMT_IB }, [SEND] = { true, INSTR_FMT_IBC }, [SEND_GEN] = { true, INSTR_FMT_IBC }, + [INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IX }, [YIELD_VALUE] = { true, INSTR_FMT_IX }, [POP_EXCEPT] = { true, INSTR_FMT_IX }, [RERAISE] = { true, INSTR_FMT_IB }, @@ -813,6 +892,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [DICT_UPDATE] = { true, INSTR_FMT_IB }, [DICT_MERGE] = { true, INSTR_FMT_IB }, [MAP_ADD] = { true, INSTR_FMT_IB }, + [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC00000000 }, + [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IXC00000000 }, [LOAD_ATTR] = { true, INSTR_FMT_IBC00000000 }, [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000 }, [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000 }, @@ -825,10 +906,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [STORE_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC000 }, [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000 }, [COMPARE_OP] = { true, INSTR_FMT_IBC }, - [COMPARE_AND_BRANCH] = { true, INSTR_FMT_IBC0 }, - [COMPARE_AND_BRANCH_FLOAT] = { true, INSTR_FMT_IBC0 }, - [COMPARE_AND_BRANCH_INT] = { true, INSTR_FMT_IBC0 }, - [COMPARE_AND_BRANCH_STR] = { true, INSTR_FMT_IBC0 }, + [COMPARE_OP_FLOAT] = { true, INSTR_FMT_IBC }, + [COMPARE_OP_INT] = { true, INSTR_FMT_IBC }, + [COMPARE_OP_STR] = { true, INSTR_FMT_IBC }, [IS_OP] = { true, INSTR_FMT_IB }, [CONTAINS_OP] = { true, INSTR_FMT_IB }, [CHECK_EG_MATCH] = { true, INSTR_FMT_IX }, @@ -841,8 +921,6 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IB }, [POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IB }, [POP_JUMP_IF_NONE] = { true, INSTR_FMT_IB }, - [JUMP_IF_FALSE_OR_POP] = { true, INSTR_FMT_IB }, - [JUMP_IF_TRUE_OR_POP] = { true, INSTR_FMT_IB }, [JUMP_BACKWARD_NO_INTERRUPT] = { true, INSTR_FMT_IB }, [GET_LEN] = { true, INSTR_FMT_IX }, [MATCH_CLASS] = { true, INSTR_FMT_IB }, @@ -852,6 +930,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [GET_ITER] = { true, INSTR_FMT_IX }, [GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX }, [FOR_ITER] = { true, INSTR_FMT_IBC }, + [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IB }, [FOR_ITER_LIST] = { true, INSTR_FMT_IBC }, [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC }, [FOR_ITER_RANGE] = { true, INSTR_FMT_IBC }, @@ -864,24 +943,26 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000 }, [LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000 }, [KW_NAMES] = { true, INSTR_FMT_IB }, - [CALL] = { true, INSTR_FMT_IBC000 }, - [CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC000 }, - [CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IBC000 }, - [CALL_PY_WITH_DEFAULTS] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_TYPE_1] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_STR_1] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_TUPLE_1] = { true, INSTR_FMT_IBC000 }, - [CALL_BUILTIN_CLASS] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_BUILTIN_O] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_BUILTIN_FAST] = { true, INSTR_FMT_IBC000 }, - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_LEN] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_ISINSTANCE] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_LIST_APPEND] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_METHOD_DESCRIPTOR_O] = { 
true, INSTR_FMT_IBC000 }, - [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC000 }, - [CALL_NO_KW_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC000 }, + [INSTRUMENTED_CALL] = { true, INSTR_FMT_IB }, + [CALL] = { true, INSTR_FMT_IBC00 }, + [CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00 }, + [CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IBC00 }, + [CALL_PY_WITH_DEFAULTS] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_TYPE_1] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_STR_1] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_TUPLE_1] = { true, INSTR_FMT_IBC00 }, + [CALL_BUILTIN_CLASS] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_BUILTIN_O] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_BUILTIN_FAST] = { true, INSTR_FMT_IBC00 }, + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_LEN] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_ISINSTANCE] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_LIST_APPEND] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_METHOD_DESCRIPTOR_O] = { true, INSTR_FMT_IBC00 }, + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC00 }, + [CALL_NO_KW_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00 }, + [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX }, [CALL_FUNCTION_EX] = { true, INSTR_FMT_IB }, [MAKE_FUNCTION] = { true, INSTR_FMT_IB }, [RETURN_GENERATOR] = { true, INSTR_FMT_IX }, @@ -890,7 +971,16 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [COPY] = { true, INSTR_FMT_IB }, [BINARY_OP] = { true, INSTR_FMT_IBC }, [SWAP] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_LINE] = { true, INSTR_FMT_IX }, + [INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX }, + [INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IB }, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IB }, [EXTENDED_ARG] = { true, INSTR_FMT_IB }, [CACHE] = { true, INSTR_FMT_IX }, + [RESERVED] = { true, INSTR_FMT_IX }, }; #endif diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index 62be4ce0fc1cc8..00f15ff98da418 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -4,17 +4,19 @@ static void *opcode_targets[256] = { &&TARGET_PUSH_NULL, &&TARGET_INTERPRETER_EXIT, &&TARGET_END_FOR, + &&TARGET_END_SEND, &&TARGET_BINARY_OP_ADD_FLOAT, &&TARGET_BINARY_OP_ADD_INT, &&TARGET_BINARY_OP_ADD_UNICODE, - &&TARGET_BINARY_OP_INPLACE_ADD_UNICODE, &&TARGET_NOP, - &&TARGET_BINARY_OP_MULTIPLY_FLOAT, + &&TARGET_BINARY_OP_INPLACE_ADD_UNICODE, &&TARGET_UNARY_NEGATIVE, &&TARGET_UNARY_NOT, + &&TARGET_BINARY_OP_MULTIPLY_FLOAT, &&TARGET_BINARY_OP_MULTIPLY_INT, - &&TARGET_BINARY_OP_SUBTRACT_FLOAT, &&TARGET_UNARY_INVERT, + &&TARGET_BINARY_OP_SUBTRACT_FLOAT, + &&TARGET_RESERVED, &&TARGET_BINARY_OP_SUBTRACT_INT, &&TARGET_BINARY_SUBSCR_DICT, &&TARGET_BINARY_SUBSCR_GETITEM, @@ -22,21 +24,21 @@ static void *opcode_targets[256] = { &&TARGET_BINARY_SUBSCR_TUPLE_INT, &&TARGET_CALL_PY_EXACT_ARGS, &&TARGET_CALL_PY_WITH_DEFAULTS, - &&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, - &&TARGET_CALL_BUILTIN_CLASS, &&TARGET_BINARY_SUBSCR, &&TARGET_BINARY_SLICE, &&TARGET_STORE_SLICE, - &&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS, - &&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, + 
&&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, + &&TARGET_CALL_BUILTIN_CLASS, &&TARGET_GET_LEN, &&TARGET_MATCH_MAPPING, &&TARGET_MATCH_SEQUENCE, &&TARGET_MATCH_KEYS, - &&TARGET_CALL_NO_KW_BUILTIN_FAST, + &&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS, &&TARGET_PUSH_EXC_INFO, &&TARGET_CHECK_EXC_MATCH, &&TARGET_CHECK_EG_MATCH, + &&TARGET_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, + &&TARGET_CALL_NO_KW_BUILTIN_FAST, &&TARGET_CALL_NO_KW_BUILTIN_O, &&TARGET_CALL_NO_KW_ISINSTANCE, &&TARGET_CALL_NO_KW_LEN, @@ -46,8 +48,6 @@ static void *opcode_targets[256] = { &&TARGET_CALL_NO_KW_METHOD_DESCRIPTOR_O, &&TARGET_CALL_NO_KW_STR_1, &&TARGET_CALL_NO_KW_TUPLE_1, - &&TARGET_CALL_NO_KW_TYPE_1, - &&TARGET_COMPARE_AND_BRANCH_FLOAT, &&TARGET_WITH_EXCEPT_START, &&TARGET_GET_AITER, &&TARGET_GET_ANEXT, @@ -55,39 +55,39 @@ static void *opcode_targets[256] = { &&TARGET_BEFORE_WITH, &&TARGET_END_ASYNC_FOR, &&TARGET_CLEANUP_THROW, - &&TARGET_COMPARE_AND_BRANCH_INT, - &&TARGET_COMPARE_AND_BRANCH_STR, - &&TARGET_FOR_ITER_LIST, - &&TARGET_FOR_ITER_TUPLE, + &&TARGET_CALL_NO_KW_TYPE_1, + &&TARGET_COMPARE_OP_FLOAT, + &&TARGET_COMPARE_OP_INT, + &&TARGET_COMPARE_OP_STR, &&TARGET_STORE_SUBSCR, &&TARGET_DELETE_SUBSCR, + &&TARGET_FOR_ITER_LIST, + &&TARGET_FOR_ITER_TUPLE, &&TARGET_FOR_ITER_RANGE, &&TARGET_FOR_ITER_GEN, + &&TARGET_LOAD_SUPER_ATTR_METHOD, &&TARGET_LOAD_ATTR_CLASS, + &&TARGET_GET_ITER, + &&TARGET_GET_YIELD_FROM_ITER, &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, + &&TARGET_LOAD_BUILD_CLASS, &&TARGET_LOAD_ATTR_INSTANCE_VALUE, &&TARGET_LOAD_ATTR_MODULE, - &&TARGET_GET_ITER, - &&TARGET_GET_YIELD_FROM_ITER, + &&TARGET_LOAD_ASSERTION_ERROR, + &&TARGET_RETURN_GENERATOR, &&TARGET_LOAD_ATTR_PROPERTY, - &&TARGET_LOAD_BUILD_CLASS, &&TARGET_LOAD_ATTR_SLOT, &&TARGET_LOAD_ATTR_WITH_HINT, - &&TARGET_LOAD_ASSERTION_ERROR, - &&TARGET_RETURN_GENERATOR, &&TARGET_LOAD_ATTR_METHOD_LAZY_DICT, &&TARGET_LOAD_ATTR_METHOD_NO_DICT, &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES, &&TARGET_LOAD_CONST__LOAD_FAST, + &&TARGET_RETURN_VALUE, &&TARGET_LOAD_FAST__LOAD_CONST, + &&TARGET_SETUP_ANNOTATIONS, &&TARGET_LOAD_FAST__LOAD_FAST, &&TARGET_LOAD_GLOBAL_BUILTIN, - &&TARGET_RETURN_VALUE, &&TARGET_LOAD_GLOBAL_MODULE, - &&TARGET_SETUP_ANNOTATIONS, - &&TARGET_STORE_ATTR_INSTANCE_VALUE, - &&TARGET_STORE_ATTR_SLOT, - &&TARGET_STORE_ATTR_WITH_HINT, &&TARGET_POP_EXCEPT, &&TARGET_STORE_NAME, &&TARGET_DELETE_NAME, @@ -110,9 +110,9 @@ static void *opcode_targets[256] = { &&TARGET_IMPORT_NAME, &&TARGET_IMPORT_FROM, &&TARGET_JUMP_FORWARD, - &&TARGET_JUMP_IF_FALSE_OR_POP, - &&TARGET_JUMP_IF_TRUE_OR_POP, - &&TARGET_STORE_FAST__LOAD_FAST, + &&TARGET_STORE_ATTR_INSTANCE_VALUE, + &&TARGET_STORE_ATTR_SLOT, + &&TARGET_STORE_ATTR_WITH_HINT, &&TARGET_POP_JUMP_IF_FALSE, &&TARGET_POP_JUMP_IF_TRUE, &&TARGET_LOAD_GLOBAL, @@ -140,7 +140,7 @@ static void *opcode_targets[256] = { &&TARGET_STORE_DEREF, &&TARGET_DELETE_DEREF, &&TARGET_JUMP_BACKWARD, - &&TARGET_COMPARE_AND_BRANCH, + &&TARGET_LOAD_SUPER_ATTR, &&TARGET_CALL_FUNCTION_EX, &&TARGET_LOAD_FAST_AND_CLEAR, &&TARGET_EXTENDED_ARG, @@ -152,24 +152,24 @@ static void *opcode_targets[256] = { &&TARGET_YIELD_VALUE, &&TARGET_RESUME, &&TARGET_MATCH_CLASS, + &&TARGET_STORE_FAST__LOAD_FAST, &&TARGET_STORE_FAST__STORE_FAST, - &&TARGET_STORE_SUBSCR_DICT, &&TARGET_FORMAT_VALUE, &&TARGET_BUILD_CONST_KEY_MAP, &&TARGET_BUILD_STRING, + &&TARGET_STORE_SUBSCR_DICT, &&TARGET_STORE_SUBSCR_LIST_INT, &&TARGET_UNPACK_SEQUENCE_LIST, &&TARGET_UNPACK_SEQUENCE_TUPLE, - &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, &&TARGET_LIST_EXTEND, &&TARGET_SET_UPDATE, &&TARGET_DICT_MERGE, 
&&TARGET_DICT_UPDATE, + &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, &&TARGET_SEND_GEN, &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_CALL, &&TARGET_KW_NAMES, &&TARGET_CALL_INTRINSIC_1, @@ -237,22 +237,22 @@ static void *opcode_targets[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, - &&TARGET_DO_TRACING + &&TARGET_INSTRUMENTED_POP_JUMP_IF_NONE, + &&TARGET_INSTRUMENTED_POP_JUMP_IF_NOT_NONE, + &&TARGET_INSTRUMENTED_RESUME, + &&TARGET_INSTRUMENTED_CALL, + &&TARGET_INSTRUMENTED_RETURN_VALUE, + &&TARGET_INSTRUMENTED_YIELD_VALUE, + &&TARGET_INSTRUMENTED_CALL_FUNCTION_EX, + &&TARGET_INSTRUMENTED_JUMP_FORWARD, + &&TARGET_INSTRUMENTED_JUMP_BACKWARD, + &&TARGET_INSTRUMENTED_RETURN_CONST, + &&TARGET_INSTRUMENTED_FOR_ITER, + &&TARGET_INSTRUMENTED_POP_JUMP_IF_FALSE, + &&TARGET_INSTRUMENTED_POP_JUMP_IF_TRUE, + &&TARGET_INSTRUMENTED_END_FOR, + &&TARGET_INSTRUMENTED_END_SEND, + &&TARGET_INSTRUMENTED_INSTRUCTION, + &&TARGET_INSTRUMENTED_LINE, + &&_unknown_opcode }; diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index d0c65cc1f7fd44..ba248d208e425a 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -2,7 +2,6 @@ #include "Python.h" -#include "pycore_bytesobject.h" // _PyBytes_InitTypes() #include "pycore_ceval.h" // _PyEval_FiniGIL() #include "pycore_context.h" // _PyContext_Init() #include "pycore_exceptions.h" // _PyExc_InitTypes() @@ -26,12 +25,12 @@ #include "pycore_sliceobject.h" // _PySlice_Fini() #include "pycore_sysmodule.h" // _PySys_ClearAuditHooks() #include "pycore_traceback.h" // _Py_DumpTracebackThreads() -#include "pycore_tuple.h" // _PyTuple_InitTypes() #include "pycore_typeobject.h" // _PyTypes_InitTypes() #include "pycore_unicodeobject.h" // _PyUnicode_InitTypes() #include "opcode.h" -extern void _PyIO_Fini(void); +extern PyStatus _PyIO_InitTypes(PyInterpreterState *interp); +extern void _PyIO_FiniTypes(PyInterpreterState *interp); #include <locale.h> // setlocale() #include <stdlib.h> // getenv() @@ -546,11 +545,21 @@ pycore_init_runtime(_PyRuntimeState *runtime, } -static void +static PyStatus init_interp_settings(PyInterpreterState *interp, const _PyInterpreterConfig *config) { assert(interp->feature_flags == 0); + if (config->use_main_obmalloc) { + interp->feature_flags |= Py_RTFLAGS_USE_MAIN_OBMALLOC; + } + else if (!config->check_multi_interp_extensions) { + /* The reason: PyModuleDef.m_base.m_copy leaks objects between + interpreters. 
*/ + return _PyStatus_ERR("per-interpreter obmalloc does not support " + "single-phase init extension modules"); + } + if (config->allow_fork) { interp->feature_flags |= Py_RTFLAGS_FORK; } @@ -569,6 +578,8 @@ init_interp_settings(PyInterpreterState *interp, const _PyInterpreterConfig *con if (config->check_multi_interp_extensions) { interp->feature_flags |= Py_RTFLAGS_MULTI_INTERP_EXTENSIONS; } + + return _PyStatus_OK(); } @@ -621,7 +632,10 @@ pycore_create_interpreter(_PyRuntimeState *runtime, } const _PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; - init_interp_settings(interp, &config); + status = init_interp_settings(interp, &config); + if (_PyStatus_EXCEPTION(status)) { + return status; + } PyThreadState *tstate = _PyThreadState_New(interp); if (tstate == NULL) { @@ -668,11 +682,6 @@ pycore_init_types(PyInterpreterState *interp) return status; } - status = _PyBytes_InitTypes(interp); - if (_PyStatus_EXCEPTION(status)) { - return status; - } - status = _PyLong_InitTypes(interp); if (_PyStatus_EXCEPTION(status)) { return status; @@ -688,15 +697,15 @@ pycore_init_types(PyInterpreterState *interp) return status; } - status = _PyTuple_InitTypes(interp); - if (_PyStatus_EXCEPTION(status)) { - return status; - } - if (_PyExc_InitTypes(interp) < 0) { return _PyStatus_ERR("failed to initialize an exception type"); } + status = _PyIO_InitTypes(interp); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + status = _PyExc_InitGlobalObjects(interp); if (_PyStatus_EXCEPTION(status)) { return status; @@ -1304,7 +1313,7 @@ finalize_modules_delete_special(PyThreadState *tstate, int verbose) { // List of names to clear in sys static const char * const sys_deletes[] = { - "path", "argv", "ps1", "ps2", + "path", "argv", "ps1", "ps2", "last_exc", "last_type", "last_value", "last_traceback", "__interactivehook__", // path_hooks and path_importer_cache are cleared @@ -1662,6 +1671,8 @@ finalize_interp_types(PyInterpreterState *interp) _PyFloat_FiniType(interp); _PyLong_FiniTypes(interp); _PyThread_FiniType(interp); + // XXX fini collections module static types (_PyStaticType_Dealloc()) + // XXX fini IO module static types (_PyStaticType_Dealloc()) _PyErr_FiniTypes(interp); _PyTypes_FiniTypes(interp); @@ -1695,9 +1706,7 @@ finalize_interp_clear(PyThreadState *tstate) /* Clear interpreter state and all thread states */ _PyInterpreterState_Clear(tstate); - if (is_main_interp) { - _PyIO_Fini(); - } + _PyIO_FiniTypes(tstate->interp); /* Clear all loghooks */ /* Both _PySys_Audit function and users still need PyObject, such as tuple. @@ -1930,7 +1939,9 @@ Py_FinalizeEx(void) if (show_ref_count) { _PyDebug_PrintTotalRefs(); } + _Py_FinalizeRefTotal(runtime); #endif + _Py_FinalizeAllocatedBlocks(runtime); #ifdef Py_TRACE_REFS /* Display addresses (& refcnts) of all objects still alive. @@ -2031,7 +2042,10 @@ new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) goto error; } - init_interp_settings(interp, config); + status = init_interp_settings(interp, config); + if (_PyStatus_EXCEPTION(status)) { + goto error; + } status = init_interp_create_gil(tstate); if (_PyStatus_EXCEPTION(status)) { @@ -2056,30 +2070,31 @@ new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) /* Oops, it didn't work. Undo it all. 
*/ PyErr_PrintEx(0); + PyThreadState_Swap(save_tstate); PyThreadState_Clear(tstate); PyThreadState_Delete(tstate); PyInterpreterState_Delete(interp); - PyThreadState_Swap(save_tstate); return status; } -PyThreadState * -_Py_NewInterpreterFromConfig(const _PyInterpreterConfig *config) +PyStatus +_Py_NewInterpreterFromConfig(PyThreadState **tstate_p, + const _PyInterpreterConfig *config) { - PyThreadState *tstate = NULL; - PyStatus status = new_interpreter(&tstate, config); - if (_PyStatus_EXCEPTION(status)) { - Py_ExitStatusException(status); - } - return tstate; + return new_interpreter(tstate_p, config); } PyThreadState * Py_NewInterpreter(void) { + PyThreadState *tstate = NULL; const _PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; - return _Py_NewInterpreterFromConfig(&config); + PyStatus status = _Py_NewInterpreterFromConfig(&tstate, &config); + if (_PyStatus_EXCEPTION(status)) { + Py_ExitStatusException(status); + } + return tstate; } /* Delete an interpreter and its last thread. This requires that the @@ -2158,10 +2173,9 @@ add_main_module(PyInterpreterState *interp) Py_DECREF(bimod); } - /* Main is a little special - imp.is_builtin("__main__") will return - * False, but BuiltinImporter is still the most appropriate initial - * setting for its __loader__ attribute. A more suitable value will - * be set if __main__ gets further initialized later in the startup + /* Main is a little special - BuiltinImporter is the most appropriate + * initial setting for its __loader__ attribute. A more suitable value + * will be set if __main__ gets further initialized later in the startup * process. */ loader = _PyDict_GetItemStringWithError(d, "__loader__"); @@ -2547,6 +2561,7 @@ _Py_FatalError_PrintExc(PyThreadState *tstate) if (ferr == NULL || ferr == Py_None) { /* sys.stderr is not set yet or set to None, no need to try to display the exception */ + Py_DECREF(exc); return 0; } @@ -2555,7 +2570,7 @@ _Py_FatalError_PrintExc(PyThreadState *tstate) PyObject *tb = PyException_GetTraceback(exc); int has_tb = (tb != NULL) && (tb != Py_None); Py_XDECREF(tb); - Py_XDECREF(exc); + Py_DECREF(exc); /* sys.stderr may be buffered: call sys.stderr.flush() */ PyObject *res = PyObject_CallMethodNoArgs(ferr, &_Py_ID(flush)); @@ -2925,23 +2940,23 @@ wait_for_thread_shutdown(PyThreadState *tstate) Py_DECREF(threading); } -#define NEXITFUNCS 32 int Py_AtExit(void (*func)(void)) { - if (_PyRuntime.nexitfuncs >= NEXITFUNCS) + if (_PyRuntime.atexit.ncallbacks >= NEXITFUNCS) return -1; - _PyRuntime.exitfuncs[_PyRuntime.nexitfuncs++] = func; + _PyRuntime.atexit.callbacks[_PyRuntime.atexit.ncallbacks++] = func; return 0; } static void call_ll_exitfuncs(_PyRuntimeState *runtime) { - while (runtime->nexitfuncs > 0) { + struct _atexit_runtime_state *state = &runtime->atexit; + while (state->ncallbacks > 0) { /* pop last function from the list */ - runtime->nexitfuncs--; - void (*exitfunc)(void) = runtime->exitfuncs[runtime->nexitfuncs]; - runtime->exitfuncs[runtime->nexitfuncs] = NULL; + state->ncallbacks--; + atexit_callbackfunc exitfunc = state->callbacks[state->ncallbacks]; + state->callbacks[state->ncallbacks] = NULL; exitfunc(); } diff --git a/Python/pystate.c b/Python/pystate.c index 3a2966c54a4c3b..f103a059f0f369 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -60,23 +60,43 @@ extern "C" { For each of these functions, the GIL must be held by the current thread. 
*/ + +#ifdef HAVE_THREAD_LOCAL +_Py_thread_local PyThreadState *_Py_tss_tstate = NULL; +#endif + static inline PyThreadState * -current_fast_get(_PyRuntimeState *runtime) +current_fast_get(_PyRuntimeState *Py_UNUSED(runtime)) { - return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->tstate_current); +#ifdef HAVE_THREAD_LOCAL + return _Py_tss_tstate; +#else + // XXX Fall back to the PyThread_tss_*() API. +# error "no supported thread-local variable storage classifier" +#endif } static inline void -current_fast_set(_PyRuntimeState *runtime, PyThreadState *tstate) +current_fast_set(_PyRuntimeState *Py_UNUSED(runtime), PyThreadState *tstate) { assert(tstate != NULL); - _Py_atomic_store_relaxed(&runtime->tstate_current, (uintptr_t)tstate); +#ifdef HAVE_THREAD_LOCAL + _Py_tss_tstate = tstate; +#else + // XXX Fall back to the PyThread_tss_*() API. +# error "no supported thread-local variable storage classifier" +#endif } static inline void -current_fast_clear(_PyRuntimeState *runtime) +current_fast_clear(_PyRuntimeState *Py_UNUSED(runtime)) { - _Py_atomic_store_relaxed(&runtime->tstate_current, (uintptr_t)NULL); +#ifdef HAVE_THREAD_LOCAL + _Py_tss_tstate = NULL; +#else + // XXX Fall back to the PyThread_tss_*() API. +# error "no supported thread-local variable storage classifier" +#endif } #define tstate_verify_not_active(tstate) \ @@ -84,6 +104,12 @@ current_fast_clear(_PyRuntimeState *runtime) _Py_FatalErrorFormat(__func__, "tstate %p is still current", tstate); \ } +PyThreadState * +_PyThreadState_GetCurrent(void) +{ + return current_fast_get(&_PyRuntime); +} + //------------------------------------------------ // the thread state bound to the current OS thread @@ -354,47 +380,29 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS static const _PyRuntimeState initial = _PyRuntimeState_INIT(_PyRuntime); _Py_COMP_DIAG_POP +#define NUMLOCKS 5 + static int -alloc_for_runtime(PyThread_type_lock *plock1, PyThread_type_lock *plock2, - PyThread_type_lock *plock3, PyThread_type_lock *plock4) +alloc_for_runtime(PyThread_type_lock locks[NUMLOCKS]) { /* Force default allocator, since _PyRuntimeState_Fini() must use the same allocator than this function. 
*/ PyMemAllocatorEx old_alloc; _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - PyThread_type_lock lock1 = PyThread_allocate_lock(); - if (lock1 == NULL) { - return -1; - } - - PyThread_type_lock lock2 = PyThread_allocate_lock(); - if (lock2 == NULL) { - PyThread_free_lock(lock1); - return -1; - } - - PyThread_type_lock lock3 = PyThread_allocate_lock(); - if (lock3 == NULL) { - PyThread_free_lock(lock1); - PyThread_free_lock(lock2); - return -1; - } - - PyThread_type_lock lock4 = PyThread_allocate_lock(); - if (lock4 == NULL) { - PyThread_free_lock(lock1); - PyThread_free_lock(lock2); - PyThread_free_lock(lock3); - return -1; + for (int i = 0; i < NUMLOCKS; i++) { + PyThread_type_lock lock = PyThread_allocate_lock(); + if (lock == NULL) { + for (int j = 0; j < i; j++) { + PyThread_free_lock(locks[j]); + locks[j] = NULL; + } + break; + } + locks[i] = lock; } PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - - *plock1 = lock1; - *plock2 = lock2; - *plock3 = lock3; - *plock4 = lock4; return 0; } @@ -403,10 +411,7 @@ init_runtime(_PyRuntimeState *runtime, void *open_code_hook, void *open_code_userdata, _Py_AuditHookEntry *audit_hook_head, Py_ssize_t unicode_next_index, - PyThread_type_lock unicode_ids_mutex, - PyThread_type_lock interpreters_mutex, - PyThread_type_lock xidregistry_mutex, - PyThread_type_lock getargs_mutex) + PyThread_type_lock locks[NUMLOCKS]) { if (runtime->_initialized) { Py_FatalError("runtime already initialized"); @@ -424,17 +429,22 @@ init_runtime(_PyRuntimeState *runtime, PyPreConfig_InitPythonConfig(&runtime->preconfig); - runtime->interpreters.mutex = interpreters_mutex; - - runtime->xidregistry.mutex = xidregistry_mutex; - - runtime->getargs.mutex = getargs_mutex; + PyThread_type_lock *lockptrs[NUMLOCKS] = { + &runtime->interpreters.mutex, + &runtime->xidregistry.mutex, + &runtime->getargs.mutex, + &runtime->unicode_state.ids.lock, + &runtime->imports.extensions.mutex, + }; + for (int i = 0; i < NUMLOCKS; i++) { + assert(locks[i] != NULL); + *lockptrs[i] = locks[i]; + } // Set it to the ID of the main thread of the main interpreter. runtime->main_thread = PyThread_get_thread_ident(); runtime->unicode_state.ids.next_index = unicode_next_index; - runtime->unicode_state.ids.lock = unicode_ids_mutex; runtime->_initialized = 1; } @@ -452,8 +462,8 @@ _PyRuntimeState_Init(_PyRuntimeState *runtime) // is called multiple times. Py_ssize_t unicode_next_index = runtime->unicode_state.ids.next_index; - PyThread_type_lock lock1, lock2, lock3, lock4; - if (alloc_for_runtime(&lock1, &lock2, &lock3, &lock4) != 0) { + PyThread_type_lock locks[NUMLOCKS]; + if (alloc_for_runtime(locks) != 0) { return _PyStatus_NO_MEMORY(); } @@ -474,7 +484,7 @@ _PyRuntimeState_Init(_PyRuntimeState *runtime) } init_runtime(runtime, open_code_hook, open_code_userdata, audit_hook_head, - unicode_next_index, lock1, lock2, lock3, lock4); + unicode_next_index, locks); return _PyStatus_OK(); } @@ -482,6 +492,11 @@ _PyRuntimeState_Init(_PyRuntimeState *runtime) void _PyRuntimeState_Fini(_PyRuntimeState *runtime) { +#ifdef Py_REF_DEBUG + /* The count is cleared by _Py_FinalizeRefTotal(). 
*/ + assert(runtime->object_state.interpreter_leaks == 0); +#endif + if (gilstate_tss_initialized(runtime)) { gilstate_tss_fini(runtime); } @@ -499,10 +514,16 @@ _PyRuntimeState_Fini(_PyRuntimeState *runtime) LOCK = NULL; \ } - FREE_LOCK(runtime->interpreters.mutex); - FREE_LOCK(runtime->xidregistry.mutex); - FREE_LOCK(runtime->unicode_state.ids.lock); - FREE_LOCK(runtime->getargs.mutex); + PyThread_type_lock *lockptrs[NUMLOCKS] = { + &runtime->interpreters.mutex, + &runtime->xidregistry.mutex, + &runtime->getargs.mutex, + &runtime->unicode_state.ids.lock, + &runtime->imports.extensions.mutex, + }; + for (int i = 0; i < NUMLOCKS; i++) { + FREE_LOCK(*lockptrs[i]); + } #undef FREE_LOCK PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); @@ -522,25 +543,26 @@ _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime) PyMemAllocatorEx old_alloc; _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - int reinit_interp = _PyThread_at_fork_reinit(&runtime->interpreters.mutex); - int reinit_xidregistry = _PyThread_at_fork_reinit(&runtime->xidregistry.mutex); - int reinit_unicode_ids = _PyThread_at_fork_reinit(&runtime->unicode_state.ids.lock); - int reinit_getargs = _PyThread_at_fork_reinit(&runtime->getargs.mutex); + PyThread_type_lock *lockptrs[NUMLOCKS] = { + &runtime->interpreters.mutex, + &runtime->xidregistry.mutex, + &runtime->getargs.mutex, + &runtime->unicode_state.ids.lock, + &runtime->imports.extensions.mutex, + }; + int reinit_err = 0; + for (int i = 0; i < NUMLOCKS; i++) { + reinit_err += _PyThread_at_fork_reinit(lockptrs[i]); + } PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); /* bpo-42540: id_mutex is freed by _PyInterpreterState_Delete, which does * not force the default allocator. */ - int reinit_main_id = _PyThread_at_fork_reinit(&runtime->interpreters.main->id_mutex); + reinit_err += _PyThread_at_fork_reinit(&runtime->interpreters.main->id_mutex); - if (reinit_interp < 0 - || reinit_main_id < 0 - || reinit_xidregistry < 0 - || reinit_unicode_ids < 0 - || reinit_getargs < 0) - { + if (reinit_err < 0) { return _PyStatus_ERR("Failed to reinitialize runtime locks"); - } PyStatus status = gilstate_tss_reinit(runtime); @@ -632,7 +654,6 @@ free_interpreter(PyInterpreterState *interp) main interpreter. We fix those fields here, in addition to the other dynamically initialized fields. */ - static void init_interpreter(PyInterpreterState *interp, _PyRuntimeState *runtime, int64_t id, @@ -653,16 +674,34 @@ init_interpreter(PyInterpreterState *interp, assert(next != NULL || (interp == runtime->interpreters.main)); interp->next = next; + /* Initialize obmalloc, but only for subinterpreters, + since the main interpreter is initialized statically. */ + if (interp != &runtime->_main_interpreter) { + poolp temp[OBMALLOC_USED_POOLS_SIZE] = \ + _obmalloc_pools_INIT(interp->obmalloc.pools); + memcpy(&interp->obmalloc.pools.used, temp, sizeof(temp)); + } + _PyEval_InitState(&interp->ceval, pending_lock); _PyGC_InitState(&interp->gc); PyConfig_InitPythonConfig(&interp->config); _PyType_InitCache(interp); + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + interp->monitors.tools[i] = 0; + } + for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) { + for (int e = 0; e < PY_MONITORING_EVENTS; e++) { + interp->monitoring_callables[t][e] = NULL; + } + } + interp->sys_profile_initialized = false; + interp->sys_trace_initialized = false; if (interp != &runtime->_main_interpreter) { /* Fix the self-referential, statically initialized fields. 
*/ interp->dtoa = (struct _dtoa_state)_dtoa_state_INIT(interp); } - + interp->f_opcode_trace_set = false; interp->_initialized = 1; } @@ -795,6 +834,20 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) Py_CLEAR(interp->audit_hooks); + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + interp->monitors.tools[i] = 0; + } + for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) { + for (int e = 0; e < PY_MONITORING_EVENTS; e++) { + Py_CLEAR(interp->monitoring_callables[t][e]); + } + } + interp->sys_profile_initialized = false; + interp->sys_trace_initialized = false; + for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) { + Py_CLEAR(interp->monitoring_tool_names[t]); + } + PyConfig_Clear(&interp->config); Py_CLEAR(interp->codec_search_path); Py_CLEAR(interp->codec_search_cache); @@ -852,7 +905,7 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) interp->code_watchers[i] = NULL; } interp->active_code_watchers = 0; - + interp->f_opcode_trace_set = false; // XXX Once we have one allocator per interpreter (i.e. // per-interpreter GC) we must ensure that all of the interpreter's // objects have been cleaned up at the point. @@ -899,6 +952,13 @@ PyInterpreterState_Delete(PyInterpreterState *interp) _PyEval_FiniState(&interp->ceval); + // XXX These two calls should be done at the end of clear_interpreter(), + // but currently some objects get decref'ed after that. +#ifdef Py_REF_DEBUG + _PyInterpreterState_FinalizeRefTotal(interp); +#endif + _PyInterpreterState_FinalizeAllocatedBlocks(interp); + HEAD_LOCK(runtime); PyInterpreterState **p; for (p = &interpreters->head; ; p = &(*p)->next) { @@ -1206,8 +1266,7 @@ free_threadstate(PyThreadState *tstate) static void init_threadstate(PyThreadState *tstate, - PyInterpreterState *interp, uint64_t id, - PyThreadState *next) + PyInterpreterState *interp, uint64_t id) { if (tstate->_status.initialized) { Py_FatalError("thread state already initialized"); @@ -1216,18 +1275,13 @@ init_threadstate(PyThreadState *tstate, assert(interp != NULL); tstate->interp = interp; + // next/prev are set in add_threadstate(). + assert(tstate->next == NULL); + assert(tstate->prev == NULL); + assert(id > 0); tstate->id = id; - assert(interp->threads.head == tstate); - assert((next != NULL && id != 1) || (next == NULL && id == 1)); - if (next != NULL) { - assert(next->prev == NULL || next->prev == tstate); - next->prev = tstate; - } - tstate->next = next; - assert(tstate->prev == NULL); - // thread_id and native_thread_id are set in bind_tstate(). 
tstate->py_recursion_limit = interp->ceval.recursion_limit, @@ -1244,10 +1298,27 @@ init_threadstate(PyThreadState *tstate, tstate->datastack_chunk = NULL; tstate->datastack_top = NULL; tstate->datastack_limit = NULL; + tstate->what_event = -1; tstate->_status.initialized = 1; } +static void +add_threadstate(PyInterpreterState *interp, PyThreadState *tstate, + PyThreadState *next) +{ + assert(interp->threads.head != tstate); + assert((next != NULL && tstate->id != 1) || + (next == NULL && tstate->id == 1)); + if (next != NULL) { + assert(next->prev == NULL || next->prev == tstate); + next->prev = tstate; + } + tstate->next = next; + assert(tstate->prev == NULL); + interp->threads.head = tstate; +} + static PyThreadState * new_threadstate(PyInterpreterState *interp) { @@ -1287,9 +1358,9 @@ new_threadstate(PyInterpreterState *interp) &initial._main_interpreter._initial_thread, sizeof(*tstate)); } - interp->threads.head = tstate; - init_threadstate(tstate, interp, id, old_head); + init_threadstate(tstate, interp, id); + add_threadstate(interp, tstate, old_head); HEAD_UNLOCK(runtime); if (!used_newtstate) { @@ -1336,6 +1407,19 @@ _PyThreadState_Init(PyThreadState *tstate) Py_FatalError("_PyThreadState_Init() is for internal use only"); } + +static void +clear_datastack(PyThreadState *tstate) +{ + _PyStackChunk *chunk = tstate->datastack_chunk; + tstate->datastack_chunk = NULL; + while (chunk != NULL) { + _PyStackChunk *prev = chunk->previous; + _PyObject_VirtualFree(chunk, chunk->size); + chunk = prev; + } +} + void PyThreadState_Clear(PyThreadState *tstate) { @@ -1390,8 +1474,14 @@ PyThreadState_Clear(PyThreadState *tstate) "PyThreadState_Clear: warning: thread still has a generator\n"); } - tstate->c_profilefunc = NULL; - tstate->c_tracefunc = NULL; + if (tstate->c_profilefunc != NULL) { + tstate->interp->sys_profiling_threads--; + tstate->c_profilefunc = NULL; + } + if (tstate->c_tracefunc != NULL) { + tstate->interp->sys_tracing_threads--; + tstate->c_tracefunc = NULL; + } Py_CLEAR(tstate->c_profileobj); Py_CLEAR(tstate->c_traceobj); @@ -1410,7 +1500,6 @@ PyThreadState_Clear(PyThreadState *tstate) // XXX Do it as early in the function as possible. } - /* Common code for PyThreadState_Delete() and PyThreadState_DeleteCurrent() */ static void tstate_delete_common(PyThreadState *tstate) @@ -1443,18 +1532,11 @@ tstate_delete_common(PyThreadState *tstate) unbind_tstate(tstate); // XXX Move to PyThreadState_Clear()? - _PyStackChunk *chunk = tstate->datastack_chunk; - tstate->datastack_chunk = NULL; - while (chunk != NULL) { - _PyStackChunk *prev = chunk->previous; - _PyObject_VirtualFree(chunk, chunk->size); - chunk = prev; - } + clear_datastack(tstate); tstate->_status.finalized = 1; } - static void zapthreads(PyInterpreterState *interp) { @@ -1541,6 +1623,75 @@ _PyThreadState_DeleteExcept(PyThreadState *tstate) } +//------------------------- +// "detached" thread states +//------------------------- + +void +_PyThreadState_InitDetached(PyThreadState *tstate, PyInterpreterState *interp) +{ + _PyRuntimeState *runtime = interp->runtime; + + HEAD_LOCK(runtime); + interp->threads.next_unique_id += 1; + uint64_t id = interp->threads.next_unique_id; + HEAD_UNLOCK(runtime); + + init_threadstate(tstate, interp, id); + // We do not call add_threadstate(). 
+} + +void +_PyThreadState_ClearDetached(PyThreadState *tstate) +{ + assert(!tstate->_status.bound); + assert(!tstate->_status.bound_gilstate); + assert(tstate->datastack_chunk == NULL); + assert(tstate->thread_id == 0); + assert(tstate->native_thread_id == 0); + assert(tstate->next == NULL); + assert(tstate->prev == NULL); + + PyThreadState_Clear(tstate); + clear_datastack(tstate); +} + +void +_PyThreadState_BindDetached(PyThreadState *tstate) +{ + assert(!_Py_IsMainInterpreter( + current_fast_get(tstate->interp->runtime)->interp)); + assert(_Py_IsMainInterpreter(tstate->interp)); + bind_tstate(tstate); + /* Unlike _PyThreadState_Bind(), we do not modify gilstate TSS. */ +} + +void +_PyThreadState_UnbindDetached(PyThreadState *tstate) +{ + assert(!_Py_IsMainInterpreter( + current_fast_get(tstate->interp->runtime)->interp)); + assert(_Py_IsMainInterpreter(tstate->interp)); + assert(tstate_is_alive(tstate)); + assert(!tstate->_status.active); + assert(gilstate_tss_get(tstate->interp->runtime) != tstate); + + unbind_tstate(tstate); + + /* This thread state may be bound/unbound repeatedly, + so we must erase evidence that it was ever bound (or unbound). */ + tstate->_status.bound = 0; + tstate->_status.unbound = 0; + + /* We must fully unlink the thread state from any OS thread, + to allow it to be bound more than once. */ + tstate->thread_id = 0; +#ifdef PY_HAVE_THREAD_NATIVE_ID + tstate->native_thread_id = 0; +#endif +} + + //---------- // accessors //---------- @@ -1903,14 +2054,13 @@ _PyThread_CurrentExceptions(void) if (id == NULL) { goto fail; } - PyObject *exc_info = _PyErr_StackItemToExcInfoTuple(err_info); - if (exc_info == NULL) { - Py_DECREF(id); - goto fail; - } - int stat = PyDict_SetItem(result, id, exc_info); + PyObject *exc = err_info->exc_value; + assert(exc == NULL || + exc == Py_None || + PyExceptionInstance_Check(exc)); + + int stat = PyDict_SetItem(result, id, exc == NULL ? Py_None : exc); Py_DECREF(id); - Py_DECREF(exc_info); if (stat < 0) { goto fail; } @@ -2182,11 +2332,11 @@ _PyCrossInterpreterData_InitWithSize(_PyCrossInterpreterData *data, // where it was allocated, so the interpreter is required. 
assert(interp != NULL); _PyCrossInterpreterData_Init(data, interp, NULL, obj, new_object); - data->data = PyMem_Malloc(size); + data->data = PyMem_RawMalloc(size); if (data->data == NULL) { return -1; } - data->free = PyMem_Free; + data->free = PyMem_RawFree; return 0; } diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 07d119a67847c6..05e7b4370869af 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -18,7 +18,7 @@ #include "pycore_interp.h" // PyInterpreterState.importlib #include "pycore_object.h" // _PyDebug_PrintTotalRefs() #include "pycore_parser.h" // _PyParser_ASTFromString() -#include "pycore_pyerrors.h" // _PyErr_Fetch, _Py_Offer_Suggestions +#include "pycore_pyerrors.h" // _PyErr_GetRaisedException, _Py_Offer_Suggestions #include "pycore_pylifecycle.h" // _Py_UnhandledKeyboardInterrupt #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_sysmodule.h" // _PySys_Audit() @@ -776,6 +776,10 @@ _PyErr_PrintEx(PyThreadState *tstate, int set_sys_last_vars) } if (set_sys_last_vars) { + if (_PySys_SetAttr(&_Py_ID(last_exc), exc) < 0) { + _PyErr_Clear(tstate); + } + /* Legacy version: */ if (_PySys_SetAttr(&_Py_ID(last_type), typ) < 0) { _PyErr_Clear(tstate); } @@ -1103,7 +1107,7 @@ print_exception_notes(struct exception_print_context *ctx, PyObject *value) if (notes == NULL) { return -1; } - if (!PySequence_Check(notes)) { + if (!PySequence_Check(notes) || PyUnicode_Check(notes) || PyBytes_Check(notes)) { int res = 0; if (write_indented_margin(ctx, f) < 0) { res = -1; @@ -1118,6 +1122,9 @@ print_exception_notes(struct exception_print_context *ctx, PyObject *value) Py_DECREF(s); } Py_DECREF(notes); + if (PyFile_WriteString("\n", f) < 0) { + res = -1; + } return res; } Py_ssize_t num_notes = PySequence_Length(notes); diff --git a/Python/specialize.c b/Python/specialize.c index 719bd5bda329ff..b1cc66124cfa4a 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -96,6 +96,7 @@ _Py_GetSpecializationStats(void) { return NULL; } int err = 0; + err += add_stat_dict(stats, LOAD_SUPER_ATTR, "load_super_attr"); err += add_stat_dict(stats, LOAD_ATTR, "load_attr"); err += add_stat_dict(stats, LOAD_GLOBAL, "load_global"); err += add_stat_dict(stats, BINARY_SUBSCR, "binary_subscr"); @@ -147,7 +148,7 @@ print_spec_stats(FILE *out, OpcodeStats *stats) PRIu64 "\n", i, j, val); } } - for(int j = 0; j < 256; j++) { + for (int j = 0; j < 256; j++) { if (stats[i].pair_count[j]) { fprintf(out, "opcode[%d].pair_count[%d] : %" PRIu64 "\n", i, j, stats[i].pair_count[j]); @@ -264,15 +265,6 @@ do { \ #define SPECIALIZATION_FAIL(opcode, kind) ((void)0) #endif -static int compare_masks[] = { - [Py_LT] = COMPARISON_LESS_THAN, - [Py_LE] = COMPARISON_LESS_THAN | COMPARISON_EQUALS, - [Py_EQ] = COMPARISON_EQUALS, - [Py_NE] = COMPARISON_NOT_EQUALS, - [Py_GT] = COMPARISON_GREATER_THAN, - [Py_GE] = COMPARISON_GREATER_THAN | COMPARISON_EQUALS, -}; - // Initialize warmup counters and insert superinstructions. This cannot fail. 
void _PyCode_Quicken(PyCodeObject *code) @@ -282,7 +274,8 @@ _PyCode_Quicken(PyCodeObject *code) _Py_CODEUNIT *instructions = _PyCode_CODE(code); for (int i = 0; i < Py_SIZE(code); i++) { int previous_opcode = opcode; - opcode = _PyOpcode_Deopt[instructions[i].op.code]; + opcode = _Py_GetBaseOpcode(code, i); + assert(opcode < MIN_INSTRUMENTED_OPCODE); int caches = _PyOpcode_Caches[opcode]; if (caches) { instructions[i + 1].cache = adaptive_counter_warmup(); @@ -305,19 +298,6 @@ _PyCode_Quicken(PyCodeObject *code) case STORE_FAST << 8 | STORE_FAST: instructions[i - 1].op.code = STORE_FAST__STORE_FAST; break; - case COMPARE_OP << 8 | POP_JUMP_IF_TRUE: - case COMPARE_OP << 8 | POP_JUMP_IF_FALSE: - { - int oparg = instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.arg; - assert((oparg >> 4) <= Py_GE); - int mask = compare_masks[oparg >> 4]; - if (opcode == POP_JUMP_IF_FALSE) { - mask = mask ^ 0xf; - } - instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.code = COMPARE_AND_BRANCH; - instructions[i - 1 - INLINE_CACHE_ENTRIES_COMPARE_OP].op.arg = (oparg & 0xf0) | mask; - break; - } } } #endif /* ENABLE_SPECIALIZATION */ @@ -341,6 +321,14 @@ _PyCode_Quicken(PyCodeObject *code) #define SPEC_FAIL_LOAD_GLOBAL_NON_DICT 17 #define SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT 18 +/* Super */ + +#define SPEC_FAIL_SUPER_NOT_LOAD_METHOD 9 +#define SPEC_FAIL_SUPER_BAD_CLASS 10 +#define SPEC_FAIL_SUPER_SHADOWED 11 +#define SPEC_FAIL_SUPER_NOT_METHOD 12 +#define SPEC_FAIL_SUPER_ERROR_OR_NOT_FOUND 13 + /* Attributes */ #define SPEC_FAIL_ATTR_OVERRIDING_DESCRIPTOR 9 @@ -436,19 +424,17 @@ _PyCode_Quicken(PyCodeObject *code) #define SPEC_FAIL_CALL_OPERATOR_WRAPPER 29 /* COMPARE_OP */ -#define SPEC_FAIL_COMPARE_DIFFERENT_TYPES 12 -#define SPEC_FAIL_COMPARE_STRING 13 -#define SPEC_FAIL_COMPARE_NOT_FOLLOWED_BY_COND_JUMP 14 -#define SPEC_FAIL_COMPARE_BIG_INT 15 -#define SPEC_FAIL_COMPARE_BYTES 16 -#define SPEC_FAIL_COMPARE_TUPLE 17 -#define SPEC_FAIL_COMPARE_LIST 18 -#define SPEC_FAIL_COMPARE_SET 19 -#define SPEC_FAIL_COMPARE_BOOL 20 -#define SPEC_FAIL_COMPARE_BASEOBJECT 21 -#define SPEC_FAIL_COMPARE_FLOAT_LONG 22 -#define SPEC_FAIL_COMPARE_LONG_FLOAT 23 -#define SPEC_FAIL_COMPARE_EXTENDED_ARG 24 +#define SPEC_FAIL_COMPARE_OP_DIFFERENT_TYPES 12 +#define SPEC_FAIL_COMPARE_OP_STRING 13 +#define SPEC_FAIL_COMPARE_OP_BIG_INT 14 +#define SPEC_FAIL_COMPARE_OP_BYTES 15 +#define SPEC_FAIL_COMPARE_OP_TUPLE 16 +#define SPEC_FAIL_COMPARE_OP_LIST 17 +#define SPEC_FAIL_COMPARE_OP_SET 18 +#define SPEC_FAIL_COMPARE_OP_BOOL 19 +#define SPEC_FAIL_COMPARE_OP_BASEOBJECT 20 +#define SPEC_FAIL_COMPARE_OP_FLOAT_LONG 21 +#define SPEC_FAIL_COMPARE_OP_LONG_FLOAT 22 /* FOR_ITER */ #define SPEC_FAIL_FOR_ITER_GENERATOR 10 @@ -528,6 +514,54 @@ specialize_module_load_attr( /* Attribute specialization */ +void +_Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, PyObject *self, + _Py_CODEUNIT *instr, PyObject *name, int load_method) { + assert(ENABLE_SPECIALIZATION); + assert(_PyOpcode_Caches[LOAD_SUPER_ATTR] == INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR); + _PySuperAttrCache *cache = (_PySuperAttrCache *)(instr + 1); + if (!load_method) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_NOT_LOAD_METHOD); + goto fail; + } + if (global_super != (PyObject *)&PySuper_Type) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_SHADOWED); + goto fail; + } + if (!PyType_Check(cls)) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_BAD_CLASS); + goto fail; + } + PyTypeObject *tp = (PyTypeObject *)cls; + PyObject *res = 
_PySuper_LookupDescr(tp, self, name); + if (res == NULL) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_ERROR_OR_NOT_FOUND); + PyErr_Clear(); + goto fail; + } + if (_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) { + write_u32(cache->class_version, tp->tp_version_tag); + write_u32(cache->self_type_version, Py_TYPE(self)->tp_version_tag); + write_obj(cache->method, res); // borrowed + instr->op.code = LOAD_SUPER_ATTR_METHOD; + Py_DECREF(res); + goto success; + } + Py_DECREF(res); + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_NOT_METHOD); + +fail: + STAT_INC(LOAD_SUPER_ATTR, failure); + assert(!PyErr_Occurred()); + instr->op.code = LOAD_SUPER_ATTR; + cache->counter = adaptive_counter_backoff(cache->counter); + return; +success: + STAT_INC(LOAD_SUPER_ATTR, success); + assert(!PyErr_Occurred()); + cache->counter = adaptive_counter_cooldown(); +} + typedef enum { OVERRIDING, /* Is an overriding descriptor, and will remain so. */ METHOD, /* Attribute has Py_TPFLAGS_METHOD_DESCRIPTOR set */ @@ -1308,7 +1342,7 @@ _Py_Specialize_BinarySubscr( PyTypeObject *container_type = Py_TYPE(container); if (container_type == &PyList_Type) { if (PyLong_CheckExact(sub)) { - if (Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) { + if (_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) { instr->op.code = BINARY_SUBSCR_LIST_INT; goto success; } @@ -1321,7 +1355,7 @@ _Py_Specialize_BinarySubscr( } if (container_type == &PyTuple_Type) { if (PyLong_CheckExact(sub)) { - if (Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) { + if (_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) { instr->op.code = BINARY_SUBSCR_TUPLE_INT; goto success; } @@ -1354,16 +1388,16 @@ _Py_Specialize_BinarySubscr( SPECIALIZATION_FAIL(BINARY_SUBSCR, SPEC_FAIL_WRONG_NUMBER_ARGUMENTS); goto fail; } - assert(cls->tp_version_tag != 0); - write_u32(cache->type_version, cls->tp_version_tag); - int version = _PyFunction_GetVersionForCurrentState(func); - if (version == 0 || version != (uint16_t)version) { - SPECIALIZATION_FAIL(BINARY_SUBSCR, version == 0 ? 
- SPEC_FAIL_OUT_OF_VERSIONS : SPEC_FAIL_OUT_OF_RANGE); + uint32_t version = _PyFunction_GetVersionForCurrentState(func); + if (version == 0) { + SPECIALIZATION_FAIL(BINARY_SUBSCR, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; } - cache->func_version = version; - ((PyHeapTypeObject *)container_type)->_spec_cache.getitem = descriptor; + PyHeapTypeObject *ht = (PyHeapTypeObject *)container_type; + // This pointer is invalidated by PyType_Modified (see the comment on + // struct _specialization_cache): + ht->_spec_cache.getitem = descriptor; + ht->_spec_cache.getitem_version = version; instr->op.code = BINARY_SUBSCR_GETITEM; goto success; } @@ -1389,7 +1423,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins PyTypeObject *container_type = Py_TYPE(container); if (container_type == &PyList_Type) { if (PyLong_CheckExact(sub)) { - if ((Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1) + if (_PyLong_IsNonNegativeCompact((PyLongObject *)sub) && ((PyLongObject *)sub)->long_value.ob_digit[0] < (size_t)PyList_GET_SIZE(container)) { instr->op.code = STORE_SUBSCR_LIST_INT; @@ -1651,17 +1685,12 @@ specialize_py_call(PyFunctionObject *func, _Py_CODEUNIT *instr, int nargs, assert(nargs <= argcount && nargs >= min_args); assert(min_args >= 0 && defcount >= 0); assert(defcount == 0 || func->func_defaults != NULL); - if (min_args > 0xffff) { - SPECIALIZATION_FAIL(CALL, SPEC_FAIL_OUT_OF_RANGE); - return -1; - } int version = _PyFunction_GetVersionForCurrentState(func); if (version == 0) { SPECIALIZATION_FAIL(CALL, SPEC_FAIL_OUT_OF_VERSIONS); return -1; } write_u32(cache->func_version, version); - cache->min_args = min_args; if (argcount == nargs) { instr->op.code = bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS; } @@ -1766,6 +1795,7 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs, { assert(ENABLE_SPECIALIZATION); assert(_PyOpcode_Caches[CALL] == INLINE_CACHE_ENTRIES_CALL); + assert(_Py_OPCODE(*instr) != INSTRUMENTED_CALL); _PyCallCache *cache = (_PyCallCache *)(instr + 1); int fail; if (PyCFunction_CheckExact(callable)) { @@ -1958,83 +1988,79 @@ compare_op_fail_kind(PyObject *lhs, PyObject *rhs) { if (Py_TYPE(lhs) != Py_TYPE(rhs)) { if (PyFloat_CheckExact(lhs) && PyLong_CheckExact(rhs)) { - return SPEC_FAIL_COMPARE_FLOAT_LONG; + return SPEC_FAIL_COMPARE_OP_FLOAT_LONG; } if (PyLong_CheckExact(lhs) && PyFloat_CheckExact(rhs)) { - return SPEC_FAIL_COMPARE_LONG_FLOAT; + return SPEC_FAIL_COMPARE_OP_LONG_FLOAT; } - return SPEC_FAIL_COMPARE_DIFFERENT_TYPES; + return SPEC_FAIL_COMPARE_OP_DIFFERENT_TYPES; } if (PyBytes_CheckExact(lhs)) { - return SPEC_FAIL_COMPARE_BYTES; + return SPEC_FAIL_COMPARE_OP_BYTES; } if (PyTuple_CheckExact(lhs)) { - return SPEC_FAIL_COMPARE_TUPLE; + return SPEC_FAIL_COMPARE_OP_TUPLE; } if (PyList_CheckExact(lhs)) { - return SPEC_FAIL_COMPARE_LIST; + return SPEC_FAIL_COMPARE_OP_LIST; } if (PySet_CheckExact(lhs) || PyFrozenSet_CheckExact(lhs)) { - return SPEC_FAIL_COMPARE_SET; + return SPEC_FAIL_COMPARE_OP_SET; } if (PyBool_Check(lhs)) { - return SPEC_FAIL_COMPARE_BOOL; + return SPEC_FAIL_COMPARE_OP_BOOL; } if (Py_TYPE(lhs)->tp_richcompare == PyBaseObject_Type.tp_richcompare) { - return SPEC_FAIL_COMPARE_BASEOBJECT; + return SPEC_FAIL_COMPARE_OP_BASEOBJECT; } return SPEC_FAIL_OTHER; } #endif void -_Py_Specialize_CompareAndBranch(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, +_Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr, int oparg) { assert(ENABLE_SPECIALIZATION); - 
assert(_PyOpcode_Caches[COMPARE_AND_BRANCH] == INLINE_CACHE_ENTRIES_COMPARE_OP); + assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP); _PyCompareOpCache *cache = (_PyCompareOpCache *)(instr + 1); -#ifndef NDEBUG - int next_opcode = instr[INLINE_CACHE_ENTRIES_COMPARE_OP + 1].op.code; - assert(next_opcode == POP_JUMP_IF_FALSE || next_opcode == POP_JUMP_IF_TRUE); -#endif if (Py_TYPE(lhs) != Py_TYPE(rhs)) { - SPECIALIZATION_FAIL(COMPARE_AND_BRANCH, compare_op_fail_kind(lhs, rhs)); + SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs)); goto failure; } if (PyFloat_CheckExact(lhs)) { - instr->op.code = COMPARE_AND_BRANCH_FLOAT; + instr->op.code = COMPARE_OP_FLOAT; goto success; } if (PyLong_CheckExact(lhs)) { - if (Py_ABS(Py_SIZE(lhs)) <= 1 && Py_ABS(Py_SIZE(rhs)) <= 1) { - instr->op.code = COMPARE_AND_BRANCH_INT; + if (_PyLong_IsCompact((PyLongObject *)lhs) && _PyLong_IsCompact((PyLongObject *)rhs)) { + instr->op.code = COMPARE_OP_INT; goto success; } else { - SPECIALIZATION_FAIL(COMPARE_AND_BRANCH, SPEC_FAIL_COMPARE_BIG_INT); + SPECIALIZATION_FAIL(COMPARE_OP, SPEC_FAIL_COMPARE_OP_BIG_INT); goto failure; } } if (PyUnicode_CheckExact(lhs)) { int cmp = oparg >> 4; if (cmp != Py_EQ && cmp != Py_NE) { - SPECIALIZATION_FAIL(COMPARE_AND_BRANCH, SPEC_FAIL_COMPARE_STRING); + SPECIALIZATION_FAIL(COMPARE_OP, SPEC_FAIL_COMPARE_OP_STRING); goto failure; } else { - instr->op.code = COMPARE_AND_BRANCH_STR; + instr->op.code = COMPARE_OP_STR; goto success; } } - SPECIALIZATION_FAIL(COMPARE_AND_BRANCH, compare_op_fail_kind(lhs, rhs)); + SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs)); failure: - STAT_INC(COMPARE_AND_BRANCH, failure); - instr->op.code = COMPARE_AND_BRANCH; + STAT_INC(COMPARE_OP, failure); + instr->op.code = COMPARE_OP; cache->counter = adaptive_counter_backoff(cache->counter); return; success: - STAT_INC(COMPARE_AND_BRANCH, success); + STAT_INC(COMPARE_OP, success); cache->counter = adaptive_counter_cooldown(); } @@ -2182,7 +2208,9 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg) goto success; } else if (tp == &PyGen_Type && oparg <= SHRT_MAX) { - assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR); + assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR || + instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR + ); instr->op.code = FOR_ITER_GEN; goto success; } diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index e9f0061a59d3ba..27f42e5202e571 100644 --- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -164,7 +164,6 @@ static const char* _Py_stdlib_module_names[] = { "idlelib", "imaplib", "imghdr", -"imp", "importlib", "inspect", "io", diff --git a/Python/sysmodule.c b/Python/sysmodule.c index cc5b9a6d418bfa..81dabe6102f18d 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -1488,7 +1488,7 @@ static PyStructSequence_Desc windows_version_desc = { }; static PyObject * -_sys_getwindowsversion_from_kernel32() +_sys_getwindowsversion_from_kernel32(void) { #ifndef MS_WINDOWS_DESKTOP return NULL; @@ -1854,7 +1854,9 @@ static Py_ssize_t sys_gettotalrefcount_impl(PyObject *module) /*[clinic end generated code: output=4103886cf17c25bc input=53b744faa5d2e4f6]*/ { - return _Py_GetRefTotal(); + /* It may make sense to return the total for the current interpreter + or have a second function that does so. 
*/ + return _Py_GetGlobalRefTotal(); } #endif /* Py_REF_DEBUG */ @@ -1869,9 +1871,23 @@ static Py_ssize_t sys_getallocatedblocks_impl(PyObject *module) /*[clinic end generated code: output=f0c4e873f0b6dcf7 input=dab13ee346a0673e]*/ { - return _Py_GetAllocatedBlocks(); + // It might make sense to return the count + // for just the current interpreter. + return _Py_GetGlobalAllocatedBlocks(); } +/*[clinic input] +sys.getunicodeinternedsize -> Py_ssize_t + +Return the number of elements of the unicode interned dictionary +[clinic start generated code]*/ + +static Py_ssize_t +sys_getunicodeinternedsize_impl(PyObject *module) +/*[clinic end generated code: output=ad0e4c9738ed4129 input=726298eaa063347a]*/ +{ + return _PyUnicode_InternedSize(); +} /*[clinic input] sys._getframe @@ -2241,6 +2257,7 @@ static PyMethodDef sys_methods[] = { SYS_GETDEFAULTENCODING_METHODDEF SYS_GETDLOPENFLAGS_METHODDEF SYS_GETALLOCATEDBLOCKS_METHODDEF + SYS_GETUNICODEINTERNEDSIZE_METHODDEF SYS_GETFILESYSTEMENCODING_METHODDEF SYS_GETFILESYSTEMENCODEERRORS_METHODDEF #ifdef Py_TRACE_REFS @@ -2670,11 +2687,13 @@ stderr -- standard error object; used for error messages\n\ By assigning other file objects (or objects that behave like files)\n\ to these, it is possible to redirect all of the interpreter's I/O.\n\ \n\ +last_exc - the last uncaught exception\n\ + Only available in an interactive session after a\n\ + traceback has been printed.\n\ last_type -- type of last uncaught exception\n\ last_value -- value of last uncaught exception\n\ last_traceback -- traceback of last uncaught exception\n\ - These three are only available in an interactive session after a\n\ - traceback has been printed.\n\ + These three are the (deprecated) legacy representation of last_exc.\n\ " ) /* concatenating string here */ @@ -3147,10 +3166,8 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) SET_SYS("float_info", PyFloat_GetInfo()); SET_SYS("int_info", PyLong_GetInfo()); /* initialize hash_info */ - if (Hash_InfoType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin(&Hash_InfoType, &hash_info_desc) < 0) { - goto type_init_failed; - } + if (_PyStructSequence_InitBuiltin(&Hash_InfoType, &hash_info_desc) < 0) { + goto type_init_failed; } SET_SYS("hash_info", get_hash_info(tstate)); SET_SYS("maxunicode", PyLong_FromLong(0x10FFFF)); @@ -3172,11 +3189,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) #define ENSURE_INFO_TYPE(TYPE, DESC) \ do { \ - if (TYPE.tp_name == NULL) { \ - if (_PyStructSequence_InitBuiltinWithFlags( \ - &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \ - goto type_init_failed; \ - } \ + if (_PyStructSequence_InitBuiltinWithFlags( \ + &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \ + goto type_init_failed; \ } \ } while (0) @@ -3211,11 +3226,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) SET_SYS("thread_info", PyThread_GetInfo()); /* initialize asyncgen_hooks */ - if (AsyncGenHooksType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin( - &AsyncGenHooksType, &asyncgen_hooks_desc) < 0) { - goto type_init_failed; - } + if (_PyStructSequence_InitBuiltin( + &AsyncGenHooksType, &asyncgen_hooks_desc) < 0) { + goto type_init_failed; } #ifdef __EMSCRIPTEN__ @@ -3405,6 +3418,7 @@ _PySys_SetPreliminaryStderr(PyObject *sysdict) return _PyStatus_ERR("can't set preliminary stderr"); } +PyObject *_Py_CreateMonitoringObject(void); /* Create sys module without all attributes. _PySys_UpdateConfig() should be called later to add remaining attributes. 
*/ @@ -3454,6 +3468,16 @@ _PySys_Create(PyThreadState *tstate, PyObject **sysmod_p) goto error; } + PyObject *monitoring = _Py_CreateMonitoringObject(); + if (monitoring == NULL) { + goto error; + } + int err = PyDict_SetItemString(sysdict, "monitoring", monitoring); + Py_DECREF(monitoring); + if (err < 0) { + goto error; + } + assert(!_PyErr_Occurred(tstate)); *sysmod_p = sysmod; @@ -3468,13 +3492,13 @@ void _PySys_Fini(PyInterpreterState *interp) { if (_Py_IsMainInterpreter(interp)) { - _PyStructSequence_FiniType(&VersionInfoType); - _PyStructSequence_FiniType(&FlagsType); + _PyStructSequence_FiniBuiltin(&VersionInfoType); + _PyStructSequence_FiniBuiltin(&FlagsType); #if defined(MS_WINDOWS) - _PyStructSequence_FiniType(&WindowsVersionType); + _PyStructSequence_FiniBuiltin(&WindowsVersionType); #endif - _PyStructSequence_FiniType(&Hash_InfoType); - _PyStructSequence_FiniType(&AsyncGenHooksType); + _PyStructSequence_FiniBuiltin(&Hash_InfoType); + _PyStructSequence_FiniBuiltin(&AsyncGenHooksType); #ifdef __EMSCRIPTEN__ Py_CLEAR(EmscriptenInfoType); #endif diff --git a/Python/thread.c b/Python/thread.c index 4581f1af043a37..f90cd34a073540 100644 --- a/Python/thread.c +++ b/Python/thread.c @@ -7,7 +7,7 @@ #include "Python.h" #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() #include "pycore_pythread.h" #ifndef DONT_HAVE_STDIO_H @@ -137,10 +137,8 @@ PyThread_GetInfo(void) int len; #endif - if (ThreadInfoType.tp_name == 0) { - if (_PyStructSequence_InitBuiltin(&ThreadInfoType, - &threadinfo_desc) < 0) - return NULL; + if (_PyStructSequence_InitBuiltin(&ThreadInfoType, &threadinfo_desc) < 0) { + return NULL; } threadinfo = PyStructSequence_New(&ThreadInfoType); @@ -197,5 +195,5 @@ _PyThread_FiniType(PyInterpreterState *interp) return; } - _PyStructSequence_FiniType(&ThreadInfoType); + _PyStructSequence_FiniBuiltin(&ThreadInfoType); } diff --git a/README.rst b/README.rst index 6923b692f6c971..06c4f0a1fa3917 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.12.0 alpha 6 +This is Python version 3.12.0 alpha 7 ===================================== .. image:: https://github.com/python/cpython/workflows/Tests/badge.svg diff --git a/Tools/README b/Tools/README index 04612b8013db92..e51624f453c5bb 100644 --- a/Tools/README +++ b/Tools/README @@ -1,10 +1,21 @@ This directory contains a number of Python programs that are useful while building or extending Python. +build Automatically generated directory by the build system + contain build artifacts and intermediate files. + buildbot Batchfiles for running on Windows buildbot workers. +c-analyzer Tools to check no new global variables have been added. + +cases_generator Tooling to generate interpreters. + ccbench A Python threads-based concurrency benchmark. (*) +clinic A preprocessor for CPython C files in order to automate + the boilerplate involved with writing argument parsing + code for "builtins". + freeze Create a stand-alone executable from a Python program. gdb Python code to be run inside gdb, to make it easier to @@ -15,11 +26,16 @@ i18n Tools for internationalization. pygettext.py and msgfmt.py generates a binary message catalog from a catalog in text format. +importbench A set of micro-benchmarks for various import scenarios. + iobench Benchmark for the new Python I/O system. (*) msi Support for packaging Python as an MSI package on Windows. 
-parser Un-parsing tool to generate code from an AST. +nuget Files for the NuGet package manager for .NET. + +patchcheck Tools for checking and applying patches to the Python source code + and verifying the integrity of patch files. peg_generator PEG-based parser generator (pegen) used for new parser. @@ -28,9 +44,14 @@ scripts A number of useful single-file programs, e.g. tabnanny.py tabs and spaces, and 2to3, which converts Python 2 code to Python 3 code. +ssl Scripts to generate ssl_data.h from OpenSSL sources, and run + tests against multiple installations of OpenSSL and LibreSSL. + stringbench A suite of micro-benchmarks for various operations on strings (both 8-bit and unicode). (*) +tz A script to dump timezone from /usr/share/zoneinfo. + unicode Tools for generating unicodedata and codecs from unicode.org and other mapping files (by Fredrik Lundh, Marc-Andre Lemburg and Martin von Loewis). @@ -38,6 +59,8 @@ unicode Tools for generating unicodedata and codecs from unicode.org unittestgui A Tkinter based GUI test runner for unittest, with test discovery. +wasm Config and helpers to facilitate cross compilation of CPython + to WebAssembly (WASM). (*) A generic benchmark suite is maintained separately at https://github.com/python/performance diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py index 511b26a5ce3dc7..b084d3e457f782 100644 --- a/Tools/build/deepfreeze.py +++ b/Tools/build/deepfreeze.py @@ -142,7 +142,7 @@ def block(self, prefix: str, suffix: str = "") -> None: def object_head(self, typename: str) -> None: with self.block(".ob_base =", ","): - self.write(f".ob_refcnt = 999999999,") + self.write(f".ob_refcnt = _Py_IMMORTAL_REFCNT,") self.write(f".ob_type = &{typename},") def object_var_head(self, typename: str, size: int) -> None: @@ -175,6 +175,12 @@ def generate_unicode(self, name: str, s: str) -> str: return f"&_Py_STR({strings[s]})" if s in identifiers: return f"&_Py_ID({s})" + if len(s) == 1: + c = ord(s) + if c < 128: + return f"(PyObject *)&_Py_SINGLETON(strings).ascii[{c}]" + elif c < 256: + return f"(PyObject *)&_Py_SINGLETON(strings).latin1[{c - 128}]" if re.match(r'\A[A-Za-z0-9_]+\Z', s): name = f"const_str_{s}" kind, ascii = analyze_character_width(s) @@ -255,7 +261,6 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f".co_names = {co_names},") self.write(f".co_exceptiontable = {co_exceptiontable},") self.field(code, "co_flags") - self.write("._co_linearray_entry_size = 0,") self.field(code, "co_argcount") self.field(code, "co_posonlyargcount") self.field(code, "co_kwonlyargcount") @@ -276,7 +281,6 @@ def generate_code(self, name: str, code: types.CodeType) -> str: self.write(f".co_qualname = {co_qualname},") self.write(f".co_linetable = {co_linetable},") self.write(f"._co_cached = NULL,") - self.write("._co_linearray = NULL,") self.write(f".co_code_adaptive = {co_code_adaptive},") for i, op in enumerate(code.co_code[::2]): if op == RESUME: @@ -309,7 +313,7 @@ def generate_tuple(self, name: str, t: Tuple[object, ...]) -> str: return f"& {name}._object.ob_base.ob_base" def _generate_int_for_bits(self, name: str, i: int, digit: int) -> None: - sign = -1 if i < 0 else 0 if i == 0 else +1 + sign = (i > 0) - (i < 0) i = abs(i) digits: list[int] = [] while i: @@ -318,10 +322,12 @@ def _generate_int_for_bits(self, name: str, i: int, digit: int) -> None: self.write("static") with self.indent(): with self.block("struct"): - self.write("PyObject_VAR_HEAD") + self.write("PyObject ob_base;") + self.write("uintptr_t lv_tag;") 
self.write(f"digit ob_digit[{max(1, len(digits))}];") with self.block(f"{name} =", ";"): - self.object_var_head("PyLong_Type", sign*len(digits)) + self.object_head("PyLong_Type") + self.write(f".lv_tag = TAG_FROM_SIGN_AND_SIZE({sign}, {len(digits)}),") if digits: ds = ", ".join(map(str, digits)) self.write(f".ob_digit = {{ {ds} }},") @@ -345,7 +351,7 @@ def generate_int(self, name: str, i: int) -> str: self.write('#error "PYLONG_BITS_IN_DIGIT should be 15 or 30"') self.write("#endif") # If neither clause applies, it won't compile - return f"& {name}.ob_base.ob_base" + return f"& {name}.ob_base" def generate_float(self, name: str, x: float) -> str: with self.block(f"static PyFloatObject {name} =", ";"): diff --git a/Tools/build/generate_global_objects.py b/Tools/build/generate_global_objects.py index 1f53f02d41ef39..c27817702bf97d 100644 --- a/Tools/build/generate_global_objects.py +++ b/Tools/build/generate_global_objects.py @@ -354,14 +354,14 @@ def generate_static_strings_initializer(identifiers, strings): printer.write(before) printer.write(START) printer.write("static inline void") - with printer.block("_PyUnicode_InitStaticStrings(void)"): + with printer.block("_PyUnicode_InitStaticStrings(PyInterpreterState *interp)"): printer.write(f'PyObject *string;') for i in sorted(identifiers): # This use of _Py_ID() is ignored by iter_global_strings() # since iter_files() ignores .h files. printer.write(f'string = &_Py_ID({i});') printer.write(f'assert(_PyUnicode_CheckConsistency(string, 1));') - printer.write(f'PyUnicode_InternInPlace(&string);') + printer.write(f'_PyUnicode_InternInPlace(interp, &string);') # XXX What about "strings"? printer.write(END) printer.write(after) diff --git a/Tools/build/generate_opcode_h.py b/Tools/build/generate_opcode_h.py index 9b2112f7f5f31d..645b9f1de1170b 100644 --- a/Tools/build/generate_opcode_h.py +++ b/Tools/build/generate_opcode_h.py @@ -60,7 +60,7 @@ def write_int_array_from_ops(name, ops, out): bits = 0 for op in ops: bits |= 1<<op - out.write(f"static const uint32_t {name}[9] = {{\n") + out.write(f"const uint32_t {name}[9] = {{\n") for i in range(9): out.write(f" {bits & UINT32_MASK}U,\n") bits >>= 32 @@ -89,6 +89,7 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna HAVE_ARGUMENT = opcode["HAVE_ARGUMENT"] MIN_PSEUDO_OPCODE = opcode["MIN_PSEUDO_OPCODE"] MAX_PSEUDO_OPCODE = opcode["MAX_PSEUDO_OPCODE"] + MIN_INSTRUMENTED_OPCODE = opcode["MIN_INSTRUMENTED_OPCODE"] NUM_OPCODES = len(opname) used = [ False ] * len(opname) @@ -105,9 +106,6 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna specialized_opmap[name] = next_op opname_including_specialized[next_op] = name used[next_op] = True - specialized_opmap['DO_TRACING'] = 255 - opname_including_specialized[255] = 'DO_TRACING' - used[255] = True with open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj: fobj.write(header) @@ -120,6 +118,8 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna fobj.write(DEFINE.format("HAVE_ARGUMENT", HAVE_ARGUMENT)) if op == MIN_PSEUDO_OPCODE: fobj.write(DEFINE.format("MIN_PSEUDO_OPCODE", MIN_PSEUDO_OPCODE)) + if op == MIN_INSTRUMENTED_OPCODE: + fobj.write(DEFINE.format("MIN_INSTRUMENTED_OPCODE", MIN_INSTRUMENTED_OPCODE)) fobj.write(DEFINE.format(name, op)) @@ -130,10 +130,10 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna for name, op in specialized_opmap.items(): fobj.write(DEFINE.format(name, op)) + iobj.write("\nextern const 
uint32_t _PyOpcode_Jump[9];\n") iobj.write("\nextern const uint8_t _PyOpcode_Caches[256];\n") iobj.write("\nextern const uint8_t _PyOpcode_Deopt[256];\n") iobj.write("\n#ifdef NEED_OPCODE_TABLES\n") - write_int_array_from_ops("_PyOpcode_RelativeJump", opcode['hasjrel'], iobj) write_int_array_from_ops("_PyOpcode_Jump", opcode['hasjrel'] + opcode['hasjabs'], iobj) iobj.write("\nconst uint8_t _PyOpcode_Caches[256] = {\n") diff --git a/Tools/build/generate_stdlib_module_names.py b/Tools/build/generate_stdlib_module_names.py index d15e5e2d5450d7..7e0e9602a10765 100644 --- a/Tools/build/generate_stdlib_module_names.py +++ b/Tools/build/generate_stdlib_module_names.py @@ -1,5 +1,5 @@ # This script lists the names of standard library modules -# to update Python/stdlib_mod_names.h +# to update Python/stdlib_module_names.h import _imp import os.path import re diff --git a/Tools/build/generate_token.py b/Tools/build/generate_token.py index fc12835b7762ad..3bd307c1733867 100755 --- a/Tools/build/generate_token.py +++ b/Tools/build/generate_token.py @@ -80,6 +80,8 @@ def update_file(file, content): (x) == NEWLINE || \\ (x) == INDENT || \\ (x) == DEDENT) +#define ISSTRINGLIT(x) ((x) == STRING || \\ + (x) == FSTRING_MIDDLE) // Symbols exported for test_peg_generator diff --git a/Tools/build/verify_ensurepip_wheels.py b/Tools/build/verify_ensurepip_wheels.py index 044d1fd6b3cf2d..09fd5d9e3103ac 100755 --- a/Tools/build/verify_ensurepip_wheels.py +++ b/Tools/build/verify_ensurepip_wheels.py @@ -14,7 +14,7 @@ from pathlib import Path from urllib.request import urlopen -PACKAGE_NAMES = ("pip", "setuptools") +PACKAGE_NAMES = ("pip",) ENSURE_PIP_ROOT = Path(__file__).parent.parent.parent / "Lib/ensurepip" WHEEL_DIR = ENSURE_PIP_ROOT / "_bundled" ENSURE_PIP_INIT_PY_TEXT = (ENSURE_PIP_ROOT / "__init__.py").read_text(encoding="utf-8") diff --git a/Tools/c-analyzer/TODO b/Tools/c-analyzer/TODO index 43760369b1980e..27a535814ea52b 100644 --- a/Tools/c-analyzer/TODO +++ b/Tools/c-analyzer/TODO @@ -495,7 +495,6 @@ Python/import.c:PyImport_ImportModuleLevelObject():PyId___path__ _Py_IDENTIFIER( Python/import.c:PyImport_ImportModuleLevelObject():PyId___spec__ _Py_IDENTIFIER(__spec__) Python/import.c:PyImport_ImportModuleLevelObject():PyId__handle_fromlist _Py_IDENTIFIER(_handle_fromlist) Python/import.c:PyImport_ImportModuleLevelObject():PyId__lock_unlock_module _Py_IDENTIFIER(_lock_unlock_module) -Python/import.c:PyImport_ReloadModule():PyId_imp _Py_IDENTIFIER(imp) Python/import.c:PyImport_ReloadModule():PyId_reload _Py_IDENTIFIER(reload) Python/import.c:_PyImportZip_Init():PyId_zipimporter _Py_IDENTIFIER(zipimporter) Python/import.c:import_find_and_load():PyId__find_and_load _Py_IDENTIFIER(_find_and_load) diff --git a/Tools/c-analyzer/c_analyzer/__main__.py b/Tools/c-analyzer/c_analyzer/__main__.py index 5d89b29adf899e..cde39bc4e649d9 100644 --- a/Tools/c-analyzer/c_analyzer/__main__.py +++ b/Tools/c-analyzer/c_analyzer/__main__.py @@ -18,10 +18,8 @@ configure_logger, get_prog, filter_filenames, - iter_marks, ) from c_parser.info import KIND -from c_parser.match import is_type_decl from .match import filter_forward from . 
import ( analyze as _analyze, diff --git a/Tools/c-analyzer/c_analyzer/info.py b/Tools/c-analyzer/c_analyzer/info.py index 27c3a5a4ee76f2..d231e07a60dd8e 100644 --- a/Tools/c-analyzer/c_analyzer/info.py +++ b/Tools/c-analyzer/c_analyzer/info.py @@ -1,4 +1,3 @@ -from collections import namedtuple import os.path from c_common import fsutil @@ -13,9 +12,6 @@ from c_parser.match import ( is_type_decl, ) -from .match import ( - is_process_global, -) IGNORED = _misc.Labeled('IGNORED') diff --git a/Tools/c-analyzer/c_common/info.py b/Tools/c-analyzer/c_common/info.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/Tools/c-analyzer/c_common/iterutil.py b/Tools/c-analyzer/c_common/iterutil.py index 6ded105304e454..dda3dd57c1cf5c 100644 --- a/Tools/c-analyzer/c_common/iterutil.py +++ b/Tools/c-analyzer/c_common/iterutil.py @@ -1,7 +1,3 @@ - -_NOT_SET = object() - - def peek_and_iter(items): if not items: return None, None diff --git a/Tools/c-analyzer/c_common/show.py b/Tools/c-analyzer/c_common/show.py deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/Tools/c-analyzer/c_parser/__main__.py b/Tools/c-analyzer/c_parser/__main__.py index 78f47a1808f50b..2454fcba814291 100644 --- a/Tools/c-analyzer/c_parser/__main__.py +++ b/Tools/c-analyzer/c_parser/__main__.py @@ -1,10 +1,7 @@ import logging -import os.path import sys -from c_common import fsutil from c_common.scriptutil import ( - CLIArgSpec as Arg, add_verbosity_cli, add_traceback_cli, add_kind_filtering_cli, @@ -15,7 +12,6 @@ get_prog, main_for_filenames, ) -from .preprocessor import get_preprocessor from .preprocessor.__main__ import ( add_common_cli as add_preprocessor_cli, ) diff --git a/Tools/c-analyzer/c_parser/_state_machine.py b/Tools/c-analyzer/c_parser/_state_machine.py deleted file mode 100644 index 875323188aadfd..00000000000000 --- a/Tools/c-analyzer/c_parser/_state_machine.py +++ /dev/null @@ -1,244 +0,0 @@ - -f''' - struct {ANON_IDENTIFIER}; - struct {{ ... }} - struct {IDENTIFIER} {{ ... }} - - union {ANON_IDENTIFIER}; - union {{ ... }} - union {IDENTIFIER} {{ ... }} - - enum {ANON_IDENTIFIER}; - enum {{ ... }} - enum {IDENTIFIER} {{ ... }} - - typedef {VARTYPE} {IDENTIFIER}; - typedef {IDENTIFIER}; - typedef {IDENTIFIER}; - typedef {IDENTIFIER}; -''' - - -def parse(srclines): - if isinstance(srclines, str): # a filename - raise NotImplementedError - - - -# This only handles at most 10 nested levels. -#MATCHED_PARENS = textwrap.dedent(rf''' -# # matched parens -# (?: -# [(] # level 0 -# (?: -# [^()]* -# [(] # level 1 -# (?: -# [^()]* -# [(] # level 2 -# (?: -# [^()]* -# [(] # level 3 -# (?: -# [^()]* -# [(] # level 4 -# (?: -# [^()]* -# [(] # level 5 -# (?: -# [^()]* -# [(] # level 6 -# (?: -# [^()]* -# [(] # level 7 -# (?: -# [^()]* -# [(] # level 8 -# (?: -# [^()]* -# [(] # level 9 -# (?: -# [^()]* -# [(] # level 10 -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# )* -# [^()]* -# [)] -# ) -# # end matched parens -# ''') - -r''' - # for loop - (?: - \s* \b for - \s* [(] - ( - [^;]* ; - [^;]* ; - .*? - ) # <header> - [)] - \s* - (?: - (?: - ( - {_ind(SIMPLE_STMT, 6)} - ) # <stmt> - ; - ) - | - ( {{ ) # <open> - ) - ) - | - - - - ( - (?: - (?: - (?: - {_ind(SIMPLE_STMT, 6)} - )? - return \b \s* - {_ind(INITIALIZER, 5)} - ) - | - (?: - (?: - {IDENTIFIER} \s* - (?: . 
| -> ) \s* - )* - {IDENTIFIER} - \s* = \s* - {_ind(INITIALIZER, 5)} - ) - | - (?: - {_ind(SIMPLE_STMT, 5)} - ) - ) - | - # cast compound literal - (?: - (?: - [^'"{{}};]* - {_ind(STRING_LITERAL, 5)} - )* - [^'"{{}};]*? - [^'"{{}};=] - = - \s* [(] [^)]* [)] - \s* {{ [^;]* }} - ) - ) # <stmt> - - - - # compound statement - (?: - ( - (?: - - # "for" statements are handled separately above. - (?: (?: else \s+ )? if | switch | while ) \s* - {_ind(COMPOUND_HEAD, 5)} - ) - | - (?: else | do ) - # We do not worry about compound statements for labels, - # "case", or "default". - )? # <header> - \s* - ( {{ ) # <open> - ) - - - - ( - (?: - [^'"{{}};]* - {_ind(STRING_LITERAL, 5)} - )* - [^'"{{}};]* - # Presumably we will not see "== {{". - [^\s='"{{}};] - )? # <header> - - - - ( - \b - (?: - # We don't worry about labels with a compound statement. - (?: - switch \s* [(] [^{{]* [)] - ) - | - (?: - case \b \s* [^:]+ [:] - ) - | - (?: - default \s* [:] - ) - | - (?: - do - ) - | - (?: - while \s* [(] [^{{]* [)] - ) - | - #(?: - # for \s* [(] [^{{]* [)] - # ) - #| - (?: - if \s* [(] - (?: [^{{]* [^)] \s* {{ )* [^{{]* - [)] - ) - | - (?: - else - (?: - \s* - if \s* [(] - (?: [^{{]* [^)] \s* {{ )* [^{{]* - [)] - )? - ) - ) - )? # <header> -''' diff --git a/Tools/c-analyzer/c_parser/info.py b/Tools/c-analyzer/c_parser/info.py index 3fa9fefbd5ec0b..799f9237877447 100644 --- a/Tools/c-analyzer/c_parser/info.py +++ b/Tools/c-analyzer/c_parser/info.py @@ -1,6 +1,5 @@ from collections import namedtuple import enum -import os.path import re from c_common import fsutil @@ -8,7 +7,7 @@ import c_common.misc as _misc import c_common.strutil as _strutil import c_common.tables as _tables -from .parser._regexes import SIMPLE_TYPE, _STORAGE +from .parser._regexes import _STORAGE FIXED_TYPE = _misc.Labeled('FIXED_TYPE') diff --git a/Tools/c-analyzer/c_parser/parser/_alt.py b/Tools/c-analyzer/c_parser/parser/_alt.py deleted file mode 100644 index 05a9101b4f529a..00000000000000 --- a/Tools/c-analyzer/c_parser/parser/_alt.py +++ /dev/null @@ -1,6 +0,0 @@ - -def _parse(srclines, anon_name): - text = ' '.join(l for _, l in srclines) - - from ._delim import parse - yield from parse(text, anon_name) diff --git a/Tools/c-analyzer/c_parser/parser/_delim.py b/Tools/c-analyzer/c_parser/parser/_delim.py deleted file mode 100644 index 51433a629d3a35..00000000000000 --- a/Tools/c-analyzer/c_parser/parser/_delim.py +++ /dev/null @@ -1,54 +0,0 @@ -import re -import textwrap - -from ._regexes import _ind, STRING_LITERAL - - -def parse(text, anon_name): - context = None - data = None - for m in DELIMITER_RE.find_iter(text): - before, opened, closed = m.groups() - delim = opened or closed - - handle_segment = HANDLERS[context][delim] - result, context, data = handle_segment(before, delim, data) - if result: - yield result - - -DELIMITER = textwrap.dedent(rf''' - ( - (?: - [^'"()\[\]{};]* - {_ind(STRING_LITERAL, 3)} - }* - [^'"()\[\]{};]+ - )? # <before> - (?: - ( - [(\[{] - ) # <open> - | - ( - [)\]};] - ) # <close> - )? 
- ''') -DELIMITER_RE = re.compile(DELIMITER, re.VERBOSE) - -_HANDLERS = { - None: { # global - # opened - '{': ..., - '[': None, - '(': None, - # closed - '}': None, - ']': None, - ')': None, - ';': ..., - }, - '': { - }, -} diff --git a/Tools/c-analyzer/c_parser/parser/_global.py b/Tools/c-analyzer/c_parser/parser/_global.py index 35947c12998135..b1ac9f5db034e1 100644 --- a/Tools/c-analyzer/c_parser/parser/_global.py +++ b/Tools/c-analyzer/c_parser/parser/_global.py @@ -9,7 +9,6 @@ set_capture_groups, ) from ._compound_decl_body import DECL_BODY_PARSERS -#from ._func_body import parse_function_body from ._func_body import parse_function_statics as parse_function_body diff --git a/Tools/c-analyzer/cpython/__main__.py b/Tools/c-analyzer/cpython/__main__.py index fe7a16726f45a9..ec026c6932f1f4 100644 --- a/Tools/c-analyzer/cpython/__main__.py +++ b/Tools/c-analyzer/cpython/__main__.py @@ -2,7 +2,6 @@ import sys import textwrap -from c_common.fsutil import expand_filenames, iter_files_by_suffix from c_common.scriptutil import ( VERBOSITY, add_verbosity_cli, @@ -11,7 +10,6 @@ add_kind_filtering_cli, add_files_cli, add_progress_cli, - main_for_filenames, process_args_by_key, configure_logger, get_prog, diff --git a/Tools/c-analyzer/cpython/_analyzer.py b/Tools/c-analyzer/cpython/_analyzer.py index cfe5e75f2f4df6..68d6b31cf2b6f0 100644 --- a/Tools/c-analyzer/cpython/_analyzer.py +++ b/Tools/c-analyzer/cpython/_analyzer.py @@ -4,16 +4,12 @@ from c_common.clsutil import classonly from c_parser.info import ( KIND, - DeclID, Declaration, TypeDeclaration, - TypeDef, - Struct, Member, FIXED_TYPE, ) from c_parser.match import ( - is_type_decl, is_pots, is_funcptr, ) diff --git a/Tools/c-analyzer/cpython/_capi.py b/Tools/c-analyzer/cpython/_capi.py index df8159a8cc169f..4552f71479bd06 100644 --- a/Tools/c-analyzer/cpython/_capi.py +++ b/Tools/c-analyzer/cpython/_capi.py @@ -7,7 +7,7 @@ from c_common.tables import build_table, resolve_columns from c_parser.parser._regexes import _ind -from ._files import iter_header_files, resolve_filename +from ._files import iter_header_files from . import REPO_ROOT @@ -610,8 +610,7 @@ def _render_item_full(item, groupby, verbose): yield item.name yield f' {"filename:":10} {item.relfile}' for extra in ('kind', 'level'): - #if groupby != extra: - yield f' {extra+":":10} {getattr(item, extra)}' + yield f' {extra+":":10} {getattr(item, extra)}' if verbose: print(' ---------------------------------------') for lno, line in enumerate(item.text, item.lno): @@ -636,7 +635,6 @@ def render_summary(items, *, subtotals = summary['totals']['subs'] bygroup = summary['totals']['bygroup'] - lastempty = False for outer, subtotal in subtotals.items(): if bygroup: subtotal = f'({subtotal})' @@ -646,10 +644,6 @@ def render_summary(items, *, if outer in bygroup: for inner, count in bygroup[outer].items(): yield f' {inner + ":":9} {count}' - lastempty = False - else: - lastempty = True - total = f'*{summary["totals"]["all"]}*' label = '*total*:' if bygroup: diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py index acf30e2c4020b3..5924ab7860d8d5 100644 --- a/Tools/c-analyzer/cpython/_parser.py +++ b/Tools/c-analyzer/cpython/_parser.py @@ -47,6 +47,7 @@ def clean_lines(text): ''' # XXX Handle these. 
+# Tab separated: EXCLUDED = clean_lines(''' # @begin=conf@ @@ -327,7 +328,6 @@ def clean_lines(text): _abs('Python/frozen_modules/*.h'): (20_000, 500), _abs('Python/opcode_targets.h'): (10_000, 500), _abs('Python/stdlib_module_names.h'): (5_000, 500), - _abs('Python/importlib.h'): (200_000, 5000), # These large files are currently ignored (see above). _abs('Modules/_ssl_data.h'): (80_000, 10_000), diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index 57b8542fb46482..4dfbbe72df56a0 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -135,6 +135,9 @@ Objects/stringlib/unicode_format.h - PyFieldNameIter_Type - Objects/unicodeobject.c - EncodingMapType - #Objects/unicodeobject.c - PyFieldNameIter_Type - #Objects/unicodeobject.c - PyFormatterIter_Type - +Python/legacy_tracing.c - _PyLegacyEventHandler_Type - +Objects/object.c - _PyLegacyEventHandler_Type - + ##----------------------- ## static builtin structseq @@ -297,6 +300,8 @@ Objects/object.c - _Py_NotImplementedStruct - Objects/setobject.c - _dummy_struct - Objects/setobject.c - _PySet_Dummy - Objects/sliceobject.c - _Py_EllipsisObject - +Python/instrumentation.c - DISABLE - +Python/instrumentation.c - _PyInstrumentation_MISSING - ################################## @@ -311,11 +316,6 @@ Objects/sliceobject.c - _Py_EllipsisObject - ##----------------------- ## static types -Modules/_collectionsmodule.c - defdict_type - -Modules/_collectionsmodule.c - deque_type - -Modules/_collectionsmodule.c - dequeiter_type - -Modules/_collectionsmodule.c - dequereviter_type - -Modules/_collectionsmodule.c - tuplegetter_type - Modules/_io/bufferedio.c - PyBufferedIOBase_Type - Modules/_io/bytesio.c - _PyBytesIOBuffer_Type - Modules/_io/iobase.c - PyIOBase_Type - @@ -341,7 +341,6 @@ Modules/_testcapi/vectorcall.c - MethodDescriptor2_Type - ##----------------------- ## static types -Modules/_ctypes/_ctypes.c - DictRemover_Type - Modules/_ctypes/_ctypes.c - PyCArrayType_Type - Modules/_ctypes/_ctypes.c - PyCArray_Type - Modules/_ctypes/_ctypes.c - PyCData_Type - @@ -352,18 +351,14 @@ Modules/_ctypes/_ctypes.c - PyCPointer_Type - Modules/_ctypes/_ctypes.c - PyCSimpleType_Type - Modules/_ctypes/_ctypes.c - PyCStructType_Type - Modules/_ctypes/_ctypes.c - Simple_Type - -Modules/_ctypes/_ctypes.c - StructParam_Type - Modules/_ctypes/_ctypes.c - Struct_Type - Modules/_ctypes/_ctypes.c - UnionType_Type - Modules/_ctypes/_ctypes.c - Union_Type - -Modules/_ctypes/callbacks.c - PyCThunk_Type - Modules/_ctypes/callproc.c - PyCArg_Type - -Modules/_ctypes/cfield.c - PyCField_Type - Modules/_ctypes/ctypes.h - PyCArg_Type - Modules/_ctypes/ctypes.h - PyCArrayType_Type - Modules/_ctypes/ctypes.h - PyCArray_Type - Modules/_ctypes/ctypes.h - PyCData_Type - -Modules/_ctypes/ctypes.h - PyCField_Type - Modules/_ctypes/ctypes.h - PyCFuncPtrType_Type - Modules/_ctypes/ctypes.h - PyCFuncPtr_Type - Modules/_ctypes/ctypes.h - PyCPointerType_Type - @@ -371,7 +366,6 @@ Modules/_ctypes/ctypes.h - PyCPointer_Type - Modules/_ctypes/ctypes.h - PyCSimpleType_Type - Modules/_ctypes/ctypes.h - PyCStgDict_Type - Modules/_ctypes/ctypes.h - PyCStructType_Type - -Modules/_ctypes/ctypes.h - PyCThunk_Type - Modules/_ctypes/ctypes.h - PyExc_ArgError - Modules/_ctypes/ctypes.h - _ctypes_conversion_encoding - Modules/_ctypes/ctypes.h - _ctypes_conversion_errors - @@ -390,14 +384,8 @@ Modules/_decimal/_decimal.c - PyDecContextManager_Type - Modules/_decimal/_decimal.c - PyDecContext_Type - 
Modules/_decimal/_decimal.c - PyDecSignalDictMixin_Type - Modules/_decimal/_decimal.c - PyDec_Type - -Modules/_pickle.c - Pdata_Type - -Modules/_pickle.c - PicklerMemoProxyType - -Modules/_pickle.c - Pickler_Type - -Modules/_pickle.c - UnpicklerMemoProxyType - -Modules/_pickle.c - Unpickler_Type - Modules/ossaudiodev.c - OSSAudioType - Modules/ossaudiodev.c - OSSMixerType - -Modules/socketmodule.c - sock_type - Modules/xxmodule.c - Null_Type - Modules/xxmodule.c - Str_Type - Modules/xxmodule.c - Xxo_Type - @@ -421,8 +409,6 @@ Modules/_cursesmodule.c - PyCursesError - Modules/_decimal/_decimal.c - DecimalException - Modules/_tkinter.c - Tkinter_TclError - Modules/ossaudiodev.c - OSSAudioError - -Modules/socketmodule.c - socket_herror - -Modules/socketmodule.c - socket_gaierror - Modules/xxlimited_35.c - ErrorObject - Modules/xxmodule.c - ErrorObject - @@ -457,12 +443,13 @@ Modules/_decimal/_decimal.c - extended_context_template - Modules/_decimal/_decimal.c - round_map - Modules/_decimal/_decimal.c - Rational - Modules/_decimal/_decimal.c - SignalTuple - -Modules/arraymodule.c array_array___reduce_ex___impl array_reconstructor - ## state Modules/_asynciomodule.c - fi_freelist - Modules/_asynciomodule.c - fi_freelist_len - Modules/_ctypes/_ctypes.c - _ctypes_ptrtype_cache - +Modules/_ctypes/_ctypes.c - global_state - +Modules/_ctypes/ctypes.h - global_state - Modules/_tkinter.c - tcl_lock - Modules/_tkinter.c - excInCmd - Modules/_tkinter.c - valInCmd - @@ -494,34 +481,9 @@ Modules/_decimal/_decimal.c - _py_float_abs - Modules/_decimal/_decimal.c - _py_long_bit_length - Modules/_decimal/_decimal.c - _py_float_as_integer_ratio - Modules/_elementtree.c - expat_capi - -Modules/cjkcodecs/_codecs_hk.c - big5_encmap - -Modules/cjkcodecs/_codecs_hk.c - big5_decmap - -Modules/cjkcodecs/_codecs_hk.c big5hkscs_codec_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c - cp949_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - ksx1001_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisxcommon_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0208_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0212_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_bmp_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_1_bmp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_2_bmp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_emp_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_1_emp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_2_emp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - gbcommon_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - gb2312_decmap - -Modules/cjkcodecs/_codecs_iso2022.c ksx1001_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c jisx0208_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c jisx0212_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c jisx0213_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c gb2312_init initialized - -Modules/cjkcodecs/cjkcodecs.h - codec_list - -Modules/cjkcodecs/cjkcodecs.h - mapping_list - Modules/readline.c - libedit_append_replace_history_offset - Modules/readline.c - using_libedit_emulation - Modules/readline.c - libedit_history_start - -Modules/socketmodule.c - accept4_works - -Modules/socketmodule.c - sock_cloexec_works - ##----------------------- ## state @@ -539,7 +501,6 @@ Modules/_tkinter.c - command_mutex - Modules/_tkinter.c - HeadFHCD - Modules/_tkinter.c - stdin_ready - Modules/_tkinter.c - event_tstate - -Modules/_xxinterpchannelsmodule.c - _globals - Modules/readline.c - 
completer_word_break_characters - Modules/readline.c - _history_length - Modules/readline.c - should_auto_add_history - @@ -548,4 +509,3 @@ Modules/readline.c - sigwinch_ohandler - Modules/readline.c - completed_input_string - Modules/rotatingtree.c - random_stream - Modules/rotatingtree.c - random_value - -Modules/socketmodule.c - defaulttimeout - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index 048112dd992555..7a5d7d45f5184b 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -141,7 +141,6 @@ Modules/syslogmodule.c - S_log_open - ##----------------------- ## kept for stable ABI compatibility -# XXX should be per-interpreter, without impacting stable ABI extensions Objects/object.c - _Py_RefTotal - ##----------------------- @@ -157,6 +156,9 @@ Modules/faulthandler.c faulthandler_dump_traceback reentrant - Python/pylifecycle.c _Py_FatalErrorFormat reentrant - Python/pylifecycle.c fatal_error reentrant - +# explicitly protected, internal-only +Modules/_xxinterpchannelsmodule.c - _globals - + ################################## ## not significant @@ -301,11 +303,13 @@ Objects/genobject.c - NON_INIT_CORO_MSG - Objects/longobject.c - _PyLong_DigitValue - Objects/object.c - _Py_SwappedOp - Objects/object.c - _Py_abstract_hack - +Objects/object.c - last_final_reftotal - Objects/object.c - static_types - Objects/obmalloc.c - _PyMem - Objects/obmalloc.c - _PyMem_Debug - Objects/obmalloc.c - _PyMem_Raw - Objects/obmalloc.c - _PyObject - +Objects/obmalloc.c - last_final_leaks - Objects/obmalloc.c - usedpools - Objects/typeobject.c - name_op - Objects/typeobject.c - slotdefs - @@ -350,7 +354,6 @@ Python/pylifecycle.c - INTERPRETER_TRAMPOLINE_CODEDEF - Python/pystate.c - initial - Python/specialize.c - adaptive_opcodes - Python/specialize.c - cache_requirements - -Python/specialize.c - compare_masks - Python/stdlib_module_names.h - _Py_stdlib_module_names - Python/sysmodule.c - _PySys_ImplCacheTag - Python/sysmodule.c - _PySys_ImplName - diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py index f1b655b8b0545b..62ddeac0265ad8 100644 --- a/Tools/cases_generator/generate_cases.py +++ b/Tools/cases_generator/generate_cases.py @@ -122,8 +122,9 @@ def __init__( self.prefix = " " * indent self.emit_line_directives = emit_line_directives self.lineno = 1 + filename = os.path.relpath(self.stream.name, ROOT) # Make filename more user-friendly and less platform-specific - filename = self.stream.name.replace("\\", "/") + filename = filename.replace("\\", "/") if filename.startswith("./"): filename = filename[2:] if filename.endswith(".new"): @@ -567,6 +568,7 @@ def parse_file(self, filename: str, instrs_idx: dict[str, int]) -> None: with open(filename) as file: src = file.read() + filename = os.path.relpath(filename, ROOT) # Make filename more user-friendly and less platform-specific filename = filename.replace("\\", "/") if filename.startswith("./"): @@ -928,7 +930,7 @@ def write_function( direction: str, data: list[tuple[AnyInstruction, str]] ) -> None: self.out.emit("") - self.out.emit("#ifndef NEED_OPCODE_TABLES") + self.out.emit("#ifndef NEED_OPCODE_METADATA") self.out.emit(f"extern int _PyOpcode_num_{direction}(int opcode, int oparg, bool jump);") self.out.emit("#else") self.out.emit("int") @@ -997,7 +999,7 @@ def write_metadata(self) -> None: self.out.emit("") # Write metadata array declaration - self.out.emit("#ifndef NEED_OPCODE_TABLES") + self.out.emit("#ifndef 
NEED_OPCODE_METADATA") self.out.emit("extern const struct opcode_metadata _PyOpcode_opcode_metadata[256];") self.out.emit("#else") self.out.emit("const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {") diff --git a/Tools/cases_generator/interpreter_definition.md b/Tools/cases_generator/interpreter_definition.md index c7bd38d32ff411..6f902f60c68ee7 100644 --- a/Tools/cases_generator/interpreter_definition.md +++ b/Tools/cases_generator/interpreter_definition.md @@ -137,9 +137,9 @@ The following definitions may occur: `foo_1` is legal. `$` is not legal, nor is `struct` or `class`. The optional `type` in an `object` is the C type. It defaults to `PyObject *`. -The objects before the "--" are the objects on top of the the stack at the start -of the instruction. Those after the "--" are the objects on top of the the stack -at the end of the instruction. +The objects before the "--" are the objects on top of the stack at the start of +the instruction. Those after the "--" are the objects on top of the stack at the +end of the instruction. An `inst` without `stack_effect` is a transitional form to allow the original C code definitions to be copied. It lacks information to generate anything other than the diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py index 56d6970b29249c..e38bd59e20a305 100755 --- a/Tools/gdb/libpython.py +++ b/Tools/gdb/libpython.py @@ -882,10 +882,16 @@ class PyLongObjectPtr(PyObjectPtr): def proxyval(self, visited): ''' Python's Include/longobjrep.h has this declaration: - struct _longobject { - PyObject_VAR_HEAD - digit ob_digit[1]; - }; + + typedef struct _PyLongValue { + uintptr_t lv_tag; /* Number of digits, sign and flags */ + digit ob_digit[1]; + } _PyLongValue; + + struct _longobject { + PyObject_HEAD + _PyLongValue long_value; + }; with this description: The absolute value of a number is equal to @@ -897,11 +903,13 @@ def proxyval(self, visited): #define PyLong_SHIFT 30 #define PyLong_SHIFT 15 ''' - ob_size = int(self.field('ob_size')) - if ob_size == 0: + long_value = self.field('long_value') + lv_tag = int(long_value['lv_tag']) + size = lv_tag >> 3 + if size == 0: return 0 - ob_digit = self.field('long_value')['ob_digit'] + ob_digit = long_value['ob_digit'] if gdb.lookup_type('digit').sizeof == 2: SHIFT = 15 @@ -909,9 +917,9 @@ def proxyval(self, visited): SHIFT = 30 digits = [int(ob_digit[i]) * 2**(SHIFT*i) - for i in safe_range(abs(ob_size))] + for i in safe_range(size)] result = sum(digits) - if ob_size < 0: + if (lv_tag & 3) == 2: result = -result return result diff --git a/Tools/importbench/importbench.py b/Tools/importbench/importbench.py index 6c4a537ad86e6c..619263b553c081 100644 --- a/Tools/importbench/importbench.py +++ b/Tools/importbench/importbench.py @@ -6,7 +6,7 @@ """ from test.test_importlib import util import decimal -import imp +from importlib.util import cache_from_source import importlib import importlib.machinery import json @@ -65,7 +65,7 @@ def source_wo_bytecode(seconds, repeat): name = '__importlib_test_benchmark__' # Clears out sys.modules and puts an entry at the front of sys.path. 
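# A minimal sketch of the lv_tag decoding that the gdb helper above relies on: the
# low bits carry the sign (2 means negative, per the `(lv_tag & 3) == 2` check), the
# remaining bits shifted right by 3 give the digit count, and the value is the
# base-2**SHIFT sum of the digits. TAG_FROM_SIGN_AND_SIZE is not defined in this
# patch, so only the decode direction is sketched here; the digit width (15 or 30
# bits) is assumed to be known, as in the gdb code, and decode_pylong is a
# hypothetical helper name.
PyLong_SHIFT = 30  # 15 when sizeof(digit) == 2, as probed above

def decode_pylong(lv_tag, digits):
    """Reconstruct a Python int from an lv_tag and its digit array."""
    size = lv_tag >> 3                     # number of digits
    if size == 0:                          # zero is stored with no digits
        return 0
    value = sum(int(d) * 2 ** (PyLong_SHIFT * i)
                for i, d in enumerate(digits[:size]))
    if (lv_tag & 3) == 2:                  # sign bits: 2 means negative
        value = -value
    return value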
with util.create_modules(name) as mapping: - assert not os.path.exists(imp.cache_from_source(mapping[name])) + assert not os.path.exists(cache_from_source(mapping[name])) sys.meta_path.append(importlib.machinery.PathFinder) loader = (importlib.machinery.SourceFileLoader, importlib.machinery.SOURCE_SUFFIXES) @@ -80,7 +80,7 @@ def _wo_bytecode(module): name = module.__name__ def benchmark_wo_bytecode(seconds, repeat): """Source w/o bytecode: {}""" - bytecode_path = imp.cache_from_source(module.__file__) + bytecode_path = cache_from_source(module.__file__) if os.path.exists(bytecode_path): os.unlink(bytecode_path) sys.dont_write_bytecode = True @@ -108,9 +108,9 @@ def source_writing_bytecode(seconds, repeat): sys.path_hooks.append(importlib.machinery.FileFinder.path_hook(loader)) def cleanup(): sys.modules.pop(name) - os.unlink(imp.cache_from_source(mapping[name])) + os.unlink(cache_from_source(mapping[name])) for result in bench(name, cleanup, repeat=repeat, seconds=seconds): - assert not os.path.exists(imp.cache_from_source(mapping[name])) + assert not os.path.exists(cache_from_source(mapping[name])) yield result @@ -121,7 +121,7 @@ def writing_bytecode_benchmark(seconds, repeat): assert not sys.dont_write_bytecode def cleanup(): sys.modules.pop(name) - os.unlink(imp.cache_from_source(module.__file__)) + os.unlink(cache_from_source(module.__file__)) yield from bench(name, cleanup, repeat=repeat, seconds=seconds) writing_bytecode_benchmark.__doc__ = ( @@ -141,7 +141,7 @@ def source_using_bytecode(seconds, repeat): importlib.machinery.SOURCE_SUFFIXES) sys.path_hooks.append(importlib.machinery.FileFinder.path_hook(loader)) py_compile.compile(mapping[name]) - assert os.path.exists(imp.cache_from_source(mapping[name])) + assert os.path.exists(cache_from_source(mapping[name])) yield from bench(name, lambda: sys.modules.pop(name), repeat=repeat, seconds=seconds) diff --git a/Tools/iobench/iobench.py b/Tools/iobench/iobench.py index b0a7feb92e4f92..4017149ec91630 100644 --- a/Tools/iobench/iobench.py +++ b/Tools/iobench/iobench.py @@ -1,6 +1,3 @@ -# -*- coding: utf-8 -*- -# This file should be kept compatible with both Python 2.6 and Python >= 3.0. 
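# importlib.util.cache_from_source(), used above in place of the removed imp module,
# maps a source path to its PEP 3147 byte-compiled counterpart; a quick illustration
# (the cache tag in the result depends on the running interpreter, so the exact
# filename below is only an example):
from importlib.util import cache_from_source

print(cache_from_source("Lib/decimal.py"))
# e.g. Lib/__pycache__/decimal.cpython-312.pyc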
- import itertools import os import platform @@ -14,39 +11,37 @@ TEXT_ENCODING = 'utf8' NEWLINES = 'lf' -# Compatibility -try: - xrange -except NameError: - xrange = range def text_open(fn, mode, encoding=None): try: return open(fn, mode, encoding=encoding or TEXT_ENCODING) except TypeError: - if 'r' in mode: - mode += 'U' # 'U' mode is needed only in Python 2.x return open(fn, mode) + def get_file_sizes(): for s in ['20 KiB', '400 KiB', '10 MiB']: size, unit = s.split() size = int(size) * {'KiB': 1024, 'MiB': 1024 ** 2}[unit] yield s.replace(' ', ''), size + def get_binary_files(): return ((name + ".bin", size) for name, size in get_file_sizes()) + def get_text_files(): - return (("%s-%s-%s.txt" % (name, TEXT_ENCODING, NEWLINES), size) + return ((f"{name}-{TEXT_ENCODING}-{NEWLINES}.txt", size) for name, size in get_file_sizes()) + def with_open_mode(mode): def decorate(f): f.file_open_mode = mode return f return decorate + def with_sizes(*sizes): def decorate(f): f.file_sizes = sizes @@ -64,6 +59,7 @@ def read_bytewise(f): while f.read(1): pass + @with_open_mode("r") @with_sizes("medium") def read_small_chunks(f): @@ -72,6 +68,7 @@ def read_small_chunks(f): while f.read(20): pass + @with_open_mode("r") @with_sizes("medium") def read_big_chunks(f): @@ -80,6 +77,7 @@ def read_big_chunks(f): while f.read(4096): pass + @with_open_mode("r") @with_sizes("small", "medium", "large") def read_whole_file(f): @@ -88,6 +86,7 @@ def read_whole_file(f): while f.read(): pass + @with_open_mode("rt") @with_sizes("medium") def read_lines(f): @@ -96,6 +95,7 @@ def read_lines(f): for line in f: pass + @with_open_mode("r") @with_sizes("medium") def seek_forward_bytewise(f): @@ -103,9 +103,10 @@ def seek_forward_bytewise(f): f.seek(0, 2) size = f.tell() f.seek(0, 0) - for i in xrange(0, size - 1): + for i in range(0, size - 1): f.seek(i, 0) + @with_open_mode("r") @with_sizes("medium") def seek_forward_blockwise(f): @@ -113,9 +114,10 @@ def seek_forward_blockwise(f): f.seek(0, 2) size = f.tell() f.seek(0, 0) - for i in xrange(0, size - 1, 1000): + for i in range(0, size - 1, 1000): f.seek(i, 0) + @with_open_mode("rb") @with_sizes("medium") def read_seek_bytewise(f): @@ -124,6 +126,7 @@ def read_seek_bytewise(f): while f.read(1): f.seek(1, 1) + @with_open_mode("rb") @with_sizes("medium") def read_seek_blockwise(f): @@ -137,28 +140,31 @@ def read_seek_blockwise(f): @with_sizes("small") def write_bytewise(f, source): """ write one unit at a time """ - for i in xrange(0, len(source)): + for i in range(0, len(source)): f.write(source[i:i+1]) + @with_open_mode("w") @with_sizes("medium") def write_small_chunks(f, source): """ write 20 units at a time """ - for i in xrange(0, len(source), 20): + for i in range(0, len(source), 20): f.write(source[i:i+20]) + @with_open_mode("w") @with_sizes("medium") def write_medium_chunks(f, source): """ write 4096 units at a time """ - for i in xrange(0, len(source), 4096): + for i in range(0, len(source), 4096): f.write(source[i:i+4096]) + @with_open_mode("w") @with_sizes("large") def write_large_chunks(f, source): """ write 1e6 units at a time """ - for i in xrange(0, len(source), 1000000): + for i in range(0, len(source), 1000000): f.write(source[i:i+1000000]) @@ -167,59 +173,65 @@ def write_large_chunks(f, source): def modify_bytewise(f, source): """ modify one unit at a time """ f.seek(0) - for i in xrange(0, len(source)): + for i in range(0, len(source)): f.write(source[i:i+1]) + @with_open_mode("w+") @with_sizes("medium") def modify_small_chunks(f, source): """ modify 20 units 
at a time """ f.seek(0) - for i in xrange(0, len(source), 20): + for i in range(0, len(source), 20): f.write(source[i:i+20]) + @with_open_mode("w+") @with_sizes("medium") def modify_medium_chunks(f, source): """ modify 4096 units at a time """ f.seek(0) - for i in xrange(0, len(source), 4096): + for i in range(0, len(source), 4096): f.write(source[i:i+4096]) + @with_open_mode("wb+") @with_sizes("medium") def modify_seek_forward_bytewise(f, source): """ alternate write & seek one unit """ f.seek(0) - for i in xrange(0, len(source), 2): + for i in range(0, len(source), 2): f.write(source[i:i+1]) f.seek(i+2) + @with_open_mode("wb+") @with_sizes("medium") def modify_seek_forward_blockwise(f, source): """ alternate write & seek 1000 units """ f.seek(0) - for i in xrange(0, len(source), 2000): + for i in range(0, len(source), 2000): f.write(source[i:i+1000]) f.seek(i+2000) + # XXX the 2 following tests don't work with py3k's text IO @with_open_mode("wb+") @with_sizes("medium") def read_modify_bytewise(f, source): """ alternate read & write one unit """ f.seek(0) - for i in xrange(0, len(source), 2): + for i in range(0, len(source), 2): f.read(1) f.write(source[i+1:i+2]) + @with_open_mode("wb+") @with_sizes("medium") def read_modify_blockwise(f, source): """ alternate read & write 1000 units """ f.seek(0) - for i in xrange(0, len(source), 2000): + for i in range(0, len(source), 2000): f.read(1000) f.write(source[i+1000:i+2000]) @@ -242,6 +254,7 @@ def read_modify_blockwise(f, source): read_modify_bytewise, read_modify_blockwise, ] + def run_during(duration, func): _t = time.time n = 0 @@ -257,6 +270,7 @@ def run_during(duration, func): real = (end[4] if start[4] else time.time()) - real_start return n, real, sum(end[0:2]) - sum(start[0:2]) + def warm_cache(filename): with open(filename, "rb") as f: f.read() @@ -266,9 +280,7 @@ def run_all_tests(options): def print_label(filename, func): name = re.split(r'[-.]', filename)[0] out.write( - ("[%s] %s... " - % (name.center(7), func.__doc__.strip()) - ).ljust(52)) + f"[{name.center(7)}] {func.__doc__.strip()}... 
".ljust(52)) out.flush() def print_results(size, n, real, cpu): @@ -276,8 +288,9 @@ def print_results(size, n, real, cpu): bw = ("%4d MiB/s" if bw > 100 else "%.3g MiB/s") % bw out.write(bw.rjust(12) + "\n") if cpu < 0.90 * real: - out.write(" warning: test above used only %d%% CPU, " - "result may be flawed!\n" % (100.0 * cpu / real)) + out.write(" warning: test above used only " + f"{cpu / real:%} CPU, " + "result may be flawed!\n") def run_one_test(name, size, open_func, test_func, *args): mode = test_func.file_open_mode @@ -308,22 +321,15 @@ def run_test_family(tests, mode_filter, files, open_func, *make_args): "large": 2, } - print("Python %s" % sys.version) - if sys.version_info < (3, 3): - if sys.maxunicode > 0xffff: - text = "UCS-4 (wide build)" - else: - text = "UTF-16 (narrow build)" - else: - text = "PEP 393" - print("Unicode: %s" % text) + print(f"Python {sys.version}") + print("Unicode: PEP 393") print(platform.platform()) binary_files = list(get_binary_files()) text_files = list(get_text_files()) if "b" in options: print("Binary unit = one byte") if "t" in options: - print("Text unit = one character (%s-decoded)" % TEXT_ENCODING) + print(f"Text unit = one character ({TEXT_ENCODING}-decoded)") # Binary reads if "b" in options and "r" in options: @@ -338,6 +344,7 @@ def run_test_family(tests, mode_filter, files, open_func, *make_args): # Binary writes if "b" in options and "w" in options: print("\n** Binary append **\n") + def make_test_source(name, size): with open(name, "rb") as f: return f.read() @@ -347,6 +354,7 @@ def make_test_source(name, size): # Text writes if "t" in options and "w" in options: print("\n** Text append **\n") + def make_test_source(name, size): with text_open(name, "r") as f: return f.read() @@ -356,6 +364,7 @@ def make_test_source(name, size): # Binary overwrites if "b" in options and "w" in options: print("\n** Binary overwrite **\n") + def make_test_source(name, size): with open(name, "rb") as f: return f.read() @@ -365,6 +374,7 @@ def make_test_source(name, size): # Text overwrites if "t" in options and "w" in options: print("\n** Text overwrite **\n") + def make_test_source(name, size): with text_open(name, "r") as f: return f.read() @@ -388,7 +398,7 @@ def prepare_files(): break else: raise RuntimeError( - "Couldn't find chunk marker in %s !" 
% __file__) + f"Couldn't find chunk marker in {__file__} !") if NEWLINES == "all": it = itertools.cycle(["\n", "\r", "\r\n"]) else: @@ -414,6 +424,7 @@ def prepare_files(): f.write(head) f.write(tail) + def main(): global TEXT_ENCODING, NEWLINES @@ -433,7 +444,7 @@ def main(): help="run write & modify tests") parser.add_option("-E", "--encoding", action="store", dest="encoding", default=None, - help="encoding for text tests (default: %s)" % TEXT_ENCODING) + help=f"encoding for text tests (default: {TEXT_ENCODING})") parser.add_option("-N", "--newlines", action="store", dest="newlines", default='lf', help="line endings for text tests " @@ -446,7 +457,7 @@ def main(): parser.error("unexpected arguments") NEWLINES = options.newlines.lower() if NEWLINES not in ('lf', 'cr', 'crlf', 'all'): - parser.error("invalid 'newlines' option: %r" % NEWLINES) + parser.error(f"invalid 'newlines' option: {NEWLINES!r}") test_options = "" if options.read: @@ -471,6 +482,7 @@ def main(): prepare_files() run_all_tests(test_options) + if __name__ == "__main__": main() diff --git a/Tools/patchcheck/patchcheck.py b/Tools/patchcheck/patchcheck.py index 6dcf612066199c..fa3a43af6e6048 100755 --- a/Tools/patchcheck/patchcheck.py +++ b/Tools/patchcheck/patchcheck.py @@ -130,9 +130,10 @@ def changed_files(base_branch=None): with subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, cwd=SRCDIR) as st: - if st.wait() != 0: + git_file_status, _ = st.communicate() + if st.returncode != 0: sys.exit(f'error running {cmd}') - for line in st.stdout: + for line in git_file_status.splitlines(): line = line.decode().rstrip() status_text, filename = line.split(maxsplit=1) status = set(status_text) @@ -169,12 +170,24 @@ def report_modified_files(file_paths): return "\n".join(lines) +#: Python files that have tabs by design: +_PYTHON_FILES_WITH_TABS = frozenset({ + 'Tools/c-analyzer/cpython/_parser.py', +}) + + @status("Fixing Python file whitespace", info=report_modified_files) def normalize_whitespace(file_paths): """Make sure that the whitespace for .py files have been normalized.""" reindent.makebackup = False # No need to create backups. - fixed = [path for path in file_paths if path.endswith('.py') and - reindent.check(os.path.join(SRCDIR, path))] + fixed = [ + path for path in file_paths + if ( + path.endswith('.py') + and path not in _PYTHON_FILES_WITH_TABS + and reindent.check(os.path.join(SRCDIR, path)) + ) + ] return fixed diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index e72ce7afdc4796..f57b6275f671d3 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -68,6 +68,7 @@ class NodeTypes(Enum): KEYWORD = 4 SOFT_KEYWORD = 5 CUT_OPERATOR = 6 + F_STRING_CHUNK = 7 BASE_NODETYPES = { diff --git a/Tools/scripts/README b/Tools/scripts/README index b9522681595901..9dbb89a8dae633 100644 --- a/Tools/scripts/README +++ b/Tools/scripts/README @@ -2,9 +2,15 @@ This directory contains a collection of executable Python scripts that are useful while building, extending or managing Python. 
2to3 Main script for running the 2to3 conversion tool +checkpip.py Checks the version of the projects bundled in ensurepip + are the latest available combinerefs.py A helper for analyzing PYTHONDUMPREFS output +divmod_threshold.py Determine threshold for switching from longobject.c + divmod to _pylong.int_divmod() idle3 Main program to start IDLE -parse_html5_entities.py Utility for parsing HTML5 entity definitions pydoc3 Python documentation browser run_tests.py Run the test suite with more sensible default options -stable_abi.py Stable ABI checks and file generators. +summarize_stats.py Summarize specialization stats for all files in the + default stats folders +var_access_benchmark.py Show relative speeds of local, nonlocal, global, + and built-in access diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index 7789c4d3a17d38..ce25374f3a9a52 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -228,8 +228,6 @@ def kind_to_text(kind, defines, opname): return pretty(defines[kind][0]) if opname.endswith("ATTR"): opname = "ATTR" - if opname in ("COMPARE_OP", "COMPARE_AND_BRANCH"): - opname = "COMPARE" if opname.endswith("SUBSCR"): opname = "SUBSCR" for name in defines[kind]: diff --git a/Tools/wasm/Setup.local.example b/Tools/wasm/Setup.local.example index ad58c31a2efe31..cfb9f7fc8755f8 100644 --- a/Tools/wasm/Setup.local.example +++ b/Tools/wasm/Setup.local.example @@ -5,6 +5,7 @@ audioop _bz2 _crypt _decimal +nis _pickle pyexpat _elementtree _sha3 _blake2 diff --git a/Tools/wasm/wasm_assets.py b/Tools/wasm/wasm_assets.py index 9dc8bda4017e2c..1fc97fd5e70a10 100755 --- a/Tools/wasm/wasm_assets.py +++ b/Tools/wasm/wasm_assets.py @@ -6,7 +6,8 @@ - a stripped down, pyc-only stdlib zip file, e.g. {PREFIX}/lib/python311.zip - os.py as marker module {PREFIX}/lib/python3.11/os.py -- empty lib-dynload directory, to make sure it is copied into the bundle {PREFIX}/lib/python3.11/lib-dynload/.empty +- empty lib-dynload directory, to make sure it is copied into the bundle: + {PREFIX}/lib/python3.11/lib-dynload/.empty """ import argparse diff --git a/Tools/wasm/wasm_build.py b/Tools/wasm/wasm_build.py index 493682c5b138a3..241a5d4eed5ae8 100755 --- a/Tools/wasm/wasm_build.py +++ b/Tools/wasm/wasm_build.py @@ -73,7 +73,7 @@ run "make clean -C '{SRCDIR}'". """ -INSTALL_NATIVE = f""" +INSTALL_NATIVE = """ Builds require a C compiler (gcc, clang), make, pkg-config, and development headers for dependencies like zlib. @@ -598,7 +598,7 @@ def run_browser(self, bind="127.0.0.1", port=8000): end = time.monotonic() + 3.0 while time.monotonic() < end and srv.returncode is None: try: - with socket.create_connection((bind, port), timeout=0.1) as s: + with socket.create_connection((bind, port), timeout=0.1) as _: pass except OSError: time.sleep(0.01) diff --git a/configure b/configure index 9e99352f589f21..8133d47f61355b 100755 --- a/configure +++ b/configure @@ -892,6 +892,8 @@ PGO_PROF_USE_FLAG PGO_PROF_GEN_FLAG MERGE_FDATA LLVM_BOLT +ac_ct_READELF +READELF PREBOLT_RULE LLVM_AR_FOUND LLVM_AR @@ -3104,7 +3106,6 @@ if test "$srcdir" != . -a "$srcdir" != "$(pwd)"; then # resources get picked up before their $srcdir counterparts. # Objects/ -> typeslots.inc # Include/ -> Python.h - # Python/ -> importlib.h # (A side effect of this is that these resources will automatically be # regenerated when building out-of-tree, regardless of whether or not # the $srcdir counterpart is up-to-date. 
This is an acceptable trade @@ -7917,6 +7918,112 @@ if test "$Py_BOLT" = 'true' ; then DEF_MAKE_ALL_RULE="bolt-opt" DEF_MAKE_RULE="build_all" + + if test -n "$ac_tool_prefix"; then + for ac_prog in readelf + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_READELF+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$READELF"; then + ac_cv_prog_READELF="$READELF" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_READELF="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +READELF=$ac_cv_prog_READELF +if test -n "$READELF"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $READELF" >&5 +$as_echo "$READELF" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$READELF" && break + done +fi +if test -z "$READELF"; then + ac_ct_READELF=$READELF + for ac_prog in readelf +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_READELF+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_READELF"; then + ac_cv_prog_ac_ct_READELF="$ac_ct_READELF" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_READELF="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_READELF=$ac_cv_prog_ac_ct_READELF +if test -n "$ac_ct_READELF"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_READELF" >&5 +$as_echo "$ac_ct_READELF" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_READELF" && break +done + + if test "x$ac_ct_READELF" = x; then + READELF=""notfound"" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + READELF=$ac_ct_READELF + fi +fi + + if test "$READELF" == "notfound" + then + as_fn_error $? "readelf is required for a --enable-bolt build but could not be found." "$LINENO" 5 + fi + # -fno-reorder-blocks-and-partition is required for bolt to work. # Possibly GCC only. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -fno-reorder-blocks-and-partition" >&5 diff --git a/configure.ac b/configure.ac index 31b7a2157a2bcc..3f20d8980d8abc 100644 --- a/configure.ac +++ b/configure.ac @@ -97,7 +97,6 @@ if test "$srcdir" != . 
-a "$srcdir" != "$(pwd)"; then # resources get picked up before their $srcdir counterparts. # Objects/ -> typeslots.inc # Include/ -> Python.h - # Python/ -> importlib.h # (A side effect of this is that these resources will automatically be # regenerated when building out-of-tree, regardless of whether or not # the $srcdir counterpart is up-to-date. This is an acceptable trade @@ -1939,6 +1938,13 @@ if test "$Py_BOLT" = 'true' ; then DEF_MAKE_ALL_RULE="bolt-opt" DEF_MAKE_RULE="build_all" + AC_SUBST(READELF) + AC_CHECK_TOOLS(READELF, [readelf], "notfound") + if test "$READELF" == "notfound" + then + AC_MSG_ERROR([readelf is required for a --enable-bolt build but could not be found.]) + fi + # -fno-reorder-blocks-and-partition is required for bolt to work. # Possibly GCC only. AX_CHECK_COMPILE_FLAG([-fno-reorder-blocks-and-partition],[ diff --git a/netlify.toml b/netlify.toml deleted file mode 100644 index f5790fc5fec74f..00000000000000 --- a/netlify.toml +++ /dev/null @@ -1,11 +0,0 @@ -[build] - base = "Doc/" - command = "make html" - publish = "build/html" - # Do not trigger netlify builds if docs were not changed. - # Changed files should be in sync with `.github/workflows/doc.yml` - ignore = "git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../netlify.toml" - -[build.environment] - PYTHON_VERSION = "3.8" - IS_DEPLOYMENT_PREVIEW = "true"