diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9149b38d87601c..3422ef835279bc 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -11,7 +11,7 @@ configure* @erlend-aasland @corona10 # asyncio -**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 +**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 @willingc # Core **/*context* @1st1 @@ -25,6 +25,8 @@ Objects/frameobject.c @markshannon Objects/call.c @markshannon Python/ceval.c @markshannon Python/compile.c @markshannon @iritkatriel +Python/assemble.c @markshannon @iritkatriel +Python/flowgraph.c @markshannon @iritkatriel Python/ast_opt.c @isidentical Lib/test/test_patma.py @brandtbucher Lib/test/test_peepholer.py @brandtbucher diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7eba212cbb9a6a..df0f107a541614 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -308,6 +308,10 @@ jobs: run: echo "::add-matcher::.github/problem-matchers/gcc.json" - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh + - name: Set up GCC-10 for ASAN + uses: egor-tensin/setup-gcc@v1 + with: + version: 10 - name: Configure OpenSSL env vars run: | echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 3101b30231c355..3f7550cc72943b 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -56,11 +56,13 @@ jobs: # Add pull request annotations for Sphinx nitpicks (missing references) - name: 'Get list of changed files' + if: github.event_name == 'pull_request' id: changed_files uses: Ana06/get-changed-files@v2.2.0 with: filter: "Doc/**" - name: 'Build changed files in nit-picky mode' + if: github.event_name == 'pull_request' continue-on-error: true run: | # Mark files the pull request modified @@ -77,6 +79,26 @@ jobs: # Build docs with the '-n' (nit-picky) option, convert warnings to errors (-W) make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going" html 2>&1 + # This build doesn't use problem matchers or check annotations + # It also does not run 'make check', as sphinx-lint is not installed into the + # environment. + build_doc_oldest_supported_sphinx: + name: 'Docs (Oldest Sphinx)' + runs-on: ubuntu-latest + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - name: 'Set up Python' + uses: actions/setup-python@v4 + with: + python-version: '3.11' # known to work with Sphinx 3.2 + cache: 'pip' + cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt' + - name: 'Install build dependencies' + run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt" + - name: 'Build HTML documentation' + run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html + # Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release doctest: name: 'Doctest' diff --git a/.github/workflows/documentation-links.yml b/.github/workflows/documentation-links.yml new file mode 100644 index 00000000000000..43a7afec73884e --- /dev/null +++ b/.github/workflows/documentation-links.yml @@ -0,0 +1,27 @@ +name: Read the Docs PR preview +# Automatically edits a pull request's descriptions with a link +# to the documentation's preview on Read the Docs. 
+ +on: + pull_request_target: + types: + - opened + paths: + - 'Doc/**' + - '.github/workflows/doc.yml' + +permissions: + pull-requests: write + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + documentation-links: + runs-on: ubuntu-latest + steps: + - uses: readthedocs/actions/preview@v1 + with: + project-slug: "cpython-previews" + single-version: "true" diff --git a/.mailmap b/.mailmap new file mode 100644 index 00000000000000..013c839ed6b7a4 --- /dev/null +++ b/.mailmap @@ -0,0 +1,3 @@ +# This file sets the canonical name for contributors to the repository. +# Documentation: https://git-scm.com/docs/gitmailmap +Amethyst Reese diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000000000..898a9ae89dbb92 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,18 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details +# Project page: https://readthedocs.org/projects/cpython-previews/ + +version: 2 + +sphinx: + configuration: Doc/conf.py + +build: + os: ubuntu-22.04 + tools: + python: "3" + + commands: + - make -C Doc venv html + - mkdir _readthedocs + - mv Doc/build/html _readthedocs/html diff --git a/Doc/Makefile b/Doc/Makefile index ebe7f3698000fb..c11ea6ce03e8a4 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -13,6 +13,7 @@ JOBS = auto PAPER = SOURCES = DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py) +REQUIREMENTS = requirements.txt SPHINXERRORHANDLING = -W # Internal variables. @@ -154,8 +155,8 @@ venv: echo "To recreate it, remove it first with \`make clean-venv'."; \ else \ $(PYTHON) -m venv $(VENVDIR); \ - $(VENVDIR)/bin/python3 -m pip install -U pip setuptools; \ - $(VENVDIR)/bin/python3 -m pip install -r requirements.txt; \ + $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \ + $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \ echo "The venv has been created in the $(VENVDIR) directory"; \ fi diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index a51619db6d3d97..474a64800044d0 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -188,6 +188,8 @@ Importing Modules .. versionchanged:: 3.3 Uses :func:`imp.source_from_cache()` in calculating the source path if only the bytecode path is provided. + .. versionchanged:: 3.12 + No longer uses the removed ``imp`` module. .. c:function:: long PyImport_GetMagicNumber() diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index fd8f49ccb1caab..e963b90628aa49 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -1145,7 +1145,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. data:: Py_TPFLAGS_MANAGED_DICT - This bit indicates that instances of the class have a ``__dict___`` + This bit indicates that instances of the class have a ``__dict__`` attribute, and that the space for the dictionary is managed by the VM. If this flag is set, :const:`Py_TPFLAGS_HAVE_GC` should also be set. diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index f062f14e9a7561..ab3a2e274d9395 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -509,6 +509,15 @@ APIs: arguments. +.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj) + + Copy an instance of a Unicode subtype to a new true Unicode object if + necessary. If *obj* is already a true Unicode object (not a subtype), + return the reference with incremented refcount. + + Objects other than Unicode or its subtypes will cause a :exc:`TypeError`. + + .. 
c:function:: PyObject* PyUnicode_FromEncodedObject(PyObject *obj, \ const char *encoding, const char *errors) @@ -616,15 +625,6 @@ APIs: .. versionadded:: 3.3 -.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj) - - Copy an instance of a Unicode subtype to a new true Unicode object if - necessary. If *obj* is already a true Unicode object (not a subtype), - return the reference with incremented refcount. - - Objects other than Unicode or its subtypes will cause a :exc:`TypeError`. - - Locale Encoding """"""""""""""" diff --git a/Doc/conf.py b/Doc/conf.py index 42c23bf77c7034..cef2a0e2837f6a 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -114,12 +114,13 @@ # Short title used e.g. for HTML tags. html_short_title = '%s Documentation' % release -# Deployment preview information, from Netlify -# (See netlify.toml and https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +# Deployment preview information +# (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html) +repository_url = os.getenv("READTHEDOCS_GIT_CLONE_URL") html_context = { - "is_deployment_preview": os.getenv("IS_DEPLOYMENT_PREVIEW"), - "repository_url": os.getenv("REPOSITORY_URL"), - "pr_id": os.getenv("REVIEW_ID") + "is_deployment_preview": os.getenv("READTHEDOCS_VERSION_TYPE") == "external", + "repository_url": repository_url.removesuffix(".git") if repository_url else None, + "pr_id": os.getenv("READTHEDOCS_VERSION") } # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, diff --git a/Doc/constraints.txt b/Doc/constraints.txt new file mode 100644 index 00000000000000..66c748eb092d83 --- /dev/null +++ b/Doc/constraints.txt @@ -0,0 +1,29 @@ +# We have upper bounds on our transitive dependencies here +# To avoid new releases unexpectedly breaking our build. +# This file can be updated on an ad-hoc basis, +# though it will probably have to be updated +# whenever Doc/requirements.txt is updated. + +# Direct dependencies of Sphinx +babel<3 +colorama<0.5 +imagesize<1.5 +Jinja2<3.2 +packaging<24 +# Pygments==2.15.0 breaks CI +Pygments<2.16,!=2.15.0 +requests<3 +snowballstemmer<3 +sphinxcontrib-applehelp<1.1 +sphinxcontrib-devhelp<1.1 +sphinxcontrib-htmlhelp<2.1 +sphinxcontrib-jsmath<1.1 +sphinxcontrib-qthelp<1.1 +sphinxcontrib-serializinghtml<1.2 + +# Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above) +MarkupSafe<2.2 + +# Direct dependencies of sphinx-lint +polib<1.3 +regex<2024 diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index 761c88710f9891..d29cbdff7830c8 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -124,7 +124,7 @@ This is where using the ``if __name__ == '__main__'`` code block comes in handy. Code within this block won't run unless the module is executed in the top-level environment. -Putting as few statements as possible in the block below ``if __name___ == +Putting as few statements as possible in the block below ``if __name__ == '__main__'`` can improve code clarity and correctness. 
Most often, a function named ``main`` encapsulates the program's primary behavior:: diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 5138afc2bbe47b..e982cc166a3f2d 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -1438,9 +1438,7 @@ async/await code consider using the high-level * *stdin* can be any of these: - * a file-like object representing a pipe to be connected to the - subprocess's standard input stream using - :meth:`~loop.connect_write_pipe` + * a file-like object * the :const:`subprocess.PIPE` constant (default) which will create a new pipe and connect it, * the value ``None`` which will make the subprocess inherit the file @@ -1450,9 +1448,7 @@ async/await code consider using the high-level * *stdout* can be any of these: - * a file-like object representing a pipe to be connected to the - subprocess's standard output stream using - :meth:`~loop.connect_write_pipe` + * a file-like object * the :const:`subprocess.PIPE` constant (default) which will create a new pipe and connect it, * the value ``None`` which will make the subprocess inherit the file @@ -1462,9 +1458,7 @@ async/await code consider using the high-level * *stderr* can be any of these: - * a file-like object representing a pipe to be connected to the - subprocess's standard error stream using - :meth:`~loop.connect_write_pipe` + * a file-like object * the :const:`subprocess.PIPE` constant (default) which will create a new pipe and connect it, * the value ``None`` which will make the subprocess inherit the file @@ -1483,6 +1477,11 @@ async/await code consider using the high-level as text. :func:`bytes.decode` can be used to convert the bytes returned from the stream to text. + If a file-like object passed as *stdin*, *stdout* or *stderr* represents a + pipe, then the other side of this pipe should be registered with + :meth:`~loop.connect_write_pipe` or :meth:`~loop.connect_read_pipe` for use + with the event loop. + See the constructor of the :class:`subprocess.Popen` class for documentation on other arguments. @@ -1571,7 +1570,7 @@ Server objects are created by :meth:`loop.create_server`, :meth:`loop.create_unix_server`, :func:`start_server`, and :func:`start_unix_server` functions. -Do not instantiate the class directly. +Do not instantiate the :class:`Server` class directly. .. class:: Server @@ -1662,7 +1661,8 @@ Do not instantiate the class directly. .. attribute:: sockets - List of :class:`socket.socket` objects the server is listening on. + List of socket-like objects, ``asyncio.trsock.TransportSocket``, which + the server is listening on. .. versionchanged:: 3.7 Prior to Python 3.7 ``Server.sockets`` used to return an diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 4274638c5e8625..b7c83aa04c09f1 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -207,8 +207,9 @@ their completion. Interact with process: 1. send data to *stdin* (if *input* is not ``None``); - 2. read data from *stdout* and *stderr*, until EOF is reached; - 3. wait for process to terminate. + 2. closes *stdin*; + 3. read data from *stdout* and *stderr*, until EOF is reached; + 4. wait for process to terminate. The optional *input* argument is the data (:class:`bytes` object) that will be sent to the child process. @@ -229,6 +230,10 @@ their completion. Note, that the data read is buffered in memory, so do not use this method if the data size is large or unlimited. + .. 
versionchanged:: 3.12 + + *stdin* gets closed when `input=None` too. + .. method:: send_signal(signal) Sends the signal *signal* to the child process. diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst index 66f59f0e2ced27..07d04a1c7b582a 100644 --- a/Doc/library/calendar.rst +++ b/Doc/library/calendar.rst @@ -28,6 +28,58 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is 2 BC, and so on. +.. class:: Day + + Enumeration defining the days of the week as integer constants, from 0 to 6. + + .. attribute:: MONDAY + + .. attribute:: TUESDAY + + .. attribute:: WEDNESDAY + + .. attribute:: THURSDAY + + .. attribute:: FRIDAY + + .. attribute:: SATURDAY + + .. attribute:: SUNDAY + + .. versionadded:: 3.12 + + +.. class:: Month + + Enumeration defining months of the year as integer constants, from 1 to 12. + + .. attribute:: JANUARY + + .. attribute:: FEBRUARY + + .. attribute:: MARCH + + .. attribute:: APRIL + + .. attribute:: MAY + + .. attribute:: JUNE + + .. attribute:: JULY + + .. attribute:: AUGUST + + .. attribute:: SEPTEMBER + + .. attribute:: OCTOBER + + .. attribute:: NOVEMBER + + .. attribute:: DECEMBER + + .. versionadded:: 3.12 + + .. class:: Calendar(firstweekday=0) Creates a :class:`Calendar` object. *firstweekday* is an integer specifying the diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 85a7d9026d1d30..a5b20149921042 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -437,11 +437,11 @@ Module contents The newly returned object is created by calling the :meth:`~object.__init__` method of the dataclass. This ensures that - :ref:`__post_init__ <post-init-processing>`, if present, is also called. + :meth:`__post_init__`, if present, is also called. Init-only variables without default values, if any exist, must be specified on the call to :func:`replace` so that they can be passed to - :meth:`~object.__init__` and :ref:`__post_init__ <post-init-processing>`. + :meth:`~object.__init__` and :meth:`__post_init__`. It is an error for ``changes`` to contain any fields that are defined as having ``init=False``. A :exc:`ValueError` will be raised @@ -449,7 +449,7 @@ Module contents Be forewarned about how ``init=False`` fields work during a call to :func:`replace`. They are not copied from the source object, but - rather are initialized in :ref:`__post_init__ <post-init-processing>`, if they're + rather are initialized in :meth:`__post_init__`, if they're initialized at all. It is expected that ``init=False`` fields will be rarely and judiciously used. If they are used, it might be wise to have alternate class constructors, or perhaps a custom @@ -510,30 +510,31 @@ Module contents Post-init processing -------------------- -The generated :meth:`~object.__init__` code will call a method named -:meth:`!__post_init__`, if :meth:`!__post_init__` is defined on the -class. It will normally be called as ``self.__post_init__()``. -However, if any ``InitVar`` fields are defined, they will also be -passed to :meth:`!__post_init__` in the order they were defined in the -class. If no :meth:`~object.__init__` method is generated, then -:meth:`!__post_init__` will not automatically be called. +.. function:: __post_init__() -Among other uses, this allows for initializing field values that -depend on one or more other fields. For example:: + When defined on the class, it will be called by the generated + :meth:`~object.__init__`, normally as ``self.__post_init__()``. 
+ However, if any ``InitVar`` fields are defined, they will also be + passed to :meth:`__post_init__` in the order they were defined in the + class. If no :meth:`~object.__init__` method is generated, then + :meth:`__post_init__` will not automatically be called. - @dataclass - class C: - a: float - b: float - c: float = field(init=False) + Among other uses, this allows for initializing field values that + depend on one or more other fields. For example:: - def __post_init__(self): - self.c = self.a + self.b + @dataclass + class C: + a: float + b: float + c: float = field(init=False) + + def __post_init__(self): + self.c = self.a + self.b The :meth:`~object.__init__` method generated by :func:`dataclass` does not call base class :meth:`~object.__init__` methods. If the base class has an :meth:`~object.__init__` method that has to be called, it is common to call this method in a -:meth:`!__post_init__` method:: +:meth:`__post_init__` method:: @dataclass class Rectangle: @@ -552,7 +553,7 @@ don't need to be called, since the derived dataclass will take care of initializing all fields of any base class that is a dataclass itself. See the section below on init-only variables for ways to pass -parameters to :meth:`!__post_init__`. Also see the warning about how +parameters to :meth:`__post_init__`. Also see the warning about how :func:`replace` handles ``init=False`` fields. Class variables @@ -576,7 +577,7 @@ is an ``InitVar``, it is considered a pseudo-field called an init-only field. As it is not a true field, it is not returned by the module-level :func:`fields` function. Init-only fields are added as parameters to the generated :meth:`~object.__init__` method, and are passed to -the optional :ref:`__post_init__ <post-init-processing>` method. They are not otherwise used +the optional :meth:`__post_init__` method. They are not otherwise used by dataclasses. For example, suppose a field will be initialized from a database, if a diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 7889dd7d1c3ef0..bed19ad145a20c 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -896,6 +896,10 @@ Other constructors, all class methods: in UTC. As such, the recommended way to create an object representing the current time in UTC is by calling ``datetime.now(timezone.utc)``. + .. deprecated:: 3.12 + + Use :meth:`datetime.now` with :attr:`UTC` instead. + .. classmethod:: datetime.fromtimestamp(timestamp, tz=None) @@ -964,6 +968,10 @@ Other constructors, all class methods: :c:func:`gmtime` function. Raise :exc:`OSError` instead of :exc:`ValueError` on :c:func:`gmtime` failure. + .. deprecated:: 3.12 + + Use :meth:`datetime.fromtimestamp` with :attr:`UTC` instead. + .. classmethod:: datetime.fromordinal(ordinal) diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index 3894837127877c..6c3f436ddb1494 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -402,7 +402,7 @@ The Python compiler currently generates the following bytecode instructions. **General instructions** -In the following, We will refer to the interpreter stack as STACK and describe +In the following, We will refer to the interpreter stack as ``STACK`` and describe operations on it as if it was a Python list. The top of the stack corresponds to ``STACK[-1]`` in this language. @@ -414,7 +414,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. 
opcode:: POP_TOP - Removes the top-of-stack item.:: + Removes the top-of-stack item:: STACK.pop() @@ -422,7 +422,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. opcode:: END_FOR Removes the top two values from the stack. - Equivalent to POP_TOP; POP_TOP. + Equivalent to ``POP_TOP``; ``POP_TOP``. Used to clean up at the end of loops, hence the name. .. versionadded:: 3.12 @@ -431,7 +431,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. opcode:: COPY (i) Push the i-th item to the top of the stack without removing it from its original - location.:: + location:: assert i > 0 STACK.append(STACK[-i]) @@ -441,7 +441,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to .. opcode:: SWAP (i) - Swap the top of the stack with the i-th element.:: + Swap the top of the stack with the i-th element:: STACK[-i], STACK[-1] = stack[-1], STACK[-i] @@ -513,7 +513,7 @@ not have to be) the original ``STACK[-2]``. .. opcode:: BINARY_OP (op) Implements the binary and in-place operators (depending on the value of - *op*).:: + *op*):: rhs = STACK.pop() lhs = STACK.pop() @@ -580,14 +580,14 @@ not have to be) the original ``STACK[-2]``. Implements ``STACK[-1] = get_awaitable(STACK[-1])``, where ``get_awaitable(o)`` returns ``o`` if ``o`` is a coroutine object or a generator object with - the CO_ITERABLE_COROUTINE flag, or resolves + the :data:`~inspect.CO_ITERABLE_COROUTINE` flag, or resolves ``o.__await__``. If the ``where`` operand is nonzero, it indicates where the instruction occurs: - * ``1`` After a call to ``__aenter__`` - * ``2`` After a call to ``__aexit__`` + * ``1``: After a call to ``__aenter__`` + * ``2``: After a call to ``__aexit__`` .. versionadded:: 3.5 @@ -652,6 +652,7 @@ not have to be) the original ``STACK[-2]``. .. opcode:: SET_ADD (i) Implements:: + item = STACK.pop() set.add(STACK[-i], item) @@ -705,11 +706,11 @@ iterations of the loop. Yields ``STACK.pop()`` from a :term:`generator`. - .. versionchanged:: 3.11 - oparg set to be the stack depth. + .. versionchanged:: 3.11 + oparg set to be the stack depth. - .. versionchanged:: 3.12 - oparg set to be the exception block depth, for efficient closing of generators. + .. versionchanged:: 3.12 + oparg set to be the exception block depth, for efficient closing of generators. .. opcode:: SETUP_ANNOTATIONS @@ -726,32 +727,32 @@ iterations of the loop. Pops a value from the stack, which is used to restore the exception state. - .. versionchanged:: 3.11 - Exception representation on the stack now consist of one, not three, items. + .. versionchanged:: 3.11 + Exception representation on the stack now consist of one, not three, items. .. opcode:: RERAISE - Re-raises the exception currently on top of the stack. If oparg is non-zero, - pops an additional value from the stack which is used to set ``f_lasti`` - of the current frame. + Re-raises the exception currently on top of the stack. If oparg is non-zero, + pops an additional value from the stack which is used to set ``f_lasti`` + of the current frame. - .. versionadded:: 3.9 + .. versionadded:: 3.9 - .. versionchanged:: 3.11 - Exception representation on the stack now consist of one, not three, items. + .. versionchanged:: 3.11 + Exception representation on the stack now consist of one, not three, items. .. opcode:: PUSH_EXC_INFO - Pops a value from the stack. Pushes the current exception to the top of the stack. - Pushes the value originally popped back to the stack. 
- Used in exception handlers. + Pops a value from the stack. Pushes the current exception to the top of the stack. + Pushes the value originally popped back to the stack. + Used in exception handlers. - .. versionadded:: 3.11 + .. versionadded:: 3.11 .. opcode:: CHECK_EXC_MATCH Performs exception matching for ``except``. Tests whether the ``STACK[-2]`` - is an exception matching ``STACK[-1]``. Pops STACK[-1] and pushes the boolean + is an exception matching ``STACK[-1]``. Pops ``STACK[-1]`` and pushes the boolean result of the test. .. versionadded:: 3.11 @@ -770,16 +771,16 @@ iterations of the loop. .. opcode:: WITH_EXCEPT_START - Calls the function in position 4 on the stack with arguments (type, val, tb) - representing the exception at the top of the stack. - Used to implement the call ``context_manager.__exit__(*exc_info())`` when an exception - has occurred in a :keyword:`with` statement. + Calls the function in position 4 on the stack with arguments (type, val, tb) + representing the exception at the top of the stack. + Used to implement the call ``context_manager.__exit__(*exc_info())`` when an exception + has occurred in a :keyword:`with` statement. - .. versionadded:: 3.9 + .. versionadded:: 3.9 - .. versionchanged:: 3.11 - The ``__exit__`` function is in position 4 of the stack rather than 7. - Exception representation on the stack now consist of one, not three, items. + .. versionchanged:: 3.11 + The ``__exit__`` function is in position 4 of the stack rather than 7. + Exception representation on the stack now consist of one, not three, items. .. opcode:: LOAD_ASSERTION_ERROR @@ -863,7 +864,7 @@ iterations of the loop. .. opcode:: UNPACK_SEQUENCE (count) Unpacks ``STACK[-1]`` into *count* individual values, which are put onto the stack - right-to-left.:: + right-to-left:: STACK.extend(STACK.pop()[:count:-1]) @@ -1028,7 +1029,7 @@ iterations of the loop. This bytecode distinguishes two cases: if ``STACK[-1]`` has a method with the correct name, the bytecode pushes the unbound method and ``STACK[-1]``. ``STACK[-1]`` will be used as the first argument (``self``) by :opcode:`CALL` - when calling the unbound method. Otherwise, ``NULL`` and the object return by + when calling the unbound method. Otherwise, ``NULL`` and the object returned by the attribute lookup are pushed. .. versionchanged:: 3.12 @@ -1207,7 +1208,7 @@ iterations of the loop. .. opcode:: MAKE_CELL (i) - Creates a new cell in slot ``i``. If that slot is empty then + Creates a new cell in slot ``i``. If that slot is nonempty then that value is stored into the new cell. .. versionadded:: 3.11 @@ -1332,9 +1333,9 @@ iterations of the loop. .. opcode:: PUSH_NULL - Pushes a ``NULL`` to the stack. - Used in the call sequence to match the ``NULL`` pushed by - :opcode:`LOAD_METHOD` for non-method calls. + Pushes a ``NULL`` to the stack. + Used in the call sequence to match the ``NULL`` pushed by + :opcode:`LOAD_METHOD` for non-method calls. .. versionadded:: 3.11 @@ -1434,38 +1435,38 @@ iterations of the loop. .. opcode:: RESUME (where) - A no-op. Performs internal tracing, debugging and optimization checks. + A no-op. Performs internal tracing, debugging and optimization checks. 
- The ``where`` operand marks where the ``RESUME`` occurs: + The ``where`` operand marks where the ``RESUME`` occurs: - * ``0`` The start of a function, which is neither a generator, coroutine - nor an async generator - * ``1`` After a ``yield`` expression - * ``2`` After a ``yield from`` expression - * ``3`` After an ``await`` expression + * ``0`` The start of a function, which is neither a generator, coroutine + nor an async generator + * ``1`` After a ``yield`` expression + * ``2`` After a ``yield from`` expression + * ``3`` After an ``await`` expression .. versionadded:: 3.11 .. opcode:: RETURN_GENERATOR - Create a generator, coroutine, or async generator from the current frame. - Used as first opcode of in code object for the above mentioned callables. - Clear the current frame and return the newly created generator. + Create a generator, coroutine, or async generator from the current frame. + Used as the first opcode in a code object for the above mentioned callables. + Clear the current frame and return the newly created generator. - .. versionadded:: 3.11 + .. versionadded:: 3.11 .. opcode:: SEND (delta) - Equivalent to ``STACK[-1] = STACK[-2].send(STACK[-1])``. Used in ``yield from`` - and ``await`` statements. + Equivalent to ``STACK[-1] = STACK[-2].send(STACK[-1])``. Used in ``yield from`` - and ``await`` statements. + and ``await`` statements. - If the call raises :exc:`StopIteration`, pop both items, push the - exception's ``value`` attribute, and increment the bytecode counter by - *delta*. + If the call raises :exc:`StopIteration`, pop both items, push the + exception's ``value`` attribute, and increment the bytecode counter by + *delta*. - .. versionadded:: 3.11 + .. versionadded:: 3.11 .. opcode:: HAVE_ARGUMENT @@ -1493,15 +1494,15 @@ iterations of the loop. argument and sets ``STACK[-1]`` to the result. Used to implement functionality that is necessary but not performance critical. - The operand determines which intrinsic function is called: + The operand determines which intrinsic function is called: - * ``0`` Not valid - * ``1`` Prints the argument to standard out. Used in the REPL. - * ``2`` Performs ``import *`` for the named module. - * ``3`` Extracts the return value from a ``StopIteration`` exception. - * ``4`` Wraps an aync generator value - * ``5`` Performs the unary ``+`` operation - * ``6`` Converts a list to a tuple + * ``0`` Not valid + * ``1`` Prints the argument to standard out. Used in the REPL. + * ``2`` Performs ``import *`` for the named module. + * ``3`` Extracts the return value from a ``StopIteration`` exception. + * ``4`` Wraps an async generator value + * ``5`` Performs the unary ``+`` operation + * ``6`` Converts a list to a tuple .. versionadded:: 3.12 @@ -1511,17 +1512,17 @@ iterations of the loop. arguments and sets ``STACK[-1]`` to the result. Used to implement functionality that is necessary but not performance critical. - The operand determines which intrinsic function is called: + The operand determines which intrinsic function is called: - * ``0`` Not valid - * ``1`` Calculates the :exc:`ExceptionGroup` to raise from a ``try-except*``. + * ``0`` Not valid + * ``1`` Calculates the :exc:`ExceptionGroup` to raise from a ``try-except*``. .. versionadded:: 3.12 **Pseudo-instructions** -These opcodes do not appear in python bytecode, they are used by the compiler +These opcodes do not appear in Python bytecode. They are used by the compiler but are replaced by real opcodes or removed before bytecode is generated. ..
opcode:: SETUP_FINALLY (target) @@ -1533,7 +1534,7 @@ but are replaced by real opcodes or removed before bytecode is generated. .. opcode:: SETUP_CLEANUP (target) - Like ``SETUP_FINALLY``, but in case of exception also pushes the last + Like ``SETUP_FINALLY``, but in case of an exception also pushes the last instruction (``lasti``) to the stack so that ``RERAISE`` can restore it. If an exception occurs, the value stack level and the last instruction on the frame are restored to their current state, and control is transferred @@ -1542,7 +1543,7 @@ but are replaced by real opcodes or removed before bytecode is generated. .. opcode:: SETUP_WITH (target) - Like ``SETUP_CLEANUP``, but in case of exception one more item is popped + Like ``SETUP_CLEANUP``, but in case of an exception one more item is popped from the stack before control is transferred to the exception handler at ``target``. @@ -1576,9 +1577,9 @@ Opcode collections These collections are provided for automatic introspection of bytecode instructions: - .. versionchanged:: 3.12 - The collections now contain pseudo instructions as well. These are - opcodes with values ``>= MIN_PSEUDO_OPCODE``. +.. versionchanged:: 3.12 + The collections now contain pseudo instructions as well. These are + opcodes with values ``>= MIN_PSEUDO_OPCODE``. .. data:: opname @@ -1599,7 +1600,7 @@ instructions: Sequence of bytecodes that use their argument. - .. versionadded:: 3.12 + .. versionadded:: 3.12 .. data:: hasconst @@ -1609,10 +1610,10 @@ instructions: .. data:: hasfree - Sequence of bytecodes that access a free variable (note that 'free' in this + Sequence of bytecodes that access a free variable. 'free' in this context refers to names in the current scope that are referenced by inner scopes or names in outer scopes that are referenced from this scope. It does - *not* include references to global or builtin scopes). + *not* include references to global or builtin scopes. .. data:: hasname @@ -1643,4 +1644,4 @@ instructions: Sequence of bytecodes that set an exception handler. - .. versionadded:: 3.12 + .. versionadded:: 3.12 diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 7792e598c1155c..a5e86ef0f9eb59 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1987,7 +1987,6 @@ are always available. They are listed here in alphabetical order. .. index:: statement: import - module: imp .. note:: diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst deleted file mode 100644 index 000793a7e66cae..00000000000000 --- a/Doc/library/imp.rst +++ /dev/null @@ -1,411 +0,0 @@ -:mod:`imp` --- Access the :ref:`import <importsystem>` internals -================================================================ - -.. module:: imp - :synopsis: Access the implementation of the import statement. - :deprecated: - -**Source code:** :source:`Lib/imp.py` - -.. deprecated-removed:: 3.4 3.12 - The :mod:`imp` module is deprecated in favor of :mod:`importlib`. - -.. index:: statement: import - --------------- - -This module provides an interface to the mechanisms used to implement the -:keyword:`import` statement. It defines the following constants and functions: - - -.. function:: get_magic() - - .. index:: pair: file; byte-code - - Return the magic string value used to recognize byte-compiled code files - (:file:`.pyc` files). (This value may be different for each Python version.) - - .. deprecated:: 3.4 - Use :attr:`importlib.util.MAGIC_NUMBER` instead. - - -.. 
function:: get_suffixes() - - Return a list of 3-element tuples, each describing a particular type of - module. Each triple has the form ``(suffix, mode, type)``, where *suffix* is - a string to be appended to the module name to form the filename to search - for, *mode* is the mode string to pass to the built-in :func:`open` function - to open the file (this can be ``'r'`` for text files or ``'rb'`` for binary - files), and *type* is the file type, which has one of the values - :const:`PY_SOURCE`, :const:`PY_COMPILED`, or :const:`C_EXTENSION`, described - below. - - .. deprecated:: 3.3 - Use the constants defined on :mod:`importlib.machinery` instead. - - -.. function:: find_module(name[, path]) - - Try to find the module *name*. If *path* is omitted or ``None``, the list of - directory names given by ``sys.path`` is searched, but first a few special - places are searched: the function tries to find a built-in module with the - given name (:const:`C_BUILTIN`), then a frozen module (:const:`PY_FROZEN`), - and on some systems some other places are looked in as well (on Windows, it - looks in the registry which may point to a specific file). - - Otherwise, *path* must be a list of directory names; each directory is - searched for files with any of the suffixes returned by :func:`get_suffixes` - above. Invalid names in the list are silently ignored (but all list items - must be strings). - - If search is successful, the return value is a 3-element tuple ``(file, - pathname, description)``: - - *file* is an open :term:`file object` positioned at the beginning, *pathname* - is the pathname of the file found, and *description* is a 3-element tuple as - contained in the list returned by :func:`get_suffixes` describing the kind of - module found. - - If the module is built-in or frozen then *file* and *pathname* are both ``None`` - and the *description* tuple contains empty strings for its suffix and mode; - the module type is indicated as given in parentheses above. If the search - is unsuccessful, :exc:`ImportError` is raised. Other exceptions indicate - problems with the arguments or environment. - - If the module is a package, *file* is ``None``, *pathname* is the package - path and the last item in the *description* tuple is :const:`PKG_DIRECTORY`. - - This function does not handle hierarchical module names (names containing - dots). In order to find *P.M*, that is, submodule *M* of package *P*, use - :func:`find_module` and :func:`load_module` to find and load package *P*, and - then use :func:`find_module` with the *path* argument set to ``P.__path__``. - When *P* itself has a dotted name, apply this recipe recursively. - - .. deprecated:: 3.3 - Use :func:`importlib.util.find_spec` instead unless Python 3.3 - compatibility is required, in which case use - :func:`importlib.find_loader`. For example usage of the former case, - see the :ref:`importlib-examples` section of the :mod:`importlib` - documentation. - - -.. function:: load_module(name, file, pathname, description) - - Load a module that was previously found by :func:`find_module` (or by an - otherwise conducted search yielding compatible results). This function does - more than importing the module: if the module was already imported, it will - reload the module! The *name* argument indicates the full - module name (including the package name, if this is a submodule of a - package). 
The *file* argument is an open file, and *pathname* is the - corresponding file name; these can be ``None`` and ``''``, respectively, when - the module is a package or not being loaded from a file. The *description* - argument is a tuple, as would be returned by :func:`get_suffixes`, describing - what kind of module must be loaded. - - If the load is successful, the return value is the module object; otherwise, - an exception (usually :exc:`ImportError`) is raised. - - **Important:** the caller is responsible for closing the *file* argument, if - it was not ``None``, even when an exception is raised. This is best done - using a :keyword:`try` ... :keyword:`finally` statement. - - .. deprecated:: 3.3 - If previously used in conjunction with :func:`imp.find_module` then - consider using :func:`importlib.import_module`, otherwise use the loader - returned by the replacement you chose for :func:`imp.find_module`. If you - called :func:`imp.load_module` and related functions directly with file - path arguments then use a combination of - :func:`importlib.util.spec_from_file_location` and - :func:`importlib.util.module_from_spec`. See the :ref:`importlib-examples` - section of the :mod:`importlib` documentation for details of the various - approaches. - - -.. function:: new_module(name) - - Return a new empty module object called *name*. This object is *not* inserted - in ``sys.modules``. - - .. deprecated:: 3.4 - Use :func:`importlib.util.module_from_spec` instead. - - -.. function:: reload(module) - - Reload a previously imported *module*. The argument must be a module object, so - it must have been successfully imported before. This is useful if you have - edited the module source file using an external editor and want to try out the - new version without leaving the Python interpreter. The return value is the - module object (the same as the *module* argument). - - When ``reload(module)`` is executed: - - * Python modules' code is recompiled and the module-level code reexecuted, - defining a new set of objects which are bound to names in the module's - dictionary. The ``init`` function of extension modules is not called a second - time. - - * As with all other objects in Python the old objects are only reclaimed after - their reference counts drop to zero. - - * The names in the module namespace are updated to point to any new or changed - objects. - - * Other references to the old objects (such as names external to the module) are - not rebound to refer to the new objects and must be updated in each namespace - where they occur if that is desired. - - There are a number of other caveats: - - When a module is reloaded, its dictionary (containing the module's global - variables) is retained. Redefinitions of names will override the old - definitions, so this is generally not a problem. If the new version of a module - does not define a name that was defined by the old version, the old definition - remains. This feature can be used to the module's advantage if it maintains a - global table or cache of objects --- with a :keyword:`try` statement it can test - for the table's presence and skip its initialization if desired:: - - try: - cache - except NameError: - cache = {} - - It is legal though generally not very useful to reload built-in or dynamically - loaded modules, except for :mod:`sys`, :mod:`__main__` and :mod:`builtins`. - In many cases, however, extension modules are not designed to be initialized - more than once, and may fail in arbitrary ways when reloaded. 
- - If a module imports objects from another module using :keyword:`from` ... - :keyword:`import` ..., calling :func:`reload` for the other module does not - redefine the objects imported from it --- one way around this is to re-execute - the :keyword:`!from` statement, another is to use :keyword:`!import` and qualified - names (*module*.*name*) instead. - - If a module instantiates instances of a class, reloading the module that defines - the class does not affect the method definitions of the instances --- they - continue to use the old class definition. The same is true for derived classes. - - .. versionchanged:: 3.3 - Relies on both ``__name__`` and ``__loader__`` being defined on the module - being reloaded instead of just ``__name__``. - - .. deprecated:: 3.4 - Use :func:`importlib.reload` instead. - - -The following functions are conveniences for handling :pep:`3147` byte-compiled -file paths. - -.. versionadded:: 3.2 - -.. function:: cache_from_source(path, debug_override=None) - - Return the :pep:`3147` path to the byte-compiled file associated with the - source *path*. For example, if *path* is ``/foo/bar/baz.py`` the return - value would be ``/foo/bar/__pycache__/baz.cpython-32.pyc`` for Python 3.2. - The ``cpython-32`` string comes from the current magic tag (see - :func:`get_tag`; if :attr:`sys.implementation.cache_tag` is not defined then - :exc:`NotImplementedError` will be raised). By passing in ``True`` or - ``False`` for *debug_override* you can override the system's value for - ``__debug__``, leading to optimized bytecode. - - *path* need not exist. - - .. versionchanged:: 3.3 - If :attr:`sys.implementation.cache_tag` is ``None``, then - :exc:`NotImplementedError` is raised. - - .. deprecated:: 3.4 - Use :func:`importlib.util.cache_from_source` instead. - - .. versionchanged:: 3.5 - The *debug_override* parameter no longer creates a ``.pyo`` file. - - -.. function:: source_from_cache(path) - - Given the *path* to a :pep:`3147` file name, return the associated source code - file path. For example, if *path* is - ``/foo/bar/__pycache__/baz.cpython-32.pyc`` the returned path would be - ``/foo/bar/baz.py``. *path* need not exist, however if it does not conform - to :pep:`3147` format, a :exc:`ValueError` is raised. If - :attr:`sys.implementation.cache_tag` is not defined, - :exc:`NotImplementedError` is raised. - - .. versionchanged:: 3.3 - Raise :exc:`NotImplementedError` when - :attr:`sys.implementation.cache_tag` is not defined. - - .. deprecated:: 3.4 - Use :func:`importlib.util.source_from_cache` instead. - - -.. function:: get_tag() - - Return the :pep:`3147` magic tag string matching this version of Python's - magic number, as returned by :func:`get_magic`. - - .. deprecated:: 3.4 - Use :attr:`sys.implementation.cache_tag` directly starting - in Python 3.3. - - -The following functions help interact with the import system's internal -locking mechanism. Locking semantics of imports are an implementation -detail which may vary from release to release. However, Python ensures -that circular imports work without any deadlocks. - - -.. function:: lock_held() - - Return ``True`` if the global import lock is currently held, else - ``False``. On platforms without threads, always return ``False``. - - On platforms with threads, a thread executing an import first holds a - global import lock, then sets up a per-module lock for the rest of the - import. 
This blocks other threads from importing the same module until - the original import completes, preventing other threads from seeing - incomplete module objects constructed by the original thread. An - exception is made for circular imports, which by construction have to - expose an incomplete module object at some point. - - .. versionchanged:: 3.3 - The locking scheme has changed to per-module locks for - the most part. A global import lock is kept for some critical tasks, - such as initializing the per-module locks. - - .. deprecated:: 3.4 - - -.. function:: acquire_lock() - - Acquire the interpreter's global import lock for the current thread. - This lock should be used by import hooks to ensure thread-safety when - importing modules. - - Once a thread has acquired the import lock, the same thread may acquire it - again without blocking; the thread must release it once for each time it has - acquired it. - - On platforms without threads, this function does nothing. - - .. versionchanged:: 3.3 - The locking scheme has changed to per-module locks for - the most part. A global import lock is kept for some critical tasks, - such as initializing the per-module locks. - - .. deprecated:: 3.4 - - -.. function:: release_lock() - - Release the interpreter's global import lock. On platforms without - threads, this function does nothing. - - .. versionchanged:: 3.3 - The locking scheme has changed to per-module locks for - the most part. A global import lock is kept for some critical tasks, - such as initializing the per-module locks. - - .. deprecated:: 3.4 - - -The following constants with integer values, defined in this module, are used -to indicate the search result of :func:`find_module`. - - -.. data:: PY_SOURCE - - The module was found as a source file. - - .. deprecated:: 3.3 - - -.. data:: PY_COMPILED - - The module was found as a compiled code object file. - - .. deprecated:: 3.3 - - -.. data:: C_EXTENSION - - The module was found as dynamically loadable shared library. - - .. deprecated:: 3.3 - - -.. data:: PKG_DIRECTORY - - The module was found as a package directory. - - .. deprecated:: 3.3 - - -.. data:: C_BUILTIN - - The module was found as a built-in module. - - .. deprecated:: 3.3 - - -.. data:: PY_FROZEN - - The module was found as a frozen module. - - .. deprecated:: 3.3 - - -.. class:: NullImporter(path_string) - - The :class:`NullImporter` type is a :pep:`302` import hook that handles - non-directory path strings by failing to find any modules. Calling this type - with an existing directory or empty string raises :exc:`ImportError`. - Otherwise, a :class:`NullImporter` instance is returned. - - Instances have only one method: - - .. method:: NullImporter.find_module(fullname [, path]) - - This method always returns ``None``, indicating that the requested module could - not be found. - - .. versionchanged:: 3.3 - ``None`` is inserted into ``sys.path_importer_cache`` instead of an - instance of :class:`NullImporter`. - - .. deprecated:: 3.4 - Insert ``None`` into ``sys.path_importer_cache`` instead. - - -.. _examples-imp: - -Examples --------- - -The following function emulates what was the standard import statement up to -Python 1.4 (no hierarchical module names). (This *implementation* wouldn't work -in that version, since :func:`find_module` has been extended and -:func:`load_module` has been added in 1.4.) :: - - import imp - import sys - - def __import__(name, globals=None, locals=None, fromlist=None): - # Fast path: see if the module has already been imported. 
- try: - return sys.modules[name] - except KeyError: - pass - - # If any of the following calls raises an exception, - # there's a problem we can't handle -- let the caller handle it. - - fp, pathname, description = imp.find_module(name) - - try: - return imp.load_module(name, fp, pathname, description) - finally: - # Since we may exit via an exception, close fp explicitly. - if fp: - fp.close() diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index e57c393a6b370b..a0d794017e2602 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -789,6 +789,7 @@ which incur interpreter overhead. .. testcode:: import collections + import functools import math import operator import random @@ -1082,7 +1083,7 @@ The following recipes have a more mathematical flavor: # convolve(data, [1, -2, 1]) --> 2nd finite difference (2nd derivative) kernel = tuple(kernel)[::-1] n = len(kernel) - padded_signal = chain(repeat(0, n-1), signal, [0] * (n-1)) + padded_signal = chain(repeat(0, n-1), signal, repeat(0, n-1)) for window in sliding_window(padded_signal, n): yield math.sumprod(kernel, window) @@ -1092,10 +1093,8 @@ The following recipes have a more mathematical flavor: (x - 5) (x + 4) (x - 3) expands to: x³ -4x² -17x + 60 """ # polynomial_from_roots([5, -4, 3]) --> [1, -4, -17, 60] - expansion = [1] - for r in roots: - expansion = convolve(expansion, (1, -r)) - return list(expansion) + factors = zip(repeat(1), map(operator.neg, roots)) + return list(functools.reduce(convolve, factors, [1])) def polynomial_eval(coefficients, x): """Evaluate a polynomial at a specific value. diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst index 2539c3d3883298..4686232b09ac47 100644 --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -25,7 +25,7 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions). An :class:`SMTP` instance encapsulates an SMTP connection. It has methods that support a full repertoire of SMTP and ESMTP operations. If the optional - host and port parameters are given, the SMTP :meth:`connect` method is + *host* and *port* parameters are given, the SMTP :meth:`connect` method is called with those parameters during initialization. If specified, *local_hostname* is used as the FQDN of the local host in the HELO/EHLO command. Otherwise, the local hostname is found using @@ -34,12 +34,12 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions). *timeout* parameter specifies a timeout in seconds for blocking operations like the connection attempt (if not specified, the global default timeout setting will be used). If the timeout expires, :exc:`TimeoutError` is - raised. The optional source_address parameter allows binding + raised. The optional *source_address* parameter allows binding to some specific source address in a machine with multiple network interfaces, and/or to some specific source TCP port. It takes a 2-tuple - (host, port), for the socket to bind to as its source address before - connecting. If omitted (or if host or port are ``''`` and/or 0 respectively) - the OS default behavior will be used. + ``(host, port)``, for the socket to bind to as its source address before + connecting. If omitted (or if *host* or *port* are ``''`` and/or ``0`` + respectively) the OS default behavior will be used. For normal use, you should only require the initialization/connect, :meth:`sendmail`, and :meth:`SMTP.quit` methods. 
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 51146e00999659..89673b8006ae77 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -310,7 +310,7 @@ Module functions to avoid data corruption. See :attr:`threadsafety` for more information. - :param Connection factory: + :param ~sqlite3.Connection factory: A custom subclass of :class:`Connection` to create the connection with, if not the default :class:`Connection` class. @@ -337,7 +337,7 @@ Module functions The default will change to ``False`` in a future Python release. :type autocommit: bool - :rtype: Connection + :rtype: ~sqlite3.Connection .. audit-event:: sqlite3.connect database sqlite3.connect .. audit-event:: sqlite3.connect/handle connection_handle sqlite3.connect @@ -573,6 +573,38 @@ Module constants package, a third-party library which used to upstream changes to :mod:`!sqlite3`. Today, it carries no meaning or practical value. +.. _sqlite3-dbconfig-constants: + +.. data:: SQLITE_DBCONFIG_DEFENSIVE + SQLITE_DBCONFIG_DQS_DDL + SQLITE_DBCONFIG_DQS_DML + SQLITE_DBCONFIG_ENABLE_FKEY + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION + SQLITE_DBCONFIG_ENABLE_QPSG + SQLITE_DBCONFIG_ENABLE_TRIGGER + SQLITE_DBCONFIG_ENABLE_VIEW + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE + SQLITE_DBCONFIG_RESET_DATABASE + SQLITE_DBCONFIG_TRIGGER_EQP + SQLITE_DBCONFIG_TRUSTED_SCHEMA + SQLITE_DBCONFIG_WRITABLE_SCHEMA + + These constants are used for the :meth:`Connection.setconfig` + and :meth:`~Connection.getconfig` methods. + + The availability of these constants varies depending on the version of SQLite + Python was compiled with. + + .. versionadded:: 3.12 + + .. seealso:: + + https://www.sqlite.org/c3ref/c_dbconfig_defensive.html + SQLite docs: Database Connection Configuration Options + .. _sqlite3-connection-objects: @@ -1041,12 +1073,25 @@ Connection objects (2, 'broccoli pie', 'broccoli cheese onions flour') (3, 'pumpkin pie', 'pumpkin sugar flour butter') - .. method:: load_extension(path, /) + .. method:: load_extension(path, /, *, entrypoint=None) - Load an SQLite extension from a shared library located at *path*. + Load an SQLite extension from a shared library. Enable extension loading with :meth:`enable_load_extension` before calling this method. + :param str path: + + The path to the SQLite extension. + + :param entrypoint: + + Entry point name. + If ``None`` (the default), + SQLite will come up with an entry point name of its own; + see the SQLite docs `Loading an Extension`_ for details. + + :type entrypoint: str | None + .. audit-event:: sqlite3.load_extension connection,path sqlite3.Connection.load_extension .. versionadded:: 3.2 @@ -1054,6 +1099,11 @@ Connection objects .. versionchanged:: 3.10 Added the ``sqlite3.load_extension`` auditing event. + .. versionadded:: 3.12 + The *entrypoint* parameter. + + .. _Loading an Extension: https://www.sqlite.org/loadext.html#loading_an_extension_ + .. method:: iterdump Return an :term:`iterator` to dump the database as SQL source code. @@ -1079,7 +1129,7 @@ Connection objects Works even if the database is being accessed by other clients or concurrently by the same connection. - :param Connection target: + :param ~sqlite3.Connection target: The database connection to save the backup to. :param int pages: @@ -1201,6 +1251,30 @@ Connection objects .. _SQLite limit category: https://www.sqlite.org/c3ref/c_limit_attached.html + .. 
method:: getconfig(op, /) + + Query a boolean connection configuration option. + + :param int op: + A :ref:`SQLITE_DBCONFIG code <sqlite3-dbconfig-constants>`. + + :rtype: bool + + .. versionadded:: 3.12 + + .. method:: setconfig(op, enable=True, /) + + Set a boolean connection configuration option. + + :param int op: + A :ref:`SQLITE_DBCONFIG code <sqlite3-dbconfig-constants>`. + + :param bool enable: + ``True`` if the configuration option should be enabled (default); + ``False`` if it should be disabled. + + .. versionadded:: 3.12 + .. method:: serialize(*, name="main") Serialize a database into a :class:`bytes` object. For an @@ -1456,12 +1530,12 @@ Cursor objects For every item in *parameters*, repeatedly execute the :ref:`parameterized <sqlite3-placeholders>` - SQL statement *sql*. + :abbr:`DML (Data Manipulation Language)` SQL statement *sql*. Uses the same implicit transaction handling as :meth:`~Cursor.execute`. :param str sql: - A single SQL :abbr:`DML (Data Manipulation Language)` statement. + A single SQL DML statement. :param parameters: An :term:`!iterable` of parameters to bind with @@ -1484,6 +1558,13 @@ Cursor objects # cur is an sqlite3.Cursor object cur.executemany("INSERT INTO data VALUES(?)", rows) + .. note:: + + Any resulting rows are discarded, + including DML statements with `RETURNING clauses`_. + + .. _RETURNING clauses: https://www.sqlite.org/lang_returning.html + .. deprecated-removed:: 3.12 3.14 :exc:`DeprecationWarning` is emitted if diff --git a/Doc/library/superseded.rst b/Doc/library/superseded.rst index 8786e227be9182..aaf66ea121d39c 100644 --- a/Doc/library/superseded.rst +++ b/Doc/library/superseded.rst @@ -17,7 +17,6 @@ backwards compatibility. They have been superseded by other modules. chunk.rst crypt.rst imghdr.rst - imp.rst mailcap.rst msilib.rst nis.rst diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 7324f3113e0a08..7c0e85142e7716 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -1253,10 +1253,6 @@ always available. Originally specified in :pep:`302`. - .. versionchanged:: 3.3 - ``None`` is stored instead of :class:`imp.NullImporter` when no finder - is found. - .. data:: platform diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst index 61358eb76925b2..fd4c294613fd31 100644 --- a/Doc/library/tempfile.rst +++ b/Doc/library/tempfile.rst @@ -292,6 +292,9 @@ The module defines the following user-callable items: .. versionchanged:: 3.6 The *dir* parameter now accepts a :term:`path-like object`. + .. versionchanged:: 3.12 + :func:`mkdtemp` now always returns an absolute path, even if *dir* is relative. + .. function:: gettempdir() diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst index 05392d04e52263..10138f4f406f85 100644 --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -107,6 +107,7 @@ Turtle motion | :func:`right` | :func:`rt` | :func:`left` | :func:`lt` | :func:`goto` | :func:`setpos` | :func:`setposition` + | :func:`teleport` | :func:`setx` | :func:`sety` | :func:`setheading` | :func:`seth` @@ -372,6 +373,44 @@ Turtle motion (0.00,0.00) +.. function:: teleport(x, y=None, *, fill_gap=False) + + :param x: a number or ``None`` + :param y: a number or ``None`` + :param fill_gap: a boolean + + Move turtle to an absolute position. Unlike goto(x, y), a line will not + be drawn. The turtle's orientation does not change. If currently + filling, the polygon(s) teleported from will be filled after leaving, + and filling will begin again after teleporting. 
This can be disabled + with fill_gap=True, which makes the imaginary line traveled during + teleporting act as a fill barrier like in goto(x, y). + + .. doctest:: + :skipif: _tkinter is None + :hide: + + >>> turtle.goto(0, 0) + + .. doctest:: + :skipif: _tkinter is None + + >>> tp = turtle.pos() + >>> tp + (0.00,0.00) + >>> turtle.teleport(60) + >>> turtle.pos() + (60.00,0.00) + >>> turtle.teleport(y=10) + >>> turtle.pos() + (60.00,10.00) + >>> turtle.teleport(20, 30) + >>> turtle.pos() + (20.00,30.00) + + .. versionadded: 3.12 + + .. function:: setx(x) :param x: a number (integer or float) @@ -537,8 +576,7 @@ Turtle motion :skipif: _tkinter is None >>> turtle.color("blue") - >>> turtle.stamp() - 11 + >>> stamp_id = turtle.stamp() >>> turtle.fd(50) @@ -575,15 +613,8 @@ Turtle motion .. doctest:: >>> for i in range(8): - ... turtle.stamp(); turtle.fd(30) - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 + ... unused_stamp_id = turtle.stamp() + ... turtle.fd(30) >>> turtle.clearstamps(2) >>> turtle.clearstamps(-2) >>> turtle.clearstamps() diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 54887f4c51983a..a15fb5cfa49473 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -351,6 +351,13 @@ Standard names are defined for the following types: .. versionchanged:: 3.9.2 This type can now be subclassed. + .. seealso:: + + :ref:`Generic Alias Types<types-genericalias>` + In-depth documentation on instances of :class:`!types.GenericAlias` + + :pep:`585` - Type Hinting Generics In Standard Collections + Introducing the :class:`!types.GenericAlias` class .. class:: UnionType diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 15bab7775eadd8..409a95d528b5d3 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -98,6 +98,9 @@ annotations. These include: *Introducing* :data:`LiteralString` * :pep:`681`: Data Class Transforms *Introducing* the :func:`@dataclass_transform<dataclass_transform>` decorator +* :pep:`692`: Using ``TypedDict`` for more precise ``**kwargs`` typing + *Introducing* a new way of typing ``**kwargs`` with :data:`Unpack` and + :data:`TypedDict` * :pep:`698`: Adding an override decorator to typing *Introducing* the :func:`@override<override>` decorator @@ -1417,8 +1420,10 @@ These are not used in annotations. They are building blocks for creating generic tup: tuple[Unpack[Ts]] In fact, ``Unpack`` can be used interchangeably with ``*`` in the context - of types. You might see ``Unpack`` being used explicitly in older versions - of Python, where ``*`` couldn't be used in certain places:: + of :class:`typing.TypeVarTuple <TypeVarTuple>` and + :class:`builtins.tuple <tuple>` types. You might see ``Unpack`` being used + explicitly in older versions of Python, where ``*`` couldn't be used in + certain places:: # In older versions of Python, TypeVarTuple and Unpack # are located in the `typing_extensions` backports package. @@ -1428,6 +1433,21 @@ These are not used in annotations. They are building blocks for creating generic tup: tuple[*Ts] # Syntax error on Python <= 3.10! tup: tuple[Unpack[Ts]] # Semantically equivalent, and backwards-compatible + ``Unpack`` can also be used along with :class:`typing.TypedDict` for typing + ``**kwargs`` in a function signature:: + + from typing import TypedDict, Unpack + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - `name` of type `str` + # and `year` of type `int`. + def foo(**kwargs: Unpack[Movie]): ... 
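    # Hypothetical call sites (not part of the original example) showing how a
    # static type checker reads the unpacked TypedDict; at runtime these are
    # ordinary keyword arguments and all three calls execute without error.
    foo(name="Blade Runner", year=1982)    # accepted
    foo(name="Blade Runner")               # flagged by a type checker: required key 'year' is missing
    foo(name="Blade Runner", year="1982")  # flagged by a type checker: 'year' must be int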
+ + See :pep:`692` for more details on using ``Unpack`` for ``**kwargs`` typing. + .. versionadded:: 3.11 .. class:: ParamSpec(name, *, bound=None, covariant=False, contravariant=False) diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index a7c74cfa4fb477..c70153dfcd69e1 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -2281,7 +2281,8 @@ Loading and running tests The *testRunner* argument can either be a test runner class or an already created instance of it. By default ``main`` calls :func:`sys.exit` with - an exit code indicating success or failure of the tests run. + an exit code indicating success (0) or failure (1) of the tests run. + An exit code of 5 indicates that no tests were run. The *testLoader* argument has to be a :class:`TestLoader` instance, and defaults to :data:`defaultTestLoader`. diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 64cc9c388ec30d..1b05458280d896 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -28,8 +28,8 @@ The :mod:`urllib.request` module defines the following functions: .. function:: urlopen(url, data=None[, timeout], *, cafile=None, capath=None, cadefault=False, context=None) - Open the URL *url*, which can be either a string or a - :class:`Request` object. + Open *url*, which can be either a string containing a valid, properly + encoded URL, or a :class:`Request` object. *data* must be an object specifying additional data to be sent to the server, or ``None`` if no such data is needed. See :class:`Request` @@ -192,7 +192,7 @@ The following classes are provided: This class is an abstraction of a URL request. - *url* should be a string containing a valid URL. + *url* should be a string containing a valid, properly encoded URL. *data* must be an object specifying additional data to send to the server, or ``None`` if no such data is needed. Currently HTTP diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index 52bf99e5bb0f67..9e5672545dea35 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -55,7 +55,7 @@ point to the directories of the virtual environment, whereas :data:`sys.base_prefix` and :data:`sys.base_exec_prefix` point to those of the base Python used to create the environment. It is sufficient to check -``sys.prefix == sys.base_prefix`` to determine if the current interpreter is +``sys.prefix != sys.base_prefix`` to determine if the current interpreter is running from a virtual environment. A virtual environment may be "activated" using a script in its binary directory diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index b22b5251f1de46..57eb5403243eef 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -1077,4 +1077,5 @@ methods to finders and loaders. .. [#fnpic] In legacy code, it is possible to find instances of :class:`imp.NullImporter` in the :data:`sys.path_importer_cache`. It is recommended that code be changed to use ``None`` instead. See - :ref:`portingpythoncode` for more details. + :ref:`portingpythoncode` for more details. Note that the ``imp`` module + was removed in Python 3.12. diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt new file mode 100644 index 00000000000000..d0390a04ea6dd8 --- /dev/null +++ b/Doc/requirements-oldest-sphinx.txt @@ -0,0 +1,38 @@ +# Requirements to build the Python documentation, for the oldest supported +# Sphinx version. +# +# We pin Sphinx and all of its dependencies to ensure a consistent environment. 
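Touching on the :mod:`venv` documentation fix above (``sys.prefix != sys.base_prefix``), a small illustrative helper, offered only as a sketch::

    import sys

    def in_virtual_environment() -> bool:
        # Inside a venv, sys.prefix points at the environment while
        # sys.base_prefix still points at the base interpreter.
        return sys.prefix != sys.base_prefix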
+ +blurb +python-docs-theme>=2022.1 + +# Generated from: +# pip install "Sphinx~=3.2.0" "docutils<0.17" "Jinja2<3" "MarkupSafe<2" +# pip freeze +# +# Sphinx 3.2 comes from ``needs_sphinx = '3.2'`` in ``Doc/conf.py``. +# Docutils<0.17, Jinja2<3, and MarkupSafe<2 are additionally specified as +# Sphinx 3.2 is incompatible with newer releases of these packages. + +Sphinx==3.2.1 +alabaster==0.7.13 +Babel==2.12.1 +certifi==2022.12.7 +charset-normalizer==3.1.0 +colorama==0.4.6 +docutils==0.16 +idna==3.4 +imagesize==1.4.1 +Jinja2==2.11.3 +MarkupSafe==1.1.1 +packaging==23.1 +Pygments==2.15.1 +requests==2.29.0 +snowballstemmer==2.2.0 +sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.5 +urllib3==1.26.15 diff --git a/Doc/requirements.txt b/Doc/requirements.txt index 71d3cd61e53877..9cbd15c2209dc6 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -1,4 +1,7 @@ # Requirements to build the Python documentation +# +# Note that when updating this file, you will likely also have to update +# the Doc/constraints.txt file. # Sphinx version is pinned so that new versions that introduce new warnings # won't suddenly cause build failures. Updating the version is fine as long @@ -13,3 +16,5 @@ sphinxext-opengraph==0.7.5 # The theme used by the documentation is stored separately, so we need # to install that as well. python-docs-theme>=2022.1 + +-c constraints.txt diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index f3350174f931aa..1d3503bf06f085 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -149,7 +149,6 @@ Doc/library/http.cookiejar.rst Doc/library/http.cookies.rst Doc/library/http.server.rst Doc/library/idle.rst -Doc/library/imp.rst Doc/library/importlib.resources.abc.rst Doc/library/importlib.resources.rst Doc/library/importlib.rst diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html index 460161cd320223..b91f8138553e62 100644 --- a/Doc/tools/templates/layout.html +++ b/Doc/tools/templates/layout.html @@ -11,11 +11,6 @@ {%- if is_deployment_preview %} <div id="deployment-preview-warning" style="padding: .5em; text-align: center; background-color: #fff2ba; color: #6a580e;"> - <div style="float: right; margin-top: -10px; margin-left: 10px;"> - <a href="https://www.netlify.com"> - <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" /> - </a> - </div> {% trans %}This is a deploy preview created from a <a href="{{ repository_url }}/pull/{{ pr_id }}">pull request</a>. For authoritative documentation, see the {% endtrans %} <a href="https://docs.python.org/3/{{ pagename }}{{ file_suffix }}">{% trans %} the current stable release{% endtrans %}</a>. diff --git a/Doc/using/unix.rst b/Doc/using/unix.rst index 067ff4cce5e48d..0044eb07f56eec 100644 --- a/Doc/using/unix.rst +++ b/Doc/using/unix.rst @@ -54,13 +54,6 @@ On FreeBSD and OpenBSD pkg_add ftp://ftp.openbsd.org/pub/OpenBSD/4.2/packages/i386/python-2.5.1p2.tgz -On OpenSolaris --------------- - -You can get Python from `OpenCSW <https://www.opencsw.org/>`_. Various versions -of Python are available and can be installed with e.g. ``pkgutil -i python27``. - - .. _building-python-on-unix: Building Python diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 373e31b37cd9dc..f4ee30b0d4d9eb 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -66,6 +66,10 @@ Summary -- Release highlights .. 
PEP-sized items next. +New typing features: + +* :ref:`whatsnew312-pep692` + Important deprecations, removals or restrictions: * :pep:`623`, Remove wstr from Unicode @@ -145,6 +149,36 @@ New Features In Python 3.14, the default will switch to ``'data'``. (Contributed by Petr Viktorin in :pep:`706`.) +New Features Related to Type Hints +================================== + +This section covers major changes affecting :pep:`484` type hints and +the :mod:`typing` module. + +.. _whatsnew312-pep692: + +PEP 692: Using ``TypedDict`` for more precise ``**kwargs`` typing +----------------------------------------------------------------- + +Typing ``**kwargs`` in a function signature as introduced by :pep:`484` allowed +for valid annotations only in cases where all of the ``**kwargs`` were of the +same type. + +This PEP specifies a more precise way of typing ``**kwargs`` by relying on +typed dictionaries:: + + from typing import TypedDict, Unpack + + class Movie(TypedDict): + name: str + year: int + + def foo(**kwargs: Unpack[Movie]): ... + +See :pep:`692` for more details. + +(PEP written by Franek Magiera) + Other Language Changes ====================== @@ -203,6 +237,11 @@ Other Language Changes wrapped by a :exc:`RuntimeError`. Context information is added to the exception as a :pep:`678` note. (Contributed by Irit Katriel in :gh:`77757`.) +* When a ``try-except*`` construct handles the entire :exc:`ExceptionGroup` + and raises one other exception, that exception is no longer wrapped in an + :exc:`ExceptionGroup`. (Contributed by Irit Katriel in :gh:`103590`.) + + New Modules =========== @@ -261,6 +300,12 @@ asyncio yielding tasks. (Contributed by Kumar Aditya in :gh:`78530`.) +calendar +-------- + +* Add enums :data:`~calendar.Month` and :data:`~calendar.Day`. + (Contributed by Prince Roshan in :gh:`103636`.) + csv --- @@ -406,6 +451,16 @@ sqlite3 :ref:`transaction handling <sqlite3-transaction-control-autocommit>`. (Contributed by Erlend E. Aasland in :gh:`83638`.) +* Add *entrypoint* keyword-only parameter to + :meth:`~sqlite3.Connection.load_extension`, + for overriding the SQLite extension entry point. + (Contributed by Erlend E. Aasland in :gh:`103015`.) + +* Add :meth:`~sqlite3.Connection.getconfig` and + :meth:`~sqlite3.Connection.setconfig` to :class:`~sqlite3.Connection` + to make configuration changes to a database connection. + (Contributed by Erlend E. Aasland in :gh:`103489`.) + threading --------- @@ -457,8 +512,10 @@ uuid tempfile -------- -The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter -*delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.) +* The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter + *delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.) +* :func:`tempfile.mkdtemp` now always returns an absolute path, even if the + argument provided to the *dir* parameter is a relative path. .. _whatsnew-typing-py312: @@ -553,6 +610,9 @@ Optimizations replacement strings containing group references by 2--3 times. (Contributed by Serhiy Storchaka in :gh:`91524`.) +* Speed up :class:`asyncio.Task` creation by deferring expensive string formatting. + (Contributed by Itamar O in :gh:`103793`.) + CPython bytecode changes ======================== @@ -641,6 +701,9 @@ Deprecated Python 3.14, when ``'data'`` filter will become the default. See :ref:`tarfile-extraction-filter` for details. 
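As a hedged illustration of the tarfile deprecation noted above, code can opt in to the future default explicitly today (the archive and destination names here are hypothetical)::

    import tarfile

    with tarfile.open("release.tar.gz") as tar:
        # Passing filter="data" selects the safer extraction behaviour
        # that is planned to become the default in Python 3.14.
        tar.extractall(path="dest", filter="data")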
+* ``calendar.January`` and ``calendar.February`` constants are deprecated and + replaced by :data:`calendar.Month.JANUARY` and :data:`calendar.Month.FEBRUARY`. + (Contributed by Prince Roshan in :gh:`103636`.) Pending Removal in Python 3.13 ------------------------------ @@ -910,11 +973,14 @@ Removed completed: * References to, and support for ``module_repr()`` has been eradicated. - + (Contributed by Barry Warsaw in :gh:`97850`.) * ``importlib.util.set_package`` has been removed. (Contributed by Brett Cannon in :gh:`65961`.) +* The ``imp`` module has been removed. (Contributed by Barry Warsaw in + :gh:`98040`.) + * Removed the ``suspicious`` rule from the documentation Makefile, and removed ``Doc/tools/rstlint.py``, both in favor of `sphinx-lint <https://github.com/sphinx-contrib/sphinx-lint>`_. diff --git a/Grammar/python.gram b/Grammar/python.gram index 3a356c65a75195..6361dcd0985b99 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -881,14 +881,13 @@ fstring_middle[expr_ty]: | fstring_replacement_field | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) } fstring_replacement_field[expr_ty]: - | '{' a=(yield_expr | star_expressions) debug_expr="="? conversion=[fstring_conversion] format=[fstring_full_format_spec] '}' { - _PyPegen_formatted_value(p, a, debug_expr, conversion, format, EXTRA) - } + | '{' a=(yield_expr | star_expressions) debug_expr="="? conversion=[fstring_conversion] format=[fstring_full_format_spec] rbrace='}' { + _PyPegen_formatted_value(p, a, debug_expr, conversion, format, rbrace, EXTRA) } | invalid_replacement_field -fstring_conversion[expr_ty]: +fstring_conversion[ResultTokenWithMetadata*]: | conv_token="!" conv=NAME { _PyPegen_check_fstring_conversion(p, conv_token, conv) } -fstring_full_format_spec[expr_ty]: - | ':' spec=fstring_format_spec* { spec ? _PyAST_JoinedStr((asdl_expr_seq*)spec, EXTRA) : NULL } +fstring_full_format_spec[ResultTokenWithMetadata*]: + | colon=':' spec=fstring_format_spec* { _PyPegen_setup_full_format_spec(p, colon, (asdl_expr_seq *) spec, EXTRA) } fstring_format_spec[expr_ty]: | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) } | fstring_replacement_field diff --git a/Include/internal/pycore_bytesobject.h b/Include/internal/pycore_bytesobject.h index 9173a4f105f800..d36fa9569d64a5 100644 --- a/Include/internal/pycore_bytesobject.h +++ b/Include/internal/pycore_bytesobject.h @@ -9,11 +9,6 @@ extern "C" { #endif -/* runtime lifecycle */ - -extern PyStatus _PyBytes_InitTypes(PyInterpreterState *); - - /* Substring Search. 
Returns the index of the first occurrence of diff --git a/Include/internal/pycore_call.h b/Include/internal/pycore_call.h index 55378e3dfebf24..5d9342b562b002 100644 --- a/Include/internal/pycore_call.h +++ b/Include/internal/pycore_call.h @@ -116,6 +116,16 @@ _PyObject_FastCallTstate(PyThreadState *tstate, PyObject *func, PyObject *const return _PyObject_VectorcallTstate(tstate, func, args, (size_t)nargs, NULL); } +PyObject *const * +_PyStack_UnpackDict(PyThreadState *tstate, + PyObject *const *args, Py_ssize_t nargs, + PyObject *kwargs, PyObject **p_kwnames); + +void +_PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, + PyObject *kwnames); + +void _PyStack_UnpackDict_FreeNoDecRef(PyObject *const *stack, PyObject *kwnames); #ifdef __cplusplus } diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index d32f37ac44d83c..86fd48b63ef8e4 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -51,6 +51,15 @@ typedef struct { #define INLINE_CACHE_ENTRIES_BINARY_SUBSCR CACHE_ENTRIES(_PyBinarySubscrCache) +typedef struct { + uint16_t counter; + uint16_t class_version[2]; + uint16_t self_type_version[2]; + uint16_t method[4]; +} _PySuperAttrCache; + +#define INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR CACHE_ENTRIES(_PySuperAttrCache) + typedef struct { uint16_t counter; uint16_t version[2]; @@ -217,6 +226,8 @@ extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range); /* Specialization functions */ +extern void _Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, PyObject *self, + _Py_CODEUNIT *instr, PyObject *name, int load_method); extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name); extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index f85240c48a89b0..1a032f652dddaf 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -19,6 +19,7 @@ PyAPI_FUNC(PyCodeObject*) _PyAST_Compile( int optimize, struct _arena *arena); +static const _PyCompilerSrcLocation NO_LOCATION = {-1, -1, -1, -1}; typedef struct { int optimize; @@ -33,15 +34,21 @@ extern int _PyAST_Optimize( struct _arena *arena, _PyASTOptimizeState *state); +typedef struct { + int h_offset; + int h_startdepth; + int h_preserve_lasti; +} _PyCompile_ExceptHandlerInfo; typedef struct { int i_opcode; int i_oparg; _PyCompilerSrcLocation i_loc; -} _PyCompilerInstruction; + _PyCompile_ExceptHandlerInfo i_except_handler_info; +} _PyCompile_Instruction; typedef struct { - _PyCompilerInstruction *s_instrs; + _PyCompile_Instruction *s_instrs; int s_allocated; int s_used; @@ -82,6 +89,8 @@ int _PyCompile_EnsureArrayLargeEnough( int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj); +int _PyCompile_InstrSize(int opcode, int oparg); + /* Access compiler internals for unit testing */ PyAPI_FUNC(PyObject*) _PyCompile_CodeGen( diff --git a/Include/internal/pycore_fileutils_windows.h b/Include/internal/pycore_fileutils_windows.h index 9bc7feb8cecd01..e804d385e76708 100644 --- a/Include/internal/pycore_fileutils_windows.h +++ b/Include/internal/pycore_fileutils_windows.h @@ -75,6 +75,24 @@ static inline BOOL _Py_GetFileInformationByName( return GetFileInformationByName(FileName, FileInformationClass, FileInfoBuffer, FileInfoBufferSize); } +static inline BOOL _Py_GetFileInformationByName_ErrorIsTrustworthy(int error) +{ + switch(error) { + case ERROR_FILE_NOT_FOUND: + case 
ERROR_PATH_NOT_FOUND: + case ERROR_NOT_READY: + case ERROR_BAD_NET_NAME: + case ERROR_BAD_NETPATH: + case ERROR_BAD_PATHNAME: + case ERROR_INVALID_NAME: + case ERROR_FILENAME_EXCED_RANGE: + return TRUE; + case ERROR_NOT_SUPPORTED: + return FALSE; + } + return FALSE; +} + #endif #endif diff --git a/Include/internal/pycore_flowgraph.h b/Include/internal/pycore_flowgraph.h index f470dad3aaa459..883334f4b182eb 100644 --- a/Include/internal/pycore_flowgraph.h +++ b/Include/internal/pycore_flowgraph.h @@ -11,7 +11,6 @@ extern "C" { #include "pycore_opcode_utils.h" #include "pycore_compile.h" -static const _PyCompilerSrcLocation NO_LOCATION = {-1, -1, -1, -1}; typedef struct { int i_opcode; @@ -97,7 +96,6 @@ int _PyCfg_OptimizeCodeUnit(_PyCfgBuilder *g, PyObject *consts, PyObject *const_ int _PyCfg_Stackdepth(_PyCfgBasicblock *entryblock, int code_flags); void _PyCfg_ConvertExceptionHandlersToNops(_PyCfgBasicblock *entryblock); int _PyCfg_ResolveJumps(_PyCfgBuilder *g); -int _PyCfg_InstrSize(_PyCfgInstruction *instruction); static inline int @@ -113,7 +111,7 @@ basicblock_nofallthrough(const _PyCfgBasicblock *b) { PyCodeObject * _PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *u, PyObject *const_cache, - PyObject *consts, int maxdepth, _PyCfgBasicblock *entryblock, + PyObject *consts, int maxdepth, _PyCompile_InstructionSequence *instrs, int nlocalsplus, int code_flags, PyObject *filename); #ifdef __cplusplus diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 20d48d20362571..d8d7fe9ef2ebde 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -145,9 +145,9 @@ _PyFrame_GetLocalsArray(_PyInterpreterFrame *frame) } /* Fetches the stack pointer, and sets stacktop to -1. - Having stacktop <= 0 ensures that invalid - values are not visible to the cycle GC. - We choose -1 rather than 0 to assist debugging. */ + Having stacktop <= 0 ensures that invalid + values are not visible to the cycle GC. + We choose -1 rather than 0 to assist debugging. 
*/ static inline PyObject** _PyFrame_GetStackPointer(_PyInterpreterFrame *frame) { diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index fdfa80bd7d424a..4fa15d74b3ad64 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -890,6 +890,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_lineno)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_offset)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(endpos)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(entrypoint)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(env)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(errors)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(event)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index 6f430bb25eb8d3..e19d8ff1b50468 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -378,6 +378,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(end_lineno) STRUCT_FOR_ID(end_offset) STRUCT_FOR_ID(endpos) + STRUCT_FOR_ID(entrypoint) STRUCT_FOR_ID(env) STRUCT_FOR_ID(errors) STRUCT_FOR_ID(event) diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index 7a78a91aa617e6..0a9f24efbdb908 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -19,6 +19,8 @@ struct _import_runtime_state { used exclusively for when the extensions dict is access/modified from an arbitrary thread. */ PyThreadState main_tstate; + /* A lock to guard the dict. */ + PyThread_type_lock mutex; /* A dict mapping (filename, name) to PyModuleDef for modules. 
Only legacy (single-phase init) extension modules are added and only if they support multiple initialization (m_size >- 0) diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h index 529b08fadb3ad7..a82885463ab2e9 100644 --- a/Include/internal/pycore_opcode.h +++ b/Include/internal/pycore_opcode.h @@ -42,6 +42,7 @@ const uint8_t _PyOpcode_Caches[256] = { [LOAD_GLOBAL] = 4, [BINARY_OP] = 1, [SEND] = 1, + [LOAD_SUPER_ATTR] = 9, [CALL] = 3, }; @@ -180,6 +181,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [LOAD_GLOBAL_MODULE] = LOAD_GLOBAL, [LOAD_NAME] = LOAD_NAME, [LOAD_SUPER_ATTR] = LOAD_SUPER_ATTR, + [LOAD_SUPER_ATTR_METHOD] = LOAD_SUPER_ATTR, [MAKE_CELL] = MAKE_CELL, [MAKE_FUNCTION] = MAKE_FUNCTION, [MAP_ADD] = MAP_ADD, @@ -304,29 +306,29 @@ static const char *const _PyOpcode_OpName[266] = { [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE", [FOR_ITER_RANGE] = "FOR_ITER_RANGE", [FOR_ITER_GEN] = "FOR_ITER_GEN", + [LOAD_SUPER_ATTR_METHOD] = "LOAD_SUPER_ATTR_METHOD", [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", - [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", [GET_ITER] = "GET_ITER", [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", - [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", + [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", + [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", [LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE", - [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY", [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", [RETURN_GENERATOR] = "RETURN_GENERATOR", + [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY", [LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT", [LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT", [LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT", [LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT", [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES", [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST", - [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST", [RETURN_VALUE] = "RETURN_VALUE", - [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST", + [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST", [SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS", + [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST", [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", - [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", [POP_EXCEPT] = "POP_EXCEPT", [STORE_NAME] = "STORE_NAME", [DELETE_NAME] = "DELETE_NAME", @@ -349,9 +351,9 @@ static const char *const _PyOpcode_OpName[266] = { [IMPORT_NAME] = "IMPORT_NAME", [IMPORT_FROM] = "IMPORT_FROM", [JUMP_FORWARD] = "JUMP_FORWARD", + [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", - [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", [POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE", [POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE", [LOAD_GLOBAL] = "LOAD_GLOBAL", @@ -381,7 +383,7 @@ static const char *const _PyOpcode_OpName[266] = { [JUMP_BACKWARD] = "JUMP_BACKWARD", [LOAD_SUPER_ATTR] = "LOAD_SUPER_ATTR", [CALL_FUNCTION_EX] = "CALL_FUNCTION_EX", - [STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST", + [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", [EXTENDED_ARG] = "EXTENDED_ARG", [LIST_APPEND] = "LIST_APPEND", [SET_ADD] = "SET_ADD", @@ -391,20 +393,20 @@ static const char *const _PyOpcode_OpName[266] = { [YIELD_VALUE] = "YIELD_VALUE", [RESUME] = "RESUME", [MATCH_CLASS] = "MATCH_CLASS", + [STORE_FAST__STORE_FAST] = 
"STORE_FAST__STORE_FAST", [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", - [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", [FORMAT_VALUE] = "FORMAT_VALUE", [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP", [BUILD_STRING] = "BUILD_STRING", + [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST", [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", - [SEND_GEN] = "SEND_GEN", [LIST_EXTEND] = "LIST_EXTEND", [SET_UPDATE] = "SET_UPDATE", [DICT_MERGE] = "DICT_MERGE", [DICT_UPDATE] = "DICT_UPDATE", - [166] = "<166>", + [SEND_GEN] = "SEND_GEN", [167] = "<167>", [168] = "<168>", [169] = "<169>", @@ -508,7 +510,6 @@ static const char *const _PyOpcode_OpName[266] = { #endif #define EXTRA_CASES \ - case 166: \ case 167: \ case 168: \ case 169: \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 0452c4c61551de..42c4874d9466bf 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -884,6 +884,7 @@ extern "C" { INIT_ID(end_lineno), \ INIT_ID(end_offset), \ INIT_ID(endpos), \ + INIT_ID(entrypoint), \ INIT_ID(env), \ INIT_ID(errors), \ INIT_ID(event), \ diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h index edc70843b57531..335edad89792c3 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -14,7 +14,6 @@ extern "C" { /* runtime lifecycle */ extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *); -extern PyStatus _PyTuple_InitTypes(PyInterpreterState *); extern void _PyTuple_Fini(PyInterpreterState *); diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index fa59c533f9e603..76253fd5fd864c 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -119,6 +119,8 @@ PyObject *_Py_slot_tp_getattr_hook(PyObject *self, PyObject *name); PyObject * _PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *meth_found); +PyObject * +_PySuper_LookupDescr(PyTypeObject *su_type, PyObject *su_obj, PyObject *name); #ifdef __cplusplus } diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index 7114a5416f2515..6d9cd24d9f3a13 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -987,6 +987,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(endpos); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(entrypoint); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(env); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); diff --git a/Include/opcode.h b/Include/opcode.h index 76e9bc870d4bed..37a9e9bffa4cb7 100644 --- a/Include/opcode.h +++ b/Include/opcode.h @@ -183,32 +183,33 @@ extern "C" { #define FOR_ITER_TUPLE 63 #define FOR_ITER_RANGE 64 #define FOR_ITER_GEN 65 -#define LOAD_ATTR_CLASS 66 -#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 67 -#define LOAD_ATTR_INSTANCE_VALUE 70 -#define LOAD_ATTR_MODULE 72 -#define LOAD_ATTR_PROPERTY 73 -#define LOAD_ATTR_SLOT 76 -#define LOAD_ATTR_WITH_HINT 77 -#define LOAD_ATTR_METHOD_LAZY_DICT 78 -#define LOAD_ATTR_METHOD_NO_DICT 79 -#define LOAD_ATTR_METHOD_WITH_VALUES 80 -#define 
LOAD_CONST__LOAD_FAST 81 -#define LOAD_FAST__LOAD_CONST 82 -#define LOAD_FAST__LOAD_FAST 84 -#define LOAD_GLOBAL_BUILTIN 86 -#define LOAD_GLOBAL_MODULE 87 -#define STORE_ATTR_INSTANCE_VALUE 88 -#define STORE_ATTR_SLOT 111 -#define STORE_ATTR_WITH_HINT 112 -#define STORE_FAST__LOAD_FAST 113 -#define STORE_FAST__STORE_FAST 143 -#define STORE_SUBSCR_DICT 153 -#define STORE_SUBSCR_LIST_INT 154 -#define UNPACK_SEQUENCE_LIST 158 -#define UNPACK_SEQUENCE_TUPLE 159 -#define UNPACK_SEQUENCE_TWO_TUPLE 160 -#define SEND_GEN 161 +#define LOAD_SUPER_ATTR_METHOD 66 +#define LOAD_ATTR_CLASS 67 +#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 70 +#define LOAD_ATTR_INSTANCE_VALUE 72 +#define LOAD_ATTR_MODULE 73 +#define LOAD_ATTR_PROPERTY 76 +#define LOAD_ATTR_SLOT 77 +#define LOAD_ATTR_WITH_HINT 78 +#define LOAD_ATTR_METHOD_LAZY_DICT 79 +#define LOAD_ATTR_METHOD_NO_DICT 80 +#define LOAD_ATTR_METHOD_WITH_VALUES 81 +#define LOAD_CONST__LOAD_FAST 82 +#define LOAD_FAST__LOAD_CONST 84 +#define LOAD_FAST__LOAD_FAST 86 +#define LOAD_GLOBAL_BUILTIN 87 +#define LOAD_GLOBAL_MODULE 88 +#define STORE_ATTR_INSTANCE_VALUE 111 +#define STORE_ATTR_SLOT 112 +#define STORE_ATTR_WITH_HINT 113 +#define STORE_FAST__LOAD_FAST 143 +#define STORE_FAST__STORE_FAST 153 +#define STORE_SUBSCR_DICT 154 +#define STORE_SUBSCR_LIST_INT 158 +#define UNPACK_SEQUENCE_LIST 159 +#define UNPACK_SEQUENCE_TUPLE 160 +#define UNPACK_SEQUENCE_TWO_TUPLE 161 +#define SEND_GEN 166 #define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\ || ((op) == JUMP) \ diff --git a/Lib/_strptime.py b/Lib/_strptime.py index b97dfcce1e8e4d..77ccdc9e1d789b 100644 --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -290,22 +290,6 @@ def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon): return 1 + days_to_week + day_of_week -def _calc_julian_from_V(iso_year, iso_week, iso_weekday): - """Calculate the Julian day based on the ISO 8601 year, week, and weekday. - ISO weeks start on Mondays, with week 01 being the week containing 4 Jan. - ISO week days range from 1 (Monday) to 7 (Sunday). - """ - correction = datetime_date(iso_year, 1, 4).isoweekday() + 3 - ordinal = (iso_week * 7) + iso_weekday - correction - # ordinal may be negative or 0 now, which means the date is in the previous - # calendar year - if ordinal < 1: - ordinal += datetime_date(iso_year, 1, 1).toordinal() - iso_year -= 1 - ordinal -= datetime_date(iso_year, 1, 1).toordinal() - return iso_year, ordinal - - def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): """Return a 2-tuple consisting of a time struct and an int containing the number of microseconds based on the input string and the @@ -483,7 +467,8 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): else: tz = value break - # Deal with the cases where ambiguities arize + + # Deal with the cases where ambiguities arise # don't assume default values for ISO week/year if year is None and iso_year is not None: if iso_week is None or weekday is None: @@ -511,7 +496,6 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): elif year is None: year = 1900 - # If we know the week of the year and what day of that week, we can figure # out the Julian day of the year. 
if julian is None and weekday is not None: @@ -520,7 +504,10 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): julian = _calc_julian_from_U_or_W(year, week_of_year, weekday, week_starts_Mon) elif iso_year is not None and iso_week is not None: - year, julian = _calc_julian_from_V(iso_year, iso_week, weekday + 1) + datetime_result = datetime_date.fromisocalendar(iso_year, iso_week, weekday + 1) + year = datetime_result.year + month = datetime_result.month + day = datetime_result.day if julian is not None and julian <= 0: year -= 1 yday = 366 if calendar.isleap(year) else 365 diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index 3a697129e4c914..fa2422b7fba4a7 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -794,6 +794,8 @@ def __init__(self, loop, sock, protocol, extra=None, server=None): self._buffer = collections.deque() self._conn_lost = 0 # Set when call to connection_lost scheduled. self._closing = False # Set when close() called. + self._paused = False # Set when pause_reading() called + if self._server is not None: self._server._attach() loop._transports[self._sock_fd] = self @@ -839,6 +841,25 @@ def get_protocol(self): def is_closing(self): return self._closing + def is_reading(self): + return not self.is_closing() and not self._paused + + def pause_reading(self): + if not self.is_reading(): + return + self._paused = True + self._loop._remove_reader(self._sock_fd) + if self._loop.get_debug(): + logger.debug("%r pauses reading", self) + + def resume_reading(self): + if self._closing or not self._paused: + return + self._paused = False + self._add_reader(self._sock_fd, self._read_ready) + if self._loop.get_debug(): + logger.debug("%r resumes reading", self) + def close(self): if self._closing: return @@ -898,9 +919,8 @@ def get_write_buffer_size(self): return sum(map(len, self._buffer)) def _add_reader(self, fd, callback, *args): - if self._closing: + if not self.is_reading(): return - self._loop._add_reader(fd, callback, *args) @@ -915,7 +935,6 @@ def __init__(self, loop, sock, protocol, waiter=None, self._read_ready_cb = None super().__init__(loop, sock, protocol, extra, server) self._eof = False - self._paused = False self._empty_waiter = None if _HAS_SENDMSG: self._write_ready = self._write_sendmsg @@ -943,25 +962,6 @@ def set_protocol(self, protocol): super().set_protocol(protocol) - def is_reading(self): - return not self._paused and not self._closing - - def pause_reading(self): - if self._closing or self._paused: - return - self._paused = True - self._loop._remove_reader(self._sock_fd) - if self._loop.get_debug(): - logger.debug("%r pauses reading", self) - - def resume_reading(self): - if self._closing or not self._paused: - return - self._paused = False - self._add_reader(self._sock_fd, self._read_ready) - if self._loop.get_debug(): - logger.debug("%r resumes reading", self) - def _read_ready(self): self._read_ready_cb() diff --git a/Lib/asyncio/subprocess.py b/Lib/asyncio/subprocess.py index cd10231f710f11..50727ca300e63e 100644 --- a/Lib/asyncio/subprocess.py +++ b/Lib/asyncio/subprocess.py @@ -144,10 +144,11 @@ def kill(self): async def _feed_stdin(self, input): debug = self._loop.get_debug() - self.stdin.write(input) - if debug: - logger.debug( - '%r communicate: feed stdin (%s bytes)', self, len(input)) + if input is not None: + self.stdin.write(input) + if debug: + logger.debug( + '%r communicate: feed stdin (%s bytes)', self, len(input)) try: await self.stdin.drain() except (BrokenPipeError, 
ConnectionResetError) as exc: @@ -180,7 +181,7 @@ async def _read_stream(self, fd): return output async def communicate(self, input=None): - if input is not None: + if self.stdin is not None: stdin = self._feed_stdin(input) else: stdin = self._noop() diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index b21e0394141bf4..17fb4d5f7646ce 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -485,13 +485,21 @@ def __init__(self, loop, pipe, protocol, waiter=None, extra=None): self._loop.call_soon(self._protocol.connection_made, self) # only start reading when connection_made() has been called - self._loop.call_soon(self._loop._add_reader, + self._loop.call_soon(self._add_reader, self._fileno, self._read_ready) if waiter is not None: # only wake up the waiter when connection_made() has been called self._loop.call_soon(futures._set_result_unless_cancelled, waiter, None) + def _add_reader(self, fd, callback): + if not self.is_reading(): + return + self._loop._add_reader(fd, callback) + + def is_reading(self): + return not self._paused and not self._closing + def __repr__(self): info = [self.__class__.__name__] if self._pipe is None: @@ -532,7 +540,7 @@ def _read_ready(self): self._loop.call_soon(self._call_connection_lost, None) def pause_reading(self): - if self._closing or self._paused: + if not self.is_reading(): return self._paused = True self._loop._remove_reader(self._fileno) diff --git a/Lib/cProfile.py b/Lib/cProfile.py index f7000a8bfa0ddb..135a12c3965c00 100755 --- a/Lib/cProfile.py +++ b/Lib/cProfile.py @@ -8,6 +8,7 @@ import _lsprof import importlib.machinery +import io import profile as _pyprofile # ____________________________________________________________ @@ -168,7 +169,7 @@ def main(): else: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) - with open(progname, 'rb') as fp: + with io.open_code(progname) as fp: code = compile(fp.read(), progname, 'exec') spec = importlib.machinery.ModuleSpec(name='__main__', loader=None, origin=progname) diff --git a/Lib/calendar.py b/Lib/calendar.py index 657396439c91fc..bbd4fea3b88ca4 100644 --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -7,8 +7,10 @@ import sys import datetime +from enum import IntEnum, global_enum import locale as _locale from itertools import repeat +import warnings __all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday", "firstweekday", "isleap", "leapdays", "weekday", "monthrange", @@ -16,6 +18,9 @@ "timegm", "month_name", "month_abbr", "day_name", "day_abbr", "Calendar", "TextCalendar", "HTMLCalendar", "LocaleTextCalendar", "LocaleHTMLCalendar", "weekheader", + "Day", "Month", "JANUARY", "FEBRUARY", "MARCH", + "APRIL", "MAY", "JUNE", "JULY", + "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY"] @@ -37,9 +42,47 @@ def __str__(self): return "bad weekday number %r; must be 0 (Monday) to 6 (Sunday)" % self.weekday -# Constants for months referenced later -January = 1 -February = 2 +def __getattr__(name): + if name in ('January', 'February'): + warnings.warn(f"The '{name}' attribute is deprecated, use '{name.upper()}' instead", + DeprecationWarning, stacklevel=2) + if name == 'January': + return 1 + else: + return 2 + + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +# Constants for months +@global_enum +class Month(IntEnum): + JANUARY = 1 + FEBRUARY = 2 + MARCH = 3 + APRIL = 4 + MAY = 5 + JUNE = 6 + JULY = 7 + AUGUST = 8 + SEPTEMBER = 9 + 
OCTOBER = 10 + NOVEMBER = 11 + DECEMBER = 12 + + +# Constants for days +@global_enum +class Day(IntEnum): + MONDAY = 0 + TUESDAY = 1 + WEDNESDAY = 2 + THURSDAY = 3 + FRIDAY = 4 + SATURDAY = 5 + SUNDAY = 6 + + # Number of days per month (except for February in leap years) mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] @@ -95,9 +138,6 @@ def __len__(self): month_name = _localized_month('%B') month_abbr = _localized_month('%b') -# Constants for weekdays -(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7) - def isleap(year): """Return True for leap years, False for non-leap years.""" @@ -125,12 +165,12 @@ def monthrange(year, month): if not 1 <= month <= 12: raise IllegalMonthError(month) day1 = weekday(year, month, 1) - ndays = mdays[month] + (month == February and isleap(year)) + ndays = mdays[month] + (month == FEBRUARY and isleap(year)) return day1, ndays def _monthlen(year, month): - return mdays[month] + (month == February and isleap(year)) + return mdays[month] + (month == FEBRUARY and isleap(year)) def _prevmonth(year, month): @@ -260,10 +300,7 @@ def yeardatescalendar(self, year, width=3): Each month contains between 4 and 6 weeks and each week contains 1-7 days. Days are datetime.date objects. """ - months = [ - self.monthdatescalendar(year, i) - for i in range(January, January+12) - ] + months = [self.monthdatescalendar(year, m) for m in Month] return [months[i:i+width] for i in range(0, len(months), width) ] def yeardays2calendar(self, year, width=3): @@ -273,10 +310,7 @@ def yeardays2calendar(self, year, width=3): (day number, weekday number) tuples. Day numbers outside this month are zero. """ - months = [ - self.monthdays2calendar(year, i) - for i in range(January, January+12) - ] + months = [self.monthdays2calendar(year, m) for m in Month] return [months[i:i+width] for i in range(0, len(months), width) ] def yeardayscalendar(self, year, width=3): @@ -285,10 +319,7 @@ def yeardayscalendar(self, year, width=3): yeardatescalendar()). Entries in the week lists are day numbers. Day numbers outside this month are zero. 
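Stepping back from the hunks above: the new :class:`calendar.Month` and :class:`calendar.Day` enums can be used anywhere the old integer constants were accepted, since they are ``IntEnum`` members. A brief sketch, not part of the patch::

    import calendar

    feb = calendar.Month.FEBRUARY   # also exported at module level as calendar.FEBRUARY
    assert feb == 2                 # IntEnum members compare equal to plain ints
    assert calendar.Day.MONDAY == 0
    # monthrange() accepts the enum like a bare int:
    # (weekday of the 1st, number of days in the month)
    assert calendar.monthrange(2024, feb) == (3, 29)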
""" - months = [ - self.monthdayscalendar(year, i) - for i in range(January, January+12) - ] + months = [self.monthdayscalendar(year, m) for m in Month] return [months[i:i+width] for i in range(0, len(months), width) ] @@ -509,7 +540,7 @@ def formatyear(self, theyear, width=3): a('\n') a('<tr><th colspan="%d" class="%s">%s</th></tr>' % ( width, self.cssclass_year_head, theyear)) - for i in range(January, January+12, width): + for i in range(JANUARY, JANUARY+12, width): # months in this row months = range(i, min(i+width, 13)) a('<tr>') diff --git a/Lib/curses/textpad.py b/Lib/curses/textpad.py index 2079953a06614b..aa87061b8d749e 100644 --- a/Lib/curses/textpad.py +++ b/Lib/curses/textpad.py @@ -102,7 +102,10 @@ def do_command(self, ch): self._insert_printable_char(ch) elif ch == curses.ascii.SOH: # ^a self.win.move(y, 0) - elif ch in (curses.ascii.STX,curses.KEY_LEFT, curses.ascii.BS,curses.KEY_BACKSPACE): + elif ch in (curses.ascii.STX,curses.KEY_LEFT, + curses.ascii.BS, + curses.KEY_BACKSPACE, + curses.ascii.DEL): if x > 0: self.win.move(y, x-1) elif y == 0: @@ -111,7 +114,7 @@ def do_command(self, ch): self.win.move(y-1, self._end_of_line(y-1)) else: self.win.move(y-1, self.maxx) - if ch in (curses.ascii.BS, curses.KEY_BACKSPACE): + if ch in (curses.ascii.BS, curses.KEY_BACKSPACE, curses.ascii.DEL): self.win.delch() elif ch == curses.ascii.EOT: # ^d self.win.delch() diff --git a/Lib/datetime.py b/Lib/datetime.py index 09a2d2d5381c34..b0eb1c216a689d 100644 --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1801,6 +1801,13 @@ def fromtimestamp(cls, timestamp, tz=None): @classmethod def utcfromtimestamp(cls, t): """Construct a naive UTC datetime from a POSIX timestamp.""" + import warnings + warnings.warn("datetime.utcfromtimestamp() is deprecated and scheduled " + "for removal in a future version. Use timezone-aware " + "objects to represent datetimes in UTC: " + "datetime.fromtimestamp(t, datetime.UTC).", + DeprecationWarning, + stacklevel=2) return cls._fromtimestamp(t, True, None) @classmethod @@ -1812,8 +1819,15 @@ def now(cls, tz=None): @classmethod def utcnow(cls): "Construct a UTC datetime from time.time()." + import warnings + warnings.warn("datetime.utcnow() is deprecated and scheduled for " + "removal in a future version. Instead, Use timezone-aware " + "objects to represent datetimes in UTC: " + "datetime.now(datetime.UTC).", + DeprecationWarning, + stacklevel=2) t = _time.time() - return cls.utcfromtimestamp(t) + return cls._fromtimestamp(t, True, None) @classmethod def combine(cls, date, time, tzinfo=True): diff --git a/Lib/dis.py b/Lib/dis.py index 8af84c00d0cf64..85c109584bf94f 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -369,9 +369,8 @@ def _get_const_value(op, arg, co_consts): assert op in hasconst argval = UNKNOWN - if op == LOAD_CONST or op == RETURN_CONST: - if co_consts is not None: - argval = co_consts[arg] + if co_consts is not None: + argval = co_consts[arg] return argval def _get_const_info(op, arg, co_consts): diff --git a/Lib/email/utils.py b/Lib/email/utils.py index 4d014bacd6182e..81da5394ea1695 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -143,13 +143,13 @@ def formatdate(timeval=None, localtime=False, usegmt=False): # 2822 requires that day and month names be the English abbreviations. 
if timeval is None: timeval = time.time() - if localtime or usegmt: - dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc) - else: - dt = datetime.datetime.utcfromtimestamp(timeval) + dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc) + if localtime: dt = dt.astimezone() usegmt = False + elif not usegmt: + dt = dt.replace(tzinfo=None) return format_datetime(dt, usegmt) def format_datetime(dt, usegmt=False): diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 4278422dfacc9f..5f4f1d75b43e64 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,7 +10,7 @@ __all__ = ["version", "bootstrap"] _PACKAGE_NAMES = ('pip',) -_PIP_VERSION = "23.1.1" +_PIP_VERSION = "23.1.2" _PROJECTS = [ ("pip", _PIP_VERSION, "py3"), ] diff --git a/Lib/ensurepip/_bundled/pip-23.1.1-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl similarity index 93% rename from Lib/ensurepip/_bundled/pip-23.1.1-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl index dee4c0304b2c36..6a2515615ccda3 100644 Binary files a/Lib/ensurepip/_bundled/pip-23.1.1-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl differ diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py index 93b10d26c84545..bd89370e16831e 100644 --- a/Lib/http/cookiejar.py +++ b/Lib/http/cookiejar.py @@ -104,9 +104,9 @@ def time2isoz(t=None): """ if t is None: - dt = datetime.datetime.utcnow() + dt = datetime.datetime.now(tz=datetime.UTC) else: - dt = datetime.datetime.utcfromtimestamp(t) + dt = datetime.datetime.fromtimestamp(t, tz=datetime.UTC) return "%04d-%02d-%02d %02d:%02d:%02dZ" % ( dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) @@ -122,9 +122,9 @@ def time2netscape(t=None): """ if t is None: - dt = datetime.datetime.utcnow() + dt = datetime.datetime.now(tz=datetime.UTC) else: - dt = datetime.datetime.utcfromtimestamp(t) + dt = datetime.datetime.fromtimestamp(t, tz=datetime.UTC) return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % ( DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], dt.year, dt.hour, dt.minute, dt.second) diff --git a/Lib/idlelib/colorizer.py b/Lib/idlelib/colorizer.py index e9f19c145c8673..b4df353012b788 100644 --- a/Lib/idlelib/colorizer.py +++ b/Lib/idlelib/colorizer.py @@ -310,7 +310,7 @@ def recolorize_main(self): # crumb telling the next invocation to resume here # in case update tells us to leave. self.tag_add("TODO", next) - self.update() + self.update_idletasks() if self.stop_colorizing: if DEBUG: print("colorizing stopped") return diff --git a/Lib/idlelib/outwin.py b/Lib/idlelib/outwin.py index ac67c904ab9797..610031e26f1dff 100644 --- a/Lib/idlelib/outwin.py +++ b/Lib/idlelib/outwin.py @@ -112,7 +112,7 @@ def write(self, s, tags=(), mark="insert"): assert isinstance(s, str) self.text.insert(mark, s, tags) self.text.see(mark) - self.text.update() + self.text.update_idletasks() return len(s) def writelines(self, lines): diff --git a/Lib/imp.py b/Lib/imp.py deleted file mode 100644 index fe850f6a001814..00000000000000 --- a/Lib/imp.py +++ /dev/null @@ -1,346 +0,0 @@ -"""This module provides the components needed to build your own __import__ -function. Undocumented functions are obsolete. - -In most cases it is preferred you consider using the importlib module's -functionality over this module. 
- -""" -# (Probably) need to stay in _imp -from _imp import (lock_held, acquire_lock, release_lock, - get_frozen_object, is_frozen_package, - init_frozen, is_builtin, is_frozen, - _fix_co_filename, _frozen_module_names) -try: - from _imp import create_dynamic -except ImportError: - # Platform doesn't support dynamic loading. - create_dynamic = None - -from importlib._bootstrap import _ERR_MSG, _exec, _load, _builtin_from_name -from importlib._bootstrap_external import SourcelessFileLoader - -from importlib import machinery -from importlib import util -import importlib -import os -import sys -import tokenize -import types -import warnings - -warnings.warn("the imp module is deprecated in favour of importlib and slated " - "for removal in Python 3.12; " - "see the module's documentation for alternative uses", - DeprecationWarning, stacklevel=2) - -# DEPRECATED -SEARCH_ERROR = 0 -PY_SOURCE = 1 -PY_COMPILED = 2 -C_EXTENSION = 3 -PY_RESOURCE = 4 -PKG_DIRECTORY = 5 -C_BUILTIN = 6 -PY_FROZEN = 7 -PY_CODERESOURCE = 8 -IMP_HOOK = 9 - - -def new_module(name): - """**DEPRECATED** - - Create a new module. - - The module is not entered into sys.modules. - - """ - return types.ModuleType(name) - - -def get_magic(): - """**DEPRECATED** - - Return the magic number for .pyc files. - """ - return util.MAGIC_NUMBER - - -def get_tag(): - """Return the magic tag for .pyc files.""" - return sys.implementation.cache_tag - - -def cache_from_source(path, debug_override=None): - """**DEPRECATED** - - Given the path to a .py file, return the path to its .pyc file. - - The .py file does not need to exist; this simply returns the path to the - .pyc file calculated as if the .py file were imported. - - If debug_override is not None, then it must be a boolean and is used in - place of sys.flags.optimize. - - If sys.implementation.cache_tag is None then NotImplementedError is raised. - - """ - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - return util.cache_from_source(path, debug_override) - - -def source_from_cache(path): - """**DEPRECATED** - - Given the path to a .pyc. file, return the path to its .py file. - - The .pyc file does not need to exist; this simply returns the path to - the .py file calculated to correspond to the .pyc file. If path does - not conform to PEP 3147 format, ValueError will be raised. If - sys.implementation.cache_tag is None then NotImplementedError is raised. - - """ - return util.source_from_cache(path) - - -def get_suffixes(): - """**DEPRECATED**""" - extensions = [(s, 'rb', C_EXTENSION) for s in machinery.EXTENSION_SUFFIXES] - source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] - bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] - - return extensions + source + bytecode - - -class NullImporter: - - """**DEPRECATED** - - Null import object. - - """ - - def __init__(self, path): - if path == '': - raise ImportError('empty pathname', path='') - elif os.path.isdir(path): - raise ImportError('existing directory', path=path) - - def find_module(self, fullname): - """Always returns None.""" - return None - - -class _HackedGetData: - - """Compatibility support for 'file' arguments of various load_*() - functions.""" - - def __init__(self, fullname, path, file=None): - super().__init__(fullname, path) - self.file = file - - def get_data(self, path): - """Gross hack to contort loader to deal w/ load_*()'s bad API.""" - if self.file and path == self.path: - # The contract of get_data() requires us to return bytes. 
Reopen the - # file in binary mode if needed. - if not self.file.closed: - file = self.file - if 'b' not in file.mode: - file.close() - if self.file.closed: - self.file = file = open(self.path, 'rb') - - with file: - return file.read() - else: - return super().get_data(path) - - -class _LoadSourceCompatibility(_HackedGetData, machinery.SourceFileLoader): - - """Compatibility support for implementing load_source().""" - - -def load_source(name, pathname, file=None): - loader = _LoadSourceCompatibility(name, pathname, file) - spec = util.spec_from_file_location(name, pathname, loader=loader) - if name in sys.modules: - module = _exec(spec, sys.modules[name]) - else: - module = _load(spec) - # To allow reloading to potentially work, use a non-hacked loader which - # won't rely on a now-closed file object. - module.__loader__ = machinery.SourceFileLoader(name, pathname) - module.__spec__.loader = module.__loader__ - return module - - -class _LoadCompiledCompatibility(_HackedGetData, SourcelessFileLoader): - - """Compatibility support for implementing load_compiled().""" - - -def load_compiled(name, pathname, file=None): - """**DEPRECATED**""" - loader = _LoadCompiledCompatibility(name, pathname, file) - spec = util.spec_from_file_location(name, pathname, loader=loader) - if name in sys.modules: - module = _exec(spec, sys.modules[name]) - else: - module = _load(spec) - # To allow reloading to potentially work, use a non-hacked loader which - # won't rely on a now-closed file object. - module.__loader__ = SourcelessFileLoader(name, pathname) - module.__spec__.loader = module.__loader__ - return module - - -def load_package(name, path): - """**DEPRECATED**""" - if os.path.isdir(path): - extensions = (machinery.SOURCE_SUFFIXES[:] + - machinery.BYTECODE_SUFFIXES[:]) - for extension in extensions: - init_path = os.path.join(path, '__init__' + extension) - if os.path.exists(init_path): - path = init_path - break - else: - raise ValueError('{!r} is not a package'.format(path)) - spec = util.spec_from_file_location(name, path, - submodule_search_locations=[]) - if name in sys.modules: - return _exec(spec, sys.modules[name]) - else: - return _load(spec) - - -def load_module(name, file, filename, details): - """**DEPRECATED** - - Load a module, given information returned by find_module(). - - The module name must include the full package name, if any. - - """ - suffix, mode, type_ = details - if mode and (not mode.startswith('r') or '+' in mode): - raise ValueError('invalid file open mode {!r}'.format(mode)) - elif file is None and type_ in {PY_SOURCE, PY_COMPILED}: - msg = 'file object required for import (type code {})'.format(type_) - raise ValueError(msg) - elif type_ == PY_SOURCE: - return load_source(name, filename, file) - elif type_ == PY_COMPILED: - return load_compiled(name, filename, file) - elif type_ == C_EXTENSION and load_dynamic is not None: - if file is None: - with open(filename, 'rb') as opened_file: - return load_dynamic(name, filename, opened_file) - else: - return load_dynamic(name, filename, file) - elif type_ == PKG_DIRECTORY: - return load_package(name, filename) - elif type_ == C_BUILTIN: - return init_builtin(name) - elif type_ == PY_FROZEN: - return init_frozen(name) - else: - msg = "Don't know how to import {} (type code {})".format(name, type_) - raise ImportError(msg, name=name) - - -def find_module(name, path=None): - """**DEPRECATED** - - Search for a module. - - If path is omitted or None, search for a built-in, frozen or special - module and continue search in sys.path. 
The module name cannot - contain '.'; to search for a submodule of a package, pass the - submodule name and the package's __path__. - - """ - if not isinstance(name, str): - raise TypeError("'name' must be a str, not {}".format(type(name))) - elif not isinstance(path, (type(None), list)): - # Backwards-compatibility - raise RuntimeError("'path' must be None or a list, " - "not {}".format(type(path))) - - if path is None: - if is_builtin(name): - return None, None, ('', '', C_BUILTIN) - elif is_frozen(name): - return None, None, ('', '', PY_FROZEN) - else: - path = sys.path - - for entry in path: - package_directory = os.path.join(entry, name) - for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: - package_file_name = '__init__' + suffix - file_path = os.path.join(package_directory, package_file_name) - if os.path.isfile(file_path): - return None, package_directory, ('', '', PKG_DIRECTORY) - for suffix, mode, type_ in get_suffixes(): - file_name = name + suffix - file_path = os.path.join(entry, file_name) - if os.path.isfile(file_path): - break - else: - continue - break # Break out of outer loop when breaking out of inner loop. - else: - raise ImportError(_ERR_MSG.format(name), name=name) - - encoding = None - if 'b' not in mode: - with open(file_path, 'rb') as file: - encoding = tokenize.detect_encoding(file.readline)[0] - file = open(file_path, mode, encoding=encoding) - return file, file_path, (suffix, mode, type_) - - -def reload(module): - """**DEPRECATED** - - Reload the module and return it. - - The module must have been successfully imported before. - - """ - return importlib.reload(module) - - -def init_builtin(name): - """**DEPRECATED** - - Load and return a built-in module by name, or None is such module doesn't - exist - """ - try: - return _builtin_from_name(name) - except ImportError: - return None - - -if create_dynamic: - def load_dynamic(name, path, file=None): - """**DEPRECATED** - - Load an extension module. - """ - import importlib.machinery - loader = importlib.machinery.ExtensionFileLoader(name, path) - - # Issue #24748: Skip the sys.modules check in _load_module_shim; - # always load new extension - spec = importlib.util.spec_from_file_location( - name, path, loader=loader) - return _load(spec) - -else: - load_dynamic = None diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 74a78bc9f6dc08..cb227373ca2fd4 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -440,7 +440,8 @@ def _write_atomic(path, data, mode=0o666): # Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches) # Python 3.12b1 3525 (Shrink the CALL caches) # Python 3.12b1 3526 (Add instrumentation support) -# Python 3.12b1 3527 (Optimize super() calls) +# Python 3.12b1 3527 (Add LOAD_SUPER_ATTR) +# Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization) # Python 3.13 will start with 3550 @@ -457,7 +458,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. 
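Not part of the patch, for orientation only: the magic number changed in the hunk below is stored as two little-endian bytes followed by b'\r\n', and a .pyc file whose four-byte header no longer matches the interpreter's current value is recompiled. A minimal sketch of that check (the pyc_is_stale helper is a made-up name for illustration):

    # Illustrative only; mirrors the encoding used in Lib/importlib/_bootstrap_external.py.
    OLD_MAGIC = (3527).to_bytes(2, 'little') + b'\r\n'  # value before this patch
    NEW_MAGIC = (3528).to_bytes(2, 'little') + b'\r\n'  # value after adding LOAD_SUPER_ATTR_METHOD

    def pyc_is_stale(pyc_header: bytes, current_magic: bytes = NEW_MAGIC) -> bool:
        # A .pyc file starts with the 4-byte magic; any mismatch forces recompilation.
        return pyc_header[:4] != current_magic

    print(pyc_is_stale(OLD_MAGIC))  # True: bytecode built before the bump is rebuilt
    print(pyc_is_stale(NEW_MAGIC))  # False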
-MAGIC_NUMBER = (3527).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3528).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c diff --git a/Lib/locale.py b/Lib/locale.py index 4127d917465936..e94f0d1acbaa7d 100644 --- a/Lib/locale.py +++ b/Lib/locale.py @@ -962,7 +962,7 @@ def getpreferredencoding(do_setlocale=True): 'c.ascii': 'C', 'c.en': 'C', 'c.iso88591': 'en_US.ISO8859-1', - 'c.utf8': 'en_US.UTF-8', + 'c.utf8': 'C.UTF-8', 'c_c': 'C', 'c_c.c': 'C', 'ca': 'ca_ES.ISO8859-1', diff --git a/Lib/opcode.py b/Lib/opcode.py index e7b346e8ec291f..aef8407948df15 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -353,6 +353,9 @@ def pseudo_op(name, op, real_ops): "FOR_ITER_RANGE", "FOR_ITER_GEN", ], + "LOAD_SUPER_ATTR": [ + "LOAD_SUPER_ATTR_METHOD", + ], "LOAD_ATTR": [ # These potentially push [NULL, bound method] onto the stack. "LOAD_ATTR_CLASS", @@ -426,6 +429,12 @@ def pseudo_op(name, op, real_ops): "FOR_ITER": { "counter": 1, }, + "LOAD_SUPER_ATTR": { + "counter": 1, + "class_version": 2, + "self_type_version": 2, + "method": 4, + }, "LOAD_ATTR": { "counter": 1, "version": 2, diff --git a/Lib/pdb.py b/Lib/pdb.py index a3553b345a8dd3..645cbf518e58e3 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -154,7 +154,7 @@ def namespace(self): @property def code(self): - with io.open(self) as fp: + with io.open_code(self) as fp: return f"exec(compile({fp.read()!r}, {self!r}, 'exec'))" diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index 56731de64af494..fb977eaaa05767 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -14,7 +14,7 @@ __all__ = [ 'get_importer', 'iter_importers', 'get_loader', 'find_loader', 'walk_packages', 'iter_modules', 'get_data', - 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path', + 'read_code', 'extend_path', 'ModuleInfo', ] @@ -185,187 +185,6 @@ def _iter_file_finder_modules(importer, prefix=''): importlib.machinery.FileFinder, _iter_file_finder_modules) -def _import_imp(): - global imp - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - imp = importlib.import_module('imp') - -class ImpImporter: - """PEP 302 Finder that wraps Python's "classic" import algorithm - - ImpImporter(dirname) produces a PEP 302 finder that searches that - directory. ImpImporter(None) produces a PEP 302 finder that searches - the current sys.path, plus any modules that are frozen or built-in. - - Note that ImpImporter does not currently support being used by placement - on sys.meta_path. 
- """ - - def __init__(self, path=None): - global imp - warnings.warn("This emulation is deprecated and slated for removal " - "in Python 3.12; use 'importlib' instead", - DeprecationWarning) - _import_imp() - self.path = path - - def find_module(self, fullname, path=None): - # Note: we ignore 'path' argument since it is only used via meta_path - subname = fullname.split(".")[-1] - if subname != fullname and self.path is None: - return None - if self.path is None: - path = None - else: - path = [os.path.realpath(self.path)] - try: - file, filename, etc = imp.find_module(subname, path) - except ImportError: - return None - return ImpLoader(fullname, file, filename, etc) - - def iter_modules(self, prefix=''): - if self.path is None or not os.path.isdir(self.path): - return - - yielded = {} - import inspect - try: - filenames = os.listdir(self.path) - except OSError: - # ignore unreadable directories like import does - filenames = [] - filenames.sort() # handle packages before same-named modules - - for fn in filenames: - modname = inspect.getmodulename(fn) - if modname=='__init__' or modname in yielded: - continue - - path = os.path.join(self.path, fn) - ispkg = False - - if not modname and os.path.isdir(path) and '.' not in fn: - modname = fn - try: - dircontents = os.listdir(path) - except OSError: - # ignore unreadable directories like import does - dircontents = [] - for fn in dircontents: - subname = inspect.getmodulename(fn) - if subname=='__init__': - ispkg = True - break - else: - continue # not a package - - if modname and '.' not in modname: - yielded[modname] = 1 - yield prefix + modname, ispkg - - -class ImpLoader: - """PEP 302 Loader that wraps Python's "classic" import algorithm - """ - code = source = None - - def __init__(self, fullname, file, filename, etc): - warnings.warn("This emulation is deprecated and slated for removal in " - "Python 3.12; use 'importlib' instead", - DeprecationWarning) - _import_imp() - self.file = file - self.filename = filename - self.fullname = fullname - self.etc = etc - - def load_module(self, fullname): - self._reopen() - try: - mod = imp.load_module(fullname, self.file, self.filename, self.etc) - finally: - if self.file: - self.file.close() - # Note: we don't set __loader__ because we want the module to look - # normal; i.e. 
this is just a wrapper for standard import machinery - return mod - - def get_data(self, pathname): - with open(pathname, "rb") as file: - return file.read() - - def _reopen(self): - if self.file and self.file.closed: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - self.file = open(self.filename, 'r') - elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION): - self.file = open(self.filename, 'rb') - - def _fix_name(self, fullname): - if fullname is None: - fullname = self.fullname - elif fullname != self.fullname: - raise ImportError("Loader for module %s cannot handle " - "module %s" % (self.fullname, fullname)) - return fullname - - def is_package(self, fullname): - fullname = self._fix_name(fullname) - return self.etc[2]==imp.PKG_DIRECTORY - - def get_code(self, fullname=None): - fullname = self._fix_name(fullname) - if self.code is None: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - source = self.get_source(fullname) - self.code = compile(source, self.filename, 'exec') - elif mod_type==imp.PY_COMPILED: - self._reopen() - try: - self.code = read_code(self.file) - finally: - self.file.close() - elif mod_type==imp.PKG_DIRECTORY: - self.code = self._get_delegate().get_code() - return self.code - - def get_source(self, fullname=None): - fullname = self._fix_name(fullname) - if self.source is None: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - self._reopen() - try: - self.source = self.file.read() - finally: - self.file.close() - elif mod_type==imp.PY_COMPILED: - if os.path.exists(self.filename[:-1]): - with open(self.filename[:-1], 'r') as f: - self.source = f.read() - elif mod_type==imp.PKG_DIRECTORY: - self.source = self._get_delegate().get_source() - return self.source - - def _get_delegate(self): - finder = ImpImporter(self.filename) - spec = _get_spec(finder, '__init__') - return spec.loader - - def get_filename(self, fullname=None): - fullname = self._fix_name(fullname) - mod_type = self.etc[2] - if mod_type==imp.PKG_DIRECTORY: - return self._get_delegate().get_filename() - elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION): - return self.filename - return None - - try: import zipimport from zipimport import zipimporter diff --git a/Lib/platform.py b/Lib/platform.py index 790ef860bf106e..7bb222088d5061 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -136,11 +136,11 @@ 'pl': 200, 'p': 200, } -_component_re = re.compile(r'([0-9]+|[._+-])') def _comparable_version(version): + component_re = re.compile(r'([0-9]+|[._+-])') result = [] - for v in _component_re.split(version): + for v in component_re.split(version): if v not in '._+-': try: v = int(v, 10) @@ -152,11 +152,6 @@ def _comparable_version(version): ### Platform specific APIs -_libc_search = re.compile(b'(__libc_init)' - b'|' - b'(GLIBC_([0-9.]+))' - b'|' - br'(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII) def libc_ver(executable=None, lib='', version='', chunksize=16384): @@ -190,6 +185,12 @@ def libc_ver(executable=None, lib='', version='', chunksize=16384): # sys.executable is not set. 
return lib, version + libc_search = re.compile(b'(__libc_init)' + b'|' + b'(GLIBC_([0-9.]+))' + b'|' + br'(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII) + V = _comparable_version # We use os.path.realpath() # here to work around problems with Cygwin not being @@ -200,7 +201,7 @@ def libc_ver(executable=None, lib='', version='', chunksize=16384): pos = 0 while pos < len(binary): if b'libc' in binary or b'GLIBC' in binary: - m = _libc_search.search(binary, pos) + m = libc_search.search(binary, pos) else: m = None if not m or m.end() == len(binary): @@ -247,9 +248,6 @@ def _norm_version(version, build=''): version = '.'.join(strings[:3]) return version -_ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) ' - r'.*' - r'\[.* ([\d.]+)\])') # Examples of VER command output: # @@ -295,9 +293,13 @@ def _syscmd_ver(system='', release='', version='', else: return system, release, version + ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) ' + r'.*' + r'\[.* ([\d.]+)\])') + # Parse the output info = info.strip() - m = _ver_output.match(info) + m = ver_output.match(info) if m is not None: system, release, version = m.groups() # Strip trailing dots from version and release @@ -1033,18 +1035,6 @@ def processor(): ### Various APIs for extracting information from sys.version -_sys_version_parser = re.compile( - r'([\w.+]+)\s*' # "version<space>" - r'\(#?([^,]+)' # "(#buildno" - r'(?:,\s*([\w ]*)' # ", builddate" - r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)<space>" - r'\[([^\]]+)\]?', re.ASCII) # "[compiler]" - -_pypy_sys_version_parser = re.compile( - r'([\w.+]+)\s*' - r'\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*' - r'\[PyPy [^\]]+\]?') - _sys_version_cache = {} def _sys_version(sys_version=None): @@ -1076,10 +1066,17 @@ def _sys_version(sys_version=None): if result is not None: return result + sys_version_parser = re.compile( + r'([\w.+]+)\s*' # "version<space>" + r'\(#?([^,]+)' # "(#buildno" + r'(?:,\s*([\w ]*)' # ", builddate" + r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)<space>" + r'\[([^\]]+)\]?', re.ASCII) # "[compiler]" + if sys.platform.startswith('java'): # Jython name = 'Jython' - match = _sys_version_parser.match(sys_version) + match = sys_version_parser.match(sys_version) if match is None: raise ValueError( 'failed to parse Jython sys.version: %s' % @@ -1091,8 +1088,13 @@ def _sys_version(sys_version=None): elif "PyPy" in sys_version: # PyPy + pypy_sys_version_parser = re.compile( + r'([\w.+]+)\s*' + r'\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*' + r'\[PyPy [^\]]+\]?') + name = "PyPy" - match = _pypy_sys_version_parser.match(sys_version) + match = pypy_sys_version_parser.match(sys_version) if match is None: raise ValueError("failed to parse PyPy sys.version: %s" % repr(sys_version)) @@ -1101,7 +1103,7 @@ def _sys_version(sys_version=None): else: # CPython - match = _sys_version_parser.match(sys_version) + match = sys_version_parser.match(sys_version) if match is None: raise ValueError( 'failed to parse CPython sys.version: %s' % @@ -1290,13 +1292,6 @@ def platform(aliased=False, terse=False): ### freedesktop.org os-release standard # https://www.freedesktop.org/software/systemd/man/os-release.html -# NAME=value with optional quotes (' or "). The regular expression is less -# strict than shell lexer, but that's ok. 
-_os_release_line = re.compile( - "^(?P<name>[a-zA-Z0-9_]+)=(?P<quote>[\"\']?)(?P<value>.*)(?P=quote)$" -) -# unescape five special characters mentioned in the standard -_os_release_unescape = re.compile(r"\\([\\\$\"\'`])") # /etc takes precedence over /usr/lib _os_release_candidates = ("/etc/os-release", "/usr/lib/os-release") _os_release_cache = None @@ -1311,10 +1306,18 @@ def _parse_os_release(lines): "PRETTY_NAME": "Linux", } + # NAME=value with optional quotes (' or "). The regular expression is less + # strict than shell lexer, but that's ok. + os_release_line = re.compile( + "^(?P<name>[a-zA-Z0-9_]+)=(?P<quote>[\"\']?)(?P<value>.*)(?P=quote)$" + ) + # unescape five special characters mentioned in the standard + os_release_unescape = re.compile(r"\\([\\\$\"\'`])") + for line in lines: - mo = _os_release_line.match(line) + mo = os_release_line.match(line) if mo is not None: - info[mo.group('name')] = _os_release_unescape.sub( + info[mo.group('name')] = os_release_unescape.sub( r"\1", mo.group('value') ) diff --git a/Lib/profile.py b/Lib/profile.py index 453e56285c510c..4b82523b03d64b 100755 --- a/Lib/profile.py +++ b/Lib/profile.py @@ -25,6 +25,7 @@ import importlib.machinery +import io import sys import time import marshal @@ -588,7 +589,7 @@ def main(): else: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) - with open(progname, 'rb') as fp: + with io.open_code(progname) as fp: code = compile(fp.read(), progname, 'exec') spec = importlib.machinery.ModuleSpec(name='__main__', loader=None, origin=progname) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 78d8fd5357f72a..1c3443fa8469f7 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -512,7 +512,7 @@ def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')): basedir = os.path.normcase(basedir) if (isinstance(object, type(os)) and - (object.__name__ in ('errno', 'exceptions', 'gc', 'imp', + (object.__name__ in ('errno', 'exceptions', 'gc', 'marshal', 'posix', 'signal', 'sys', '_thread', 'zipimport') or (file.startswith(basedir) and diff --git a/Lib/runpy.py b/Lib/runpy.py index 54fc136d4074f2..42f896c9cd5094 100644 --- a/Lib/runpy.py +++ b/Lib/runpy.py @@ -279,12 +279,7 @@ def run_path(path_name, init_globals=None, run_name=None): pkg_name = run_name.rpartition(".")[0] from pkgutil import get_importer importer = get_importer(path_name) - # Trying to avoid importing imp so as to not consume the deprecation warning. - is_NullImporter = False - if type(importer).__module__ == 'imp': - if type(importer).__name__ == 'NullImporter': - is_NullImporter = True - if isinstance(importer, type(None)) or is_NullImporter: + if isinstance(importer, type(None)): # Not a valid sys.path entry, so run the code directly # execfile() doesn't help as we want to allow compiled files code, fname = _get_code_from_file(run_name, path_name) diff --git a/Lib/shutil.py b/Lib/shutil.py index c75ea4da02ebb4..7d1a3d00011f37 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -332,7 +332,7 @@ def _copyxattr(src, dst, *, follow_symlinks=True): os.setxattr(dst, name, value, follow_symlinks=follow_symlinks) except OSError as e: if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA, - errno.EINVAL): + errno.EINVAL, errno.EACCES): raise else: def _copyxattr(*args, **kwargs): diff --git a/Lib/sqlite3/__main__.py b/Lib/sqlite3/__main__.py index f8a5cca24e56af..3228dbc09d502a 100644 --- a/Lib/sqlite3/__main__.py +++ b/Lib/sqlite3/__main__.py @@ -94,12 +94,16 @@ def main(): db_name = repr(args.filename) # Prepare REPL banner and prompts. 
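Not part of the patch: the platform.py hunks above defer re.compile() from import time to first use inside each function, so importing platform no longer pays for compiling patterns that may never be needed. A minimal sketch of the same before/after refactoring with made-up names (the re module caches compiled patterns internally, so repeated calls stay cheap):

    import re

    # Before: compiled eagerly at import time, even if parse_version() is never called.
    _version_re = re.compile(r'(\d+)\.(\d+)')

    def parse_version_eager(text):
        return _version_re.match(text).groups()

    # After: compiled on first use inside the function; re's internal cache keeps
    # later calls from recompiling the same pattern.
    def parse_version_lazy(text):
        version_re = re.compile(r'(\d+)\.(\d+)')
        return version_re.match(text).groups()

    print(parse_version_lazy('3.12'))  # ('3', '12')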
+ if sys.platform == "win32" and "idlelib.run" not in sys.modules: + eofkey = "CTRL-Z" + else: + eofkey = "CTRL-D" banner = dedent(f""" sqlite3 shell, running on SQLite version {sqlite3.sqlite_version} Connected to {db_name} Each command will be run using execute() on the cursor. - Type ".help" for more information; type ".quit" or CTRL-D to quit. + Type ".help" for more information; type ".quit" or {eofkey} to quit. """).strip() sys.ps1 = "sqlite> " sys.ps2 = " ... " diff --git a/Lib/tempfile.py b/Lib/tempfile.py index 4732eb0efe1f76..2b4f4313247128 100644 --- a/Lib/tempfile.py +++ b/Lib/tempfile.py @@ -376,7 +376,7 @@ def mkdtemp(suffix=None, prefix=None, dir=None): continue else: raise - return file + return _os.path.abspath(file) raise FileExistsError(_errno.EEXIST, "No usable temporary directory name found") diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index 477f16f1841f62..c5eb6e7f1643ee 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -2437,7 +2437,8 @@ def test_utcfromtimestamp(self): ts = time.time() expected = time.gmtime(ts) - got = self.theclass.utcfromtimestamp(ts) + with self.assertWarns(DeprecationWarning): + got = self.theclass.utcfromtimestamp(ts) self.verify_field_equality(expected, got) # Run with US-style DST rules: DST begins 2 a.m. on second Sunday in @@ -2483,8 +2484,12 @@ def test_timestamp_aware(self): @support.run_with_tz('MSK-03') # Something east of Greenwich def test_microsecond_rounding(self): + def utcfromtimestamp(*args, **kwargs): + with self.assertWarns(DeprecationWarning): + return self.theclass.utcfromtimestamp(*args, **kwargs) + for fts in [self.theclass.fromtimestamp, - self.theclass.utcfromtimestamp]: + utcfromtimestamp]: zero = fts(0) self.assertEqual(zero.second, 0) self.assertEqual(zero.microsecond, 0) @@ -2581,10 +2586,11 @@ def test_fromtimestamp_limits(self): self.theclass.fromtimestamp(ts) def test_utcfromtimestamp_limits(self): - try: - self.theclass.utcfromtimestamp(-2**32 - 1) - except (OSError, OverflowError): - self.skipTest("Test not valid on this platform") + with self.assertWarns(DeprecationWarning): + try: + self.theclass.utcfromtimestamp(-2**32 - 1) + except (OSError, OverflowError): + self.skipTest("Test not valid on this platform") min_dt = self.theclass.min.replace(tzinfo=timezone.utc) min_ts = min_dt.timestamp() @@ -2597,10 +2603,11 @@ def test_utcfromtimestamp_limits(self): ("maximum", max_ts, max_dt.replace(tzinfo=None)), ]: with self.subTest(test_name, ts=ts, expected=expected): - try: - actual = self.theclass.utcfromtimestamp(ts) - except (OSError, OverflowError) as exc: - self.skipTest(str(exc)) + with self.assertWarns(DeprecationWarning): + try: + actual = self.theclass.utcfromtimestamp(ts) + except (OSError, OverflowError) as exc: + self.skipTest(str(exc)) self.assertEqual(actual, expected) @@ -2645,7 +2652,8 @@ def test_negative_float_fromtimestamp(self): @unittest.skipIf(sys.platform == "win32", "Windows doesn't accept negative timestamps") def test_negative_float_utcfromtimestamp(self): - d = self.theclass.utcfromtimestamp(-1.05) + with self.assertWarns(DeprecationWarning): + d = self.theclass.utcfromtimestamp(-1.05) self.assertEqual(d, self.theclass(1969, 12, 31, 23, 59, 58, 950000)) def test_utcnow(self): @@ -2655,8 +2663,11 @@ def test_utcnow(self): # a second of each other. 
tolerance = timedelta(seconds=1) for dummy in range(3): - from_now = self.theclass.utcnow() - from_timestamp = self.theclass.utcfromtimestamp(time.time()) + with self.assertWarns(DeprecationWarning): + from_now = self.theclass.utcnow() + + with self.assertWarns(DeprecationWarning): + from_timestamp = self.theclass.utcfromtimestamp(time.time()) if abs(from_timestamp - from_now) <= tolerance: break # Else try again a few times. @@ -2956,7 +2967,11 @@ def __new__(cls, *args, **kwargs): constr_name=constr_name): constructor = getattr(base_obj, constr_name) - dt = constructor(*constr_args) + if constr_name == "utcfromtimestamp": + with self.assertWarns(DeprecationWarning): + dt = constructor(*constr_args) + else: + dt = constructor(*constr_args) # Test that it creates the right subclass self.assertIsInstance(dt, DateTimeSubclass) @@ -2986,7 +3001,11 @@ def __new__(cls, *args, **kwargs): for name, meth_name, kwargs in test_cases: with self.subTest(name): constr = getattr(DateTimeSubclass, meth_name) - dt = constr(**kwargs) + if meth_name == "utcnow": + with self.assertWarns(DeprecationWarning): + dt = constr(**kwargs) + else: + dt = constr(**kwargs) self.assertIsInstance(dt, DateTimeSubclass) self.assertEqual(dt.extra, 7) @@ -4642,7 +4661,8 @@ def test_tzinfo_now(self): for dummy in range(3): now = datetime.now(weirdtz) self.assertIs(now.tzinfo, weirdtz) - utcnow = datetime.utcnow().replace(tzinfo=utc) + with self.assertWarns(DeprecationWarning): + utcnow = datetime.utcnow().replace(tzinfo=utc) now2 = utcnow.astimezone(weirdtz) if abs(now - now2) < timedelta(seconds=30): break @@ -4676,7 +4696,8 @@ def test_tzinfo_fromtimestamp(self): # Try to make sure tz= actually does some conversion. timestamp = 1000000000 - utcdatetime = datetime.utcfromtimestamp(timestamp) + with self.assertWarns(DeprecationWarning): + utcdatetime = datetime.utcfromtimestamp(timestamp) # In POSIX (epoch 1970), that's 2001-09-09 01:46:40 UTC, give or take. # But on some flavor of Mac, it's nowhere near that. So we can't have # any idea here what time that actually is, we can only test that @@ -4690,7 +4711,8 @@ def test_tzinfo_fromtimestamp(self): def test_tzinfo_utcnow(self): meth = self.theclass.utcnow # Ensure it doesn't require tzinfo (i.e., that this doesn't blow up). - base = meth() + with self.assertWarns(DeprecationWarning): + base = meth() # Try with and without naming the keyword; for whatever reason, # utcnow() doesn't accept a tzinfo argument. off42 = FixedOffset(42, "42") @@ -4702,7 +4724,8 @@ def test_tzinfo_utcfromtimestamp(self): meth = self.theclass.utcfromtimestamp ts = time.time() # Ensure it doesn't require tzinfo (i.e., that this doesn't blow up). - base = meth(ts) + with self.assertWarns(DeprecationWarning): + base = meth(ts) # Try with and without naming the keyword; for whatever reason, # utcfromtimestamp() doesn't accept a tzinfo argument.
off42 = FixedOffset(42, "42") @@ -5309,7 +5332,7 @@ def dst(self, dt): def test_fromutc(self): self.assertRaises(TypeError, Eastern.fromutc) # not enough args - now = datetime.utcnow().replace(tzinfo=utc_real) + now = datetime.now(tz=utc_real) self.assertRaises(ValueError, Eastern.fromutc, now) # wrong tzinfo now = now.replace(tzinfo=Eastern) # insert correct tzinfo enow = Eastern.fromutc(now) # doesn't blow up @@ -5411,9 +5434,11 @@ def test_bug_1028306(self): self.assertEqual(datetime_sc, as_datetime) def test_extra_attributes(self): + with self.assertWarns(DeprecationWarning): + utcnow = datetime.utcnow() for x in [date.today(), time(), - datetime.utcnow(), + utcnow, timedelta(), tzinfo(), timezone(timedelta())]: @@ -6073,6 +6098,7 @@ def stats(cls, start_year=1): def transitions(self): for (_, prev_ti), (t, ti) in pairs(zip(self.ut, self.ti)): shift = ti[0] - prev_ti[0] + # TODO: Remove this use of utcfromtimestamp yield datetime.utcfromtimestamp(t), shift def nondst_folds(self): diff --git a/Lib/test/libregrtest/runtest.py b/Lib/test/libregrtest/runtest.py index e9bb72a7d77ee1..61595277ed6d5a 100644 --- a/Lib/test/libregrtest/runtest.py +++ b/Lib/test/libregrtest/runtest.py @@ -143,6 +143,14 @@ def __str__(self) -> str: # set of tests that we don't want to be executed when using regrtest NOTTESTS = set() +#If these test directories are encountered recurse into them and treat each +# test_ .py or dir as a separate test module. This can increase parallelism. +# Beware this can't generally be done for any directory with sub-tests as the +# __init__.py may do things which alter what tests are to be run. + +SPLITTESTDIRS = { + "test_asyncio", +} # Storage of uncollectable objects FOUND_GARBAGE = [] @@ -158,7 +166,7 @@ def findtestdir(path=None): return path or os.path.dirname(os.path.dirname(__file__)) or os.curdir -def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS): +def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS, *, split_test_dirs=SPLITTESTDIRS, base_mod=""): """Return a list of all applicable test modules.""" testdir = findtestdir(testdir) names = os.listdir(testdir) @@ -166,8 +174,13 @@ def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS): others = set(stdtests) | nottests for name in names: mod, ext = os.path.splitext(name) - if mod[:5] == "test_" and ext in (".py", "") and mod not in others: - tests.append(mod) + if mod[:5] == "test_" and mod not in others: + if mod in split_test_dirs: + subdir = os.path.join(testdir, mod) + mod = f"{base_mod or 'test'}.{mod}" + tests.extend(findtests(subdir, [], nottests, split_test_dirs=split_test_dirs, base_mod=mod)) + elif ext in (".py", ""): + tests.append(f"{base_mod}.{mod}" if base_mod else mod) return stdtests + sorted(tests) diff --git a/Lib/test/support/testresult.py b/Lib/test/support/testresult.py index 2cd1366cd8a9e1..14474be222dc4b 100644 --- a/Lib/test/support/testresult.py +++ b/Lib/test/support/testresult.py @@ -18,10 +18,13 @@ def __init__(self, stream, descriptions, verbosity): self.buffer = True if self.USE_XML: from xml.etree import ElementTree as ET - from datetime import datetime + from datetime import datetime, UTC self.__ET = ET self.__suite = ET.Element('testsuite') - self.__suite.set('start', datetime.utcnow().isoformat(' ')) + self.__suite.set('start', + datetime.now(UTC) + .replace(tzinfo=None) + .isoformat(' ')) self.__e = None self.__start_time = None diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index 
6cb7dc300c5331..c42856e578b8cc 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -447,6 +447,19 @@ def monkey(): self.assertFalse(tr.is_reading()) + def test_pause_reading_connection_made(self): + tr = self.socket_transport() + self.protocol.connection_made.side_effect = lambda _: tr.pause_reading() + test_utils.run_briefly(self.loop) + self.assertFalse(tr.is_reading()) + self.loop.assert_no_reader(7) + + tr.resume_reading() + self.assertTrue(tr.is_reading()) + + tr.close() + self.assertFalse(tr.is_reading()) + def pause_writing_transport(self, high): tr = self.socket_transport() diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index e41341fd26e19e..47693ea4d3ce2e 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -547,6 +547,22 @@ def test_pause_resume_reading(self): self.assertFalse(tr.is_reading()) self.loop.assert_no_reader(7) + def test_pause_reading_connection_made(self): + tr = self.socket_transport() + self.protocol.connection_made.side_effect = lambda _: tr.pause_reading() + test_utils.run_briefly(self.loop) + self.assertFalse(tr.is_reading()) + self.loop.assert_no_reader(7) + + tr.resume_reading() + self.assertTrue(tr.is_reading()) + self.loop.assert_reader(7, tr._read_ready) + + tr.close() + self.assertFalse(tr.is_reading()) + self.loop.assert_no_reader(7) + + def test_read_eof_received_error(self): transport = self.socket_transport() transport.close = mock.Mock() diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index eba6e2d1f28f3e..eeeca40c15cd28 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -151,6 +151,24 @@ async def run(data): self.assertEqual(exitcode, 0) self.assertEqual(stdout, b'some data') + def test_communicate_none_input(self): + args = PROGRAM_CAT + + async def run(): + proc = await asyncio.create_subprocess_exec( + *args, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + stdout, stderr = await proc.communicate() + return proc.returncode, stdout + + task = run() + task = asyncio.wait_for(task, support.LONG_TIMEOUT) + exitcode, stdout = self.loop.run_until_complete(task) + self.assertEqual(exitcode, 0) + self.assertEqual(stdout, b'') + def test_shell(self): proc = self.loop.run_until_complete( asyncio.create_subprocess_shell('exit 7') diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 31622c91470bcb..6e8a51ce2555d5 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -399,6 +399,18 @@ async def notmuch(): self.loop.run_until_complete(t1) self.loop.run_until_complete(t2) + def test_task_set_name_pylong(self): + # test that setting the task name to a PyLong explicitly doesn't + # incorrectly trigger the deferred name formatting logic + async def notmuch(): + return 123 + + t = self.new_task(self.loop, notmuch(), name=987654321) + self.assertEqual(t.get_name(), '987654321') + t.set_name(123456789) + self.assertEqual(t.get_name(), '123456789') + self.loop.run_until_complete(t) + def test_task_repr_name_not_str(self): async def notmuch(): return 123 diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py index ccfbeede0be949..24e472b5fee828 100644 --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -8,6 +8,7 @@ import sys import datetime import os +import warnings # From 
https://en.wikipedia.org/wiki/Leap_year_starting_on_Saturday result_0_02_text = """\ @@ -490,6 +491,14 @@ def test_format(self): self.assertEqual(out.getvalue().strip(), "1 2 3") class CalendarTestCase(unittest.TestCase): + + def test_deprecation_warning(self): + with self.assertWarnsRegex( + DeprecationWarning, + "The 'January' attribute is deprecated, use 'JANUARY' instead" + ): + calendar.January + def test_isleap(self): # Make sure that the return is right for a few years, and # ensure that the return values are 1 or 0, not just true or diff --git a/Lib/test/test_curses.py b/Lib/test/test_curses.py index b550f4af555ce4..3ab837e4f95681 100644 --- a/Lib/test/test_curses.py +++ b/Lib/test/test_curses.py @@ -5,6 +5,7 @@ import sys import tempfile import unittest +from unittest.mock import MagicMock from test.support import (requires, verbose, SaveSignals, cpython_only, check_disallow_instantiation) @@ -1319,5 +1320,75 @@ def lorem_ipsum(win): for y, line in enumerate(text[:maxy]): win.addstr(y, 0, line[:maxx - (y == maxy - 1)]) + +class TextboxTest(unittest.TestCase): + def setUp(self): + self.mock_win = MagicMock(spec=curses.window) + self.mock_win.getyx.return_value = (1, 1) + self.mock_win.getmaxyx.return_value = (10, 20) + self.textbox = curses.textpad.Textbox(self.mock_win) + + def test_init(self): + """Test textbox initialization.""" + self.mock_win.reset_mock() + tb = curses.textpad.Textbox(self.mock_win) + self.mock_win.getmaxyx.assert_called_once_with() + self.mock_win.keypad.assert_called_once_with(1) + self.assertEqual(tb.insert_mode, False) + self.assertEqual(tb.stripspaces, 1) + self.assertIsNone(tb.lastcmd) + self.mock_win.reset_mock() + + def test_insert(self): + """Test inserting a printable character.""" + self.mock_win.reset_mock() + self.textbox.do_command(ord('a')) + self.mock_win.addch.assert_called_with(ord('a')) + self.textbox.do_command(ord('b')) + self.mock_win.addch.assert_called_with(ord('b')) + self.textbox.do_command(ord('c')) + self.mock_win.addch.assert_called_with(ord('c')) + self.mock_win.reset_mock() + + def test_delete(self): + """Test deleting a character.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.ascii.BS) + self.textbox.do_command(curses.KEY_BACKSPACE) + self.textbox.do_command(curses.ascii.DEL) + self.assertEqual(self.mock_win.delch.call_count, 3) + self.mock_win.reset_mock() + + def test_move_left(self): + """Test moving the cursor left.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_LEFT) + self.mock_win.move.assert_called_with(1, 0) + self.textbox.do_command(curses.KEY_RIGHT) + self.mock_win.move.assert_called_with(1, 2) + self.mock_win.reset_mock() + + def test_move_right(self): + """Test moving the cursor right.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_RIGHT) + self.mock_win.move.assert_called_with(1, 2) + self.mock_win.reset_mock() + + def test_move_up(self): + """Test moving the cursor up.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_UP) + self.mock_win.move.assert_called_with(0, 1) + self.mock_win.reset_mock() + + def test_move_down(self): + """Test moving the cursor down.""" + self.mock_win.reset_mock() + self.textbox.do_command(curses.KEY_DOWN) + self.mock_win.move.assert_called_with(2, 1) + self.mock_win.reset_mock() + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py index 7dd81a8855f1be..7b48b26f9e7743 100644 --- a/Lib/test/test_dataclasses.py +++ b/Lib/test/test_dataclasses.py @@
-3671,7 +3671,7 @@ def test_text_annotations(self): ByMakeDataClass = make_dataclass('ByMakeDataClass', [('x', int)]) ManualModuleMakeDataClass = make_dataclass('ManualModuleMakeDataClass', [('x', int)], - module='test.test_dataclasses') + module=__name__) WrongNameMakeDataclass = make_dataclass('Wrong', [('x', int)]) WrongModuleMakeDataclass = make_dataclass('WrongModuleMakeDataclass', [('x', int)], diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index f17bb1813b9d87..ad3eefba365856 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -5004,7 +5004,7 @@ class Child(Parent): self.assertEqual(Parent.__subclasses__(), []) def test_attr_raise_through_property(self): - # add test case for gh-103272 + # test case for gh-103272 class A: def __getattr__(self, name): raise ValueError("FOO") @@ -5016,6 +5016,19 @@ def foo(self): with self.assertRaisesRegex(ValueError, "FOO"): A().foo + # test case for gh-103551 + class B: + @property + def __getattr__(self, name): + raise ValueError("FOO") + + @property + def foo(self): + raise NotImplementedError("BAR") + + with self.assertRaisesRegex(NotImplementedError, "BAR"): + B().foo + class DictProxyTests(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index 2d5c73c9adc920..5262c5c257cb89 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -227,6 +227,26 @@ def bug42562(): JUMP_FORWARD -4 (to 0) """ +def func_w_kwargs(a, b, **c): + pass + +def wrap_func_w_kwargs(): + func_w_kwargs(1, 2, c=5) + +dis_kw_names = """\ +%3d RESUME 0 + +%3d LOAD_GLOBAL 1 (NULL + func_w_kwargs) + LOAD_CONST 1 (1) + LOAD_CONST 2 (2) + LOAD_CONST 3 (5) + KW_NAMES 4 (('c',)) + CALL 3 + POP_TOP + RETURN_CONST 0 (None) +""" % (wrap_func_w_kwargs.__code__.co_firstlineno, + wrap_func_w_kwargs.__code__.co_firstlineno + 1) + _BIG_LINENO_FORMAT = """\ 1 RESUME 0 @@ -861,6 +881,13 @@ def do_disassembly_test(self, func, expected, with_offsets=False): self.maxDiff = None got = self.get_disassembly(func, depth=0) self.do_disassembly_compare(got, expected, with_offsets) + # Add checks for dis.disco + if hasattr(func, '__code__'): + got_disco = io.StringIO() + with contextlib.redirect_stdout(got_disco): + dis.disco(func.__code__) + self.do_disassembly_compare(got_disco.getvalue(), expected, + with_offsets) def test_opmap(self): self.assertEqual(dis.opmap["NOP"], 9) @@ -911,6 +938,10 @@ def test_bug_46724(self): # Test that negative operargs are handled properly self.do_disassembly_test(bug46724, dis_bug46724) + def test_kw_names(self): + # Test that value is displayed for KW_NAMES + self.do_disassembly_test(wrap_func_w_kwargs, dis_kw_names) + def test_big_linenos(self): def func(count): namespace = {} @@ -1935,6 +1966,14 @@ def test_findlabels(self): self.assertEqual(sorted(labels), sorted(jumps)) + def test_findlinestarts(self): + def func(): + pass + + code = func.__code__ + offsets = [linestart[0] for linestart in dis.findlinestarts(code)] + self.assertEqual(offsets, [0, 2]) + class TestDisTraceback(DisTestBase): def setUp(self) -> None: diff --git a/Lib/test/test_except_star.py b/Lib/test/test_except_star.py index bc66f90b9cad45..c49c6008e08e8c 100644 --- a/Lib/test/test_except_star.py +++ b/Lib/test/test_except_star.py @@ -618,18 +618,17 @@ def test_raise_handle_all_raise_one_named(self): raise orig except* (TypeError, ValueError) as e: raise SyntaxError(3) - except BaseException as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + 
self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) + self.assertMetadataEqual(orig, exc.__context__) def test_raise_handle_all_raise_one_unnamed(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) @@ -638,18 +637,17 @@ def test_raise_handle_all_raise_one_unnamed(self): raise orig except* (TypeError, ValueError) as e: raise SyntaxError(3) - except ExceptionGroup as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) + self.assertMetadataEqual(orig, exc.__context__) def test_raise_handle_all_raise_two_named(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) @@ -773,23 +771,22 @@ def test_raise_handle_all_raise_one_named(self): raise orig except* (TypeError, ValueError) as e: raise SyntaxError(3) from e - except BaseException as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertExceptionIsLike( - exc.exceptions[0].__cause__, + exc.__cause__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) - self.assertMetadataEqual(orig, exc.exceptions[0].__cause__) + self.assertMetadataEqual(orig, exc.__context__) + self.assertMetadataEqual(orig, exc.__cause__) def test_raise_handle_all_raise_one_unnamed(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) @@ -799,23 +796,22 @@ def test_raise_handle_all_raise_one_unnamed(self): except* (TypeError, ValueError) as e: e = sys.exception() raise SyntaxError(3) from e - except ExceptionGroup as e: + except SyntaxError as e: exc = e - self.assertExceptionIsLike( - exc, ExceptionGroup("", [SyntaxError(3)])) + self.assertExceptionIsLike(exc, SyntaxError(3)) self.assertExceptionIsLike( - exc.exceptions[0].__context__, + exc.__context__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertExceptionIsLike( - exc.exceptions[0].__cause__, + exc.__cause__, ExceptionGroup("eg", [TypeError(1), ValueError(2)])) self.assertMetadataNotEqual(orig, exc) - self.assertMetadataEqual(orig, exc.exceptions[0].__context__) - self.assertMetadataEqual(orig, exc.exceptions[0].__cause__) + self.assertMetadataEqual(orig, exc.__context__) + self.assertMetadataEqual(orig, exc.__cause__) def test_raise_handle_all_raise_two_named(self): orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)]) diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index 9d5e16628f04b6..5e94c99ae65af1 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -1535,5 +1535,19 @@ def test_not_closing_quotes(self): self.assertAllRaise(SyntaxError, "unterminated triple-quoted f-string literal", ['f"""', "f'''"]) + def test_syntax_error_after_debug(self): + self.assertAllRaise(SyntaxError, "f-string: expecting a 
valid expression after '{'", + [ + "f'{1=}{;'", + "f'{1=}{+;'", + "f'{1=}{2}{;'", + "f'{1=}{3}{;'", + ]) + self.assertAllRaise(SyntaxError, "f-string: expecting '=', or '!', or ':', or '}'", + [ + "f'{1=}{1;'", + "f'{1=}{1;}'", + ]) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 9b59d1e3e0aad2..24d4216417521c 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -314,8 +314,11 @@ def test_parameter_chaining(self): with self.assertRaises(TypeError): list[int][int] + with self.assertRaises(TypeError): dict[T, int][str, int] + with self.assertRaises(TypeError): dict[str, T][str, int] + with self.assertRaises(TypeError): dict[T, T][str, int] def test_equality(self): diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py deleted file mode 100644 index 80abc720c3251a..00000000000000 --- a/Lib/test/test_imp.py +++ /dev/null @@ -1,524 +0,0 @@ -import gc -import importlib -import importlib.util -import os -import os.path -import py_compile -import sys -from test import support -from test.support import import_helper -from test.support import os_helper -from test.support import script_helper -from test.support import warnings_helper -import unittest -import warnings -imp = warnings_helper.import_deprecated('imp') -import _imp - - -OS_PATH_NAME = os.path.__name__ - - -def requires_load_dynamic(meth): - """Decorator to skip a test if not running under CPython or lacking - imp.load_dynamic().""" - meth = support.cpython_only(meth) - return unittest.skipIf(getattr(imp, 'load_dynamic', None) is None, - 'imp.load_dynamic() required')(meth) - - -class LockTests(unittest.TestCase): - - """Very basic test of import lock functions.""" - - def verify_lock_state(self, expected): - self.assertEqual(imp.lock_held(), expected, - "expected imp.lock_held() to be %r" % expected) - def testLock(self): - LOOPS = 50 - - # The import lock may already be held, e.g. if the test suite is run - # via "import test.autotest". - lock_held_at_start = imp.lock_held() - self.verify_lock_state(lock_held_at_start) - - for i in range(LOOPS): - imp.acquire_lock() - self.verify_lock_state(True) - - for i in range(LOOPS): - imp.release_lock() - - # The original state should be restored now. 
- self.verify_lock_state(lock_held_at_start) - - if not lock_held_at_start: - try: - imp.release_lock() - except RuntimeError: - pass - else: - self.fail("release_lock() without lock should raise " - "RuntimeError") - -class ImportTests(unittest.TestCase): - def setUp(self): - mod = importlib.import_module('test.encoded_modules') - self.test_strings = mod.test_strings - self.test_path = mod.__path__ - - # test_import_encoded_module moved to test_source_encoding.py - - def test_find_module_encoding(self): - for mod, encoding, _ in self.test_strings: - with imp.find_module('module_' + mod, self.test_path)[0] as fd: - self.assertEqual(fd.encoding, encoding) - - path = [os.path.dirname(__file__)] - with self.assertRaises(SyntaxError): - imp.find_module('badsyntax_pep3120', path) - - def test_issue1267(self): - for mod, encoding, _ in self.test_strings: - fp, filename, info = imp.find_module('module_' + mod, - self.test_path) - with fp: - self.assertNotEqual(fp, None) - self.assertEqual(fp.encoding, encoding) - self.assertEqual(fp.tell(), 0) - self.assertEqual(fp.readline(), '# test %s encoding\n' - % encoding) - - fp, filename, info = imp.find_module("tokenize") - with fp: - self.assertNotEqual(fp, None) - self.assertEqual(fp.encoding, "utf-8") - self.assertEqual(fp.tell(), 0) - self.assertEqual(fp.readline(), - '"""Tokenization help for Python programs.\n') - - def test_issue3594(self): - temp_mod_name = 'test_imp_helper' - sys.path.insert(0, '.') - try: - with open(temp_mod_name + '.py', 'w', encoding="latin-1") as file: - file.write("# coding: cp1252\nu = 'test.test_imp'\n") - file, filename, info = imp.find_module(temp_mod_name) - file.close() - self.assertEqual(file.encoding, 'cp1252') - finally: - del sys.path[0] - os_helper.unlink(temp_mod_name + '.py') - os_helper.unlink(temp_mod_name + '.pyc') - - def test_issue5604(self): - # Test cannot cover imp.load_compiled function. - # Martin von Loewis note what shared library cannot have non-ascii - # character because init_xxx function cannot be compiled - # and issue never happens for dynamic modules. - # But sources modified to follow generic way for processing paths. 
- - # the return encoding could be uppercase or None - fs_encoding = sys.getfilesystemencoding() - - # covers utf-8 and Windows ANSI code pages - # one non-space symbol from every page - # (http://en.wikipedia.org/wiki/Code_page) - known_locales = { - 'utf-8' : b'\xc3\xa4', - 'cp1250' : b'\x8C', - 'cp1251' : b'\xc0', - 'cp1252' : b'\xc0', - 'cp1253' : b'\xc1', - 'cp1254' : b'\xc0', - 'cp1255' : b'\xe0', - 'cp1256' : b'\xe0', - 'cp1257' : b'\xc0', - 'cp1258' : b'\xc0', - } - - if sys.platform == 'darwin': - self.assertEqual(fs_encoding, 'utf-8') - # Mac OS X uses the Normal Form D decomposition - # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html - special_char = b'a\xcc\x88' - else: - special_char = known_locales.get(fs_encoding) - - if not special_char: - self.skipTest("can't run this test with %s as filesystem encoding" - % fs_encoding) - decoded_char = special_char.decode(fs_encoding) - temp_mod_name = 'test_imp_helper_' + decoded_char - test_package_name = 'test_imp_helper_package_' + decoded_char - init_file_name = os.path.join(test_package_name, '__init__.py') - try: - # if the curdir is not in sys.path the test fails when run with - # ./python ./Lib/test/regrtest.py test_imp - sys.path.insert(0, os.curdir) - with open(temp_mod_name + '.py', 'w', encoding="utf-8") as file: - file.write('a = 1\n') - file, filename, info = imp.find_module(temp_mod_name) - with file: - self.assertIsNotNone(file) - self.assertTrue(filename[:-3].endswith(temp_mod_name)) - self.assertEqual(info[0], '.py') - self.assertEqual(info[1], 'r') - self.assertEqual(info[2], imp.PY_SOURCE) - - mod = imp.load_module(temp_mod_name, file, filename, info) - self.assertEqual(mod.a, 1) - - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - mod = imp.load_source(temp_mod_name, temp_mod_name + '.py') - self.assertEqual(mod.a, 1) - - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - if not sys.dont_write_bytecode: - mod = imp.load_compiled( - temp_mod_name, - imp.cache_from_source(temp_mod_name + '.py')) - self.assertEqual(mod.a, 1) - - if not os.path.exists(test_package_name): - os.mkdir(test_package_name) - with open(init_file_name, 'w', encoding="utf-8") as file: - file.write('b = 2\n') - with warnings.catch_warnings(): - warnings.simplefilter('ignore') - package = imp.load_package(test_package_name, test_package_name) - self.assertEqual(package.b, 2) - finally: - del sys.path[0] - for ext in ('.py', '.pyc'): - os_helper.unlink(temp_mod_name + ext) - os_helper.unlink(init_file_name + ext) - os_helper.rmtree(test_package_name) - os_helper.rmtree('__pycache__') - - def test_issue9319(self): - path = os.path.dirname(__file__) - self.assertRaises(SyntaxError, - imp.find_module, "badsyntax_pep3120", [path]) - - def test_load_from_source(self): - # Verify that the imp module can correctly load and find .py files - # XXX (ncoghlan): It would be nice to use import_helper.CleanImport - # here, but that breaks because the os module registers some - # handlers in copy_reg on import. Since CleanImport doesn't - # revert that registration, the module is left in a broken - # state after reversion. 
Reinitialising the module contents - # and just reverting os.environ to its previous state is an OK - # workaround - with import_helper.CleanImport('os', 'os.path', OS_PATH_NAME): - import os - orig_path = os.path - orig_getenv = os.getenv - with os_helper.EnvironmentVarGuard(): - x = imp.find_module("os") - self.addCleanup(x[0].close) - new_os = imp.load_module("os", *x) - self.assertIs(os, new_os) - self.assertIs(orig_path, new_os.path) - self.assertIsNot(orig_getenv, new_os.getenv) - - @requires_load_dynamic - def test_issue15828_load_extensions(self): - # Issue 15828 picked up that the adapter between the old imp API - # and importlib couldn't handle C extensions - example = "_heapq" - x = imp.find_module(example) - file_ = x[0] - if file_ is not None: - self.addCleanup(file_.close) - mod = imp.load_module(example, *x) - self.assertEqual(mod.__name__, example) - - @requires_load_dynamic - def test_issue16421_multiple_modules_in_one_dll(self): - # Issue 16421: loading several modules from the same compiled file fails - m = '_testimportmultiple' - fileobj, pathname, description = imp.find_module(m) - fileobj.close() - mod0 = imp.load_dynamic(m, pathname) - mod1 = imp.load_dynamic('_testimportmultiple_foo', pathname) - mod2 = imp.load_dynamic('_testimportmultiple_bar', pathname) - self.assertEqual(mod0.__name__, m) - self.assertEqual(mod1.__name__, '_testimportmultiple_foo') - self.assertEqual(mod2.__name__, '_testimportmultiple_bar') - with self.assertRaises(ImportError): - imp.load_dynamic('nonexistent', pathname) - - @requires_load_dynamic - def test_load_dynamic_ImportError_path(self): - # Issue #1559549 added `name` and `path` attributes to ImportError - # in order to provide better detail. Issue #10854 implemented those - # attributes on import failures of extensions on Windows. - path = 'bogus file path' - name = 'extension' - with self.assertRaises(ImportError) as err: - imp.load_dynamic(name, path) - self.assertIn(path, err.exception.path) - self.assertEqual(name, err.exception.name) - - @requires_load_dynamic - def test_load_module_extension_file_is_None(self): - # When loading an extension module and the file is None, open one - # on the behalf of imp.load_dynamic(). 
- # Issue #15902 - name = '_testimportmultiple' - found = imp.find_module(name) - if found[0] is not None: - found[0].close() - if found[2][2] != imp.C_EXTENSION: - self.skipTest("found module doesn't appear to be a C extension") - imp.load_module(name, None, *found[1:]) - - @requires_load_dynamic - def test_issue24748_load_module_skips_sys_modules_check(self): - name = 'test.imp_dummy' - try: - del sys.modules[name] - except KeyError: - pass - try: - module = importlib.import_module(name) - spec = importlib.util.find_spec('_testmultiphase') - module = imp.load_dynamic(name, spec.origin) - self.assertEqual(module.__name__, name) - self.assertEqual(module.__spec__.name, name) - self.assertEqual(module.__spec__.origin, spec.origin) - self.assertRaises(AttributeError, getattr, module, 'dummy_name') - self.assertEqual(module.int_const, 1969) - self.assertIs(sys.modules[name], module) - finally: - try: - del sys.modules[name] - except KeyError: - pass - - @unittest.skipIf(sys.dont_write_bytecode, - "test meaningful only when writing bytecode") - def test_bug7732(self): - with os_helper.temp_cwd(): - source = os_helper.TESTFN + '.py' - os.mkdir(source) - self.assertRaisesRegex(ImportError, '^No module', - imp.find_module, os_helper.TESTFN, ["."]) - - def test_multiple_calls_to_get_data(self): - # Issue #18755: make sure multiple calls to get_data() can succeed. - loader = imp._LoadSourceCompatibility('imp', imp.__file__, - open(imp.__file__, encoding="utf-8")) - loader.get_data(imp.__file__) # File should be closed - loader.get_data(imp.__file__) # Will need to create a newly opened file - - def test_load_source(self): - # Create a temporary module since load_source(name) modifies - # sys.modules[name] attributes like __loader___ - modname = f"tmp{__name__}" - mod = type(sys.modules[__name__])(modname) - with support.swap_item(sys.modules, modname, mod): - with self.assertRaisesRegex(ValueError, 'embedded null'): - imp.load_source(modname, __file__ + "\0") - - @support.cpython_only - def test_issue31315(self): - # There shouldn't be an assertion failure in imp.create_dynamic(), - # when spec.name is not a string. - create_dynamic = support.get_attribute(imp, 'create_dynamic') - class BadSpec: - name = None - origin = 'foo' - with self.assertRaises(TypeError): - create_dynamic(BadSpec()) - - def test_issue_35321(self): - # Both _frozen_importlib and _frozen_importlib_external - # should have a spec origin of "frozen" and - # no need to clean up imports in this case. 
- - import _frozen_importlib_external - self.assertEqual(_frozen_importlib_external.__spec__.origin, "frozen") - - import _frozen_importlib - self.assertEqual(_frozen_importlib.__spec__.origin, "frozen") - - def test_source_hash(self): - self.assertEqual(_imp.source_hash(42, b'hi'), b'\xfb\xd9G\x05\xaf$\x9b~') - self.assertEqual(_imp.source_hash(43, b'hi'), b'\xd0/\x87C\xccC\xff\xe2') - - def test_pyc_invalidation_mode_from_cmdline(self): - cases = [ - ([], "default"), - (["--check-hash-based-pycs", "default"], "default"), - (["--check-hash-based-pycs", "always"], "always"), - (["--check-hash-based-pycs", "never"], "never"), - ] - for interp_args, expected in cases: - args = interp_args + [ - "-c", - "import _imp; print(_imp.check_hash_based_pycs)", - ] - res = script_helper.assert_python_ok(*args) - self.assertEqual(res.out.strip().decode('utf-8'), expected) - - def test_find_and_load_checked_pyc(self): - # issue 34056 - with os_helper.temp_cwd(): - with open('mymod.py', 'wb') as fp: - fp.write(b'x = 42\n') - py_compile.compile( - 'mymod.py', - doraise=True, - invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH, - ) - file, path, description = imp.find_module('mymod', path=['.']) - mod = imp.load_module('mymod', file, path, description) - self.assertEqual(mod.x, 42) - - def test_issue98354(self): - # _imp.create_builtin should raise TypeError - # if 'name' attribute of 'spec' argument is not a 'str' instance - - create_builtin = support.get_attribute(_imp, "create_builtin") - - class FakeSpec: - def __init__(self, name): - self.name = self - spec = FakeSpec("time") - with self.assertRaises(TypeError): - create_builtin(spec) - - class FakeSpec2: - name = [1, 2, 3, 4] - spec = FakeSpec2() - with self.assertRaises(TypeError): - create_builtin(spec) - - import builtins - class UnicodeSubclass(str): - pass - class GoodSpec: - name = UnicodeSubclass("builtins") - spec = GoodSpec() - bltin = create_builtin(spec) - self.assertEqual(bltin, builtins) - - class UnicodeSubclassFakeSpec(str): - def __init__(self, name): - self.name = self - spec = UnicodeSubclassFakeSpec("builtins") - bltin = create_builtin(spec) - self.assertEqual(bltin, builtins) - - @support.cpython_only - def test_create_builtin_subinterp(self): - # gh-99578: create_builtin() behavior changes after the creation of the - # first sub-interpreter. Test both code paths, before and after the - # creation of a sub-interpreter. Previously, create_builtin() had - # a reference leak after the creation of the first sub-interpreter. - - import builtins - create_builtin = support.get_attribute(_imp, "create_builtin") - class Spec: - name = "builtins" - spec = Spec() - - def check_get_builtins(): - refcnt = sys.getrefcount(builtins) - mod = _imp.create_builtin(spec) - self.assertIs(mod, builtins) - self.assertEqual(sys.getrefcount(builtins), refcnt + 1) - # Check that a GC collection doesn't crash - gc.collect() - - check_get_builtins() - - ret = support.run_in_subinterp("import builtins") - self.assertEqual(ret, 0) - - check_get_builtins() - - -class ReloadTests(unittest.TestCase): - - """Very basic tests to make sure that imp.reload() operates just like - reload().""" - - def test_source(self): - # XXX (ncoghlan): It would be nice to use test.import_helper.CleanImport - # here, but that breaks because the os module registers some - # handlers in copy_reg on import. Since CleanImport doesn't - # revert that registration, the module is left in a broken - # state after reversion. 
Reinitialising the module contents - # and just reverting os.environ to its previous state is an OK - # workaround - with os_helper.EnvironmentVarGuard(): - import os - imp.reload(os) - - def test_extension(self): - with import_helper.CleanImport('time'): - import time - imp.reload(time) - - def test_builtin(self): - with import_helper.CleanImport('marshal'): - import marshal - imp.reload(marshal) - - def test_with_deleted_parent(self): - # see #18681 - from html import parser - html = sys.modules.pop('html') - def cleanup(): - sys.modules['html'] = html - self.addCleanup(cleanup) - with self.assertRaisesRegex(ImportError, 'html'): - imp.reload(parser) - - -class PEP3147Tests(unittest.TestCase): - """Tests of PEP 3147.""" - - tag = imp.get_tag() - - @unittest.skipUnless(sys.implementation.cache_tag is not None, - 'requires sys.implementation.cache_tag not be None') - def test_cache_from_source(self): - # Given the path to a .py file, return the path to its PEP 3147 - # defined .pyc file (i.e. under __pycache__). - path = os.path.join('foo', 'bar', 'baz', 'qux.py') - expect = os.path.join('foo', 'bar', 'baz', '__pycache__', - 'qux.{}.pyc'.format(self.tag)) - self.assertEqual(imp.cache_from_source(path, True), expect) - - @unittest.skipUnless(sys.implementation.cache_tag is not None, - 'requires sys.implementation.cache_tag to not be ' - 'None') - def test_source_from_cache(self): - # Given the path to a PEP 3147 defined .pyc file, return the path to - # its source. This tests the good path. - path = os.path.join('foo', 'bar', 'baz', '__pycache__', - 'qux.{}.pyc'.format(self.tag)) - expect = os.path.join('foo', 'bar', 'baz', 'qux.py') - self.assertEqual(imp.source_from_cache(path), expect) - - -class NullImporterTests(unittest.TestCase): - @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None, - "Need an undecodeable filename") - def test_unencodeable(self): - name = os_helper.TESTFN_UNENCODABLE - os.mkdir(name) - try: - self.assertRaises(ImportError, imp.NullImporter, name) - finally: - os.rmdir(name) - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py index 9032fd18d3f95b..e348733f6ce3c3 100644 --- a/Lib/test/test_importlib/util.py +++ b/Lib/test/test_importlib/util.py @@ -131,9 +131,8 @@ def uncache(*names): """ for name in names: - if name in ('sys', 'marshal', 'imp'): - raise ValueError( - "cannot uncache {0}".format(name)) + if name in ('sys', 'marshal'): + raise ValueError("cannot uncache {}".format(name)) try: del sys.modules[name] except KeyError: diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py index 334fea0774be51..5545ee39d8e942 100644 --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -155,6 +155,8 @@ def test_basic(self): self.assertEqual(int(' 0O123 ', 0), 83) self.assertEqual(int(' 0X123 ', 0), 291) self.assertEqual(int(' 0B100 ', 0), 4) + with self.assertRaises(ValueError): + int('010', 0) # without base still base 10 self.assertEqual(int('0123'), 123) @@ -221,6 +223,24 @@ def test_basic(self): self.assertEqual(int('2br45qc', 35), 4294967297) self.assertEqual(int('1z141z5', 36), 4294967297) + def test_invalid_signs(self): + with self.assertRaises(ValueError): + int('+') + with self.assertRaises(ValueError): + int('-') + with self.assertRaises(ValueError): + int('- 1') + with self.assertRaises(ValueError): + int('+ 1') + with self.assertRaises(ValueError): + int(' + 1 ') + + def test_unicode(self): + self.assertEqual(int("१२३४५६७८९०1234567890"), 12345678901234567890) + 
self.assertEqual(int('١٢٣٤٥٦٧٨٩٠'), 1234567890) + self.assertEqual(int("१२३४५६७८९०1234567890", 0), 12345678901234567890) + self.assertEqual(int('١٢٣٤٥٦٧٨٩٠', 0), 1234567890) + def test_underscores(self): for lit in VALID_UNDERSCORE_LITERALS: if any(ch in lit for ch in '.eEjJ'): diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index e39b7260624899..57fed5d09fd7b8 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -1,6 +1,29 @@ import unittest +class TestLoadSuperAttrCache(unittest.TestCase): + def test_descriptor_not_double_executed_on_spec_fail(self): + calls = [] + class Descriptor: + def __get__(self, instance, owner): + calls.append((instance, owner)) + return lambda: 1 + + class C: + d = Descriptor() + + class D(C): + def f(self): + return super().d() + + d = D() + + self.assertEqual(d.f(), 1) # warmup + calls.clear() + self.assertEqual(d.f(), 1) # try to specialize + self.assertEqual(calls, [(d, D)]) + + class TestLoadAttrCache(unittest.TestCase): def test_descriptor_added_after_optimization(self): class Descriptor: diff --git a/Lib/test/test_patma.py b/Lib/test/test_patma.py index 0ed54079c99b30..3dbd19dfffd318 100644 --- a/Lib/test/test_patma.py +++ b/Lib/test/test_patma.py @@ -3165,6 +3165,19 @@ def f(command): # 0 self.assertListEqual(self._trace(f, "go x"), [1, 2, 3]) self.assertListEqual(self._trace(f, "spam"), [1, 2, 3]) + def test_unreachable_code(self): + def f(command): # 0 + match command: # 1 + case 1: # 2 + if False: # 3 + return 1 # 4 + case _: # 5 + if False: # 6 + return 0 # 7 + + self.assertListEqual(self._trace(f, 1), [1, 2, 3]) + self.assertListEqual(self._trace(f, 0), [1, 2, 5, 6]) + def test_parser_deeply_nested_patterns(self): # Deeply nested patterns can cause exponential backtracking when parsing. # See gh-93671 for more information. 
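Editor's note: the new test_int cases above exercise two parsing rules that are easy to conflate: with base 0 the base is inferred from the literal's prefix (so a bare leading zero such as '010' is rejected, exactly as it would be in Python source), and decimal digits from any Unicode digit range are accepted, not just ASCII ones. A minimal sketch of those rules, relying only on documented int() behaviour:

```python
# Base 0 means "infer the base from the prefix", using source-code rules.
assert int(" 0x123 ", 0) == 291     # hex prefix recognised, surrounding whitespace ignored
assert int("0", 0) == 0             # a lone zero is allowed
try:
    int("010", 0)                   # leading zero without a prefix is rejected
except ValueError:
    pass

# Digits from any Unicode decimal-digit range are accepted (Devanagari, Arabic-Indic).
assert int("१२३४५६७८९०") == 1234567890
assert int("١٢٣٤٥٦٧٨٩٠") == 1234567890
```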
diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index b5c413af344c93..2f712a10257984 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -2396,6 +2396,12 @@ def _create_fake_frozen_module(): # verify that pdb found the source of the "frozen" function self.assertIn('x = "Sentinel string for gh-93696"', stdout, "Sentinel statement not found") + def test_non_utf8_encoding(self): + script_dir = os.path.join(os.path.dirname(__file__), 'encoded_modules') + for filename in os.listdir(script_dir): + if filename.endswith(".py"): + self._run_pdb([os.path.join(script_dir, filename)], 'q') + class ChecklineTests(unittest.TestCase): def setUp(self): linecache.clearcache() # Pdb.checkline() uses linecache.getline() diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py index 0cc99e0cc22763..4d9f5db3c6b3cf 100644 --- a/Lib/test/test_pkgutil.py +++ b/Lib/test/test_pkgutil.py @@ -541,14 +541,6 @@ def check_deprecated(self): "Python 3.12; use 'importlib' instead", DeprecationWarning)) - def test_importer_deprecated(self): - with self.check_deprecated(): - pkgutil.ImpImporter("") - - def test_loader_deprecated(self): - with self.check_deprecated(): - pkgutil.ImpLoader("", "", "", "") - def test_get_loader_avoids_emulation(self): with check_warnings() as w: self.assertIsNotNone(pkgutil.get_loader("sys")) diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py index 6b457440be5430..b08ababa341cfe 100644 --- a/Lib/test/test_plistlib.py +++ b/Lib/test/test_plistlib.py @@ -925,7 +925,7 @@ def test_large_timestamp(self): # Issue #26709: 32-bit timestamp out of range for ts in -2**31-1, 2**31: with self.subTest(ts=ts): - d = (datetime.datetime.utcfromtimestamp(0) + + d = (datetime.datetime(1970, 1, 1, 0, 0) + datetime.timedelta(seconds=ts)) data = plistlib.dumps(d, fmt=plistlib.FMT_BINARY) self.assertEqual(plistlib.loads(data), d) diff --git a/Lib/test/test_sqlite3/test_dbapi.py b/Lib/test/test_sqlite3/test_dbapi.py index 3013abfa730ed5..1bb0e13e356e78 100644 --- a/Lib/test/test_sqlite3/test_dbapi.py +++ b/Lib/test/test_sqlite3/test_dbapi.py @@ -577,6 +577,30 @@ def test_connection_bad_reinit(self): cx.executemany, "insert into t values(?)", ((v,) for v in range(3))) + def test_connection_config(self): + op = sqlite.SQLITE_DBCONFIG_ENABLE_FKEY + with memory_database() as cx: + with self.assertRaisesRegex(ValueError, "unknown"): + cx.getconfig(-1) + + # Toggle and verify. + old = cx.getconfig(op) + new = not old + cx.setconfig(op, new) + self.assertEqual(cx.getconfig(op), new) + + cx.setconfig(op) # defaults to True + self.assertTrue(cx.getconfig(op)) + + # Check that foreign key support was actually enabled. 
+ with cx: + cx.executescript(""" + create table t(t integer primary key); + create table u(u, foreign key(u) references t(t)); + """) + with self.assertRaisesRegex(sqlite.IntegrityError, "constraint"): + cx.execute("insert into u values(0)") + class UninitialisedConnectionTests(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_sqlite3/test_types.py b/Lib/test/test_sqlite3/test_types.py index 5e0ff353cbbd6b..fde5f888e64009 100644 --- a/Lib/test/test_sqlite3/test_types.py +++ b/Lib/test/test_sqlite3/test_types.py @@ -517,7 +517,7 @@ def test_sqlite_timestamp(self): self.assertEqual(ts, ts2) def test_sql_timestamp(self): - now = datetime.datetime.utcnow() + now = datetime.datetime.now(tz=datetime.UTC) self.cur.execute("insert into test(ts) values (current_timestamp)") self.cur.execute("select ts from test") with self.assertWarnsRegex(DeprecationWarning, "converter"): diff --git a/Lib/test/test_strptime.py b/Lib/test/test_strptime.py index e3fcabef946116..810c5a36e02f41 100644 --- a/Lib/test/test_strptime.py +++ b/Lib/test/test_strptime.py @@ -242,6 +242,16 @@ def test_ValueError(self): # 5. Julian/ordinal day (%j) is specified with %G, but not %Y with self.assertRaises(ValueError): _strptime._strptime("1999 256", "%G %j") + # 6. Invalid ISO weeks + invalid_iso_weeks = [ + "2019-00-1", + "2019-54-1", + "2021-53-1", + ] + for invalid_iso_dtstr in invalid_iso_weeks: + with self.subTest(invalid_iso_dtstr): + with self.assertRaises(ValueError): + _strptime._strptime(invalid_iso_dtstr, "%G-%V-%u") def test_strptime_exception_context(self): diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index ba25590b265ca4..e8d322d20a5a8e 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -3635,15 +3635,35 @@ def test_modes(self): arc.add('exec_group_other', mode='?rw-rwxrwx') arc.add('read_group_only', mode='?---r-----') arc.add('no_bits', mode='?---------') - arc.add('dir/', mode='?---rwsrwt', type=tarfile.DIRTYPE) + arc.add('dir/', mode='?---rwsrwt') + + # On some systems, setting the sticky bit is a no-op. + # Check if that's the case. 
+ tmp_filename = os.path.join(TEMPDIR, "tmp.file") + with open(tmp_filename, 'w'): + pass + os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX) + have_sticky_files = (os.stat(tmp_filename).st_mode & stat.S_ISVTX) + os.unlink(tmp_filename) + + os.mkdir(tmp_filename) + os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX) + have_sticky_dirs = (os.stat(tmp_filename).st_mode & stat.S_ISVTX) + os.rmdir(tmp_filename) with self.check_context(arc.open(), 'fully_trusted'): - self.expect_file('all_bits', mode='?rwsrwsrwt') + if have_sticky_files: + self.expect_file('all_bits', mode='?rwsrwsrwt') + else: + self.expect_file('all_bits', mode='?rwsrwsrwx') self.expect_file('perm_bits', mode='?rwxrwxrwx') self.expect_file('exec_group_other', mode='?rw-rwxrwx') self.expect_file('read_group_only', mode='?---r-----') self.expect_file('no_bits', mode='?---------') - self.expect_file('dir', type=tarfile.DIRTYPE, mode='?---rwsrwt') + if have_sticky_dirs: + self.expect_file('dir/', mode='?---rwsrwt') + else: + self.expect_file('dir/', mode='?---rwsrwx') with self.check_context(arc.open(), 'tar'): self.expect_file('all_bits', mode='?rwxr-xr-x') @@ -3651,7 +3671,7 @@ def test_modes(self): self.expect_file('exec_group_other', mode='?rw-r-xr-x') self.expect_file('read_group_only', mode='?---r-----') self.expect_file('no_bits', mode='?---------') - self.expect_file('dir/', type=tarfile.DIRTYPE, mode='?---r-xr-x') + self.expect_file('dir/', mode='?---r-xr-x') with self.check_context(arc.open(), 'data'): normal_dir_mode = stat.filemode(stat.S_IMODE( @@ -3661,7 +3681,7 @@ def test_modes(self): self.expect_file('exec_group_other', mode='?rw-r--r--') self.expect_file('read_group_only', mode='?rw-r-----') self.expect_file('no_bits', mode='?rw-------') - self.expect_file('dir/', type=tarfile.DIRTYPE, mode=normal_dir_mode) + self.expect_file('dir/', mode=normal_dir_mode) def test_pipe(self): # Test handling of a special file diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py index 11a43aca17e88a..db08fb1c7f2a42 100644 --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -850,6 +850,15 @@ def test_for_tempdir_is_bytes_issue40701_api_warts(self): finally: tempfile.tempdir = orig_tempdir + def test_path_is_absolute(self): + # Test that the path returned by mkdtemp with a relative `dir` + # argument is absolute + try: + path = tempfile.mkdtemp(dir=".") + self.assertTrue(os.path.isabs(path)) + finally: + os.rmdir(path) + class TestMktemp(BaseTestCase): """Test mktemp().""" diff --git a/Lib/test/test_tools/test_sundry.py b/Lib/test/test_tools/test_sundry.py index 6a3dc12781b2b6..3177fafb84a65b 100644 --- a/Lib/test/test_tools/test_sundry.py +++ b/Lib/test/test_tools/test_sundry.py @@ -1,4 +1,4 @@ -"""Tests for scripts in the Tools directory. +"""Tests for scripts in the Tools/scripts directory. This file contains extremely basic regression tests for the scripts found in the Tools directory of a Python checkout or tarball which don't have separate @@ -17,14 +17,7 @@ class TestSundryScripts(unittest.TestCase): # At least make sure the rest don't have syntax errors. When tests are # added for a script it should be added to the allowlist below. - # scripts that have independent tests. 
- allowlist = ['reindent'] - # scripts that can't be imported without running - denylist = ['make_ctype'] - # denylisted for other reasons - other = ['2to3'] - - skiplist = denylist + allowlist + other + skiplist = ['2to3'] # import logging registers "atfork" functions which keep indirectly the # logging module dictionary alive. Mock the function to be able to unload diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 5e2b353782994e..19a2be88d2c1bc 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -1539,11 +1539,11 @@ def __repr__(self): e.__notes__ = BadThing() notes_repr = 'bad repr' - self.assertEqual(self.get_report(e), vanilla + notes_repr) + self.assertEqual(self.get_report(e), vanilla + notes_repr + '\n') e.__notes__ = Unprintable() err_msg = '<__notes__ repr() failed>' - self.assertEqual(self.get_report(e), vanilla + err_msg) + self.assertEqual(self.get_report(e), vanilla + err_msg + '\n') # non-string item in the __notes__ sequence e.__notes__ = [BadThing(), 'Final Note'] @@ -1555,6 +1555,14 @@ def __repr__(self): err_msg = '<note str() failed>' self.assertEqual(self.get_report(e), vanilla + err_msg + '\nFinal Note\n') + e.__notes__ = "please do not explode me" + err_msg = "'please do not explode me'" + self.assertEqual(self.get_report(e), vanilla + err_msg + '\n') + + e.__notes__ = b"please do not show me as numbers" + err_msg = "b'please do not show me as numbers'" + self.assertEqual(self.get_report(e), vanilla + err_msg + '\n') + def test_exception_with_note_with_multiple_notes(self): e = ValueError(42) vanilla = self.get_report(e) diff --git a/Lib/test/test_turtle.py b/Lib/test/test_turtle.py index 95af84e3779824..3f9f129a3dd200 100644 --- a/Lib/test/test_turtle.py +++ b/Lib/test/test_turtle.py @@ -267,6 +267,14 @@ def test_goto(self): self.assertAlmostEqual(self.nav.xcor(), 100) self.assertAlmostEqual(self.nav.ycor(), -100) + def test_teleport(self): + self.nav.teleport(20, -30, fill_gap=True) + self.assertAlmostEqual(self.nav.xcor(), 20) + self.assertAlmostEqual(self.nav.ycor(), -30) + self.nav.teleport(-20, 30, fill_gap=False) + self.assertAlmostEqual(self.nav.xcor(), -20) + self.assertAlmostEqual(self.nav.ycor(), 30) + def test_pos(self): self.assertEqual(self.nav.pos(), self.nav._position) self.nav.goto(100, -100) @@ -440,6 +448,18 @@ def test_showturtle_hideturtle_and_isvisible(self): tpen.showturtle() self.assertTrue(tpen.isvisible()) + def test_teleport(self): + + tpen = turtle.TPen() + + for fill_gap_value in [True, False]: + tpen.penup() + tpen.teleport(100, 100, fill_gap=fill_gap_value) + self.assertFalse(tpen.isdown()) + tpen.pendown() + tpen.teleport(-100, -100, fill_gap=fill_gap_value) + self.assertTrue(tpen.isdown()) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_unittest/test_program.py b/Lib/test/test_unittest/test_program.py index f138f6836514e0..f6d52f93e4a25f 100644 --- a/Lib/test/test_unittest/test_program.py +++ b/Lib/test/test_unittest/test_program.py @@ -71,15 +71,22 @@ def testExpectedFailure(self): def testUnexpectedSuccess(self): pass - class FooBarLoader(unittest.TestLoader): - """Test loader that returns a suite containing FooBar.""" + class Empty(unittest.TestCase): + pass + + class TestLoader(unittest.TestLoader): + """Test loader that returns a suite containing the supplied testcase.""" + + def __init__(self, testcase): + self.testcase = testcase + def loadTestsFromModule(self, module): return self.suiteClass( - [self.loadTestsFromTestCase(Test_TestProgram.FooBar)]) + 
[self.loadTestsFromTestCase(self.testcase)]) def loadTestsFromNames(self, names, module): return self.suiteClass( - [self.loadTestsFromTestCase(Test_TestProgram.FooBar)]) + [self.loadTestsFromTestCase(self.testcase)]) def test_defaultTest_with_string(self): class FakeRunner(object): @@ -92,7 +99,7 @@ def run(self, test): runner = FakeRunner() program = unittest.TestProgram(testRunner=runner, exit=False, defaultTest='test.test_unittest', - testLoader=self.FooBarLoader()) + testLoader=self.TestLoader(self.FooBar)) sys.argv = old_argv self.assertEqual(('test.test_unittest',), program.testNames) @@ -108,7 +115,7 @@ def run(self, test): program = unittest.TestProgram( testRunner=runner, exit=False, defaultTest=['test.test_unittest', 'test.test_unittest2'], - testLoader=self.FooBarLoader()) + testLoader=self.TestLoader(self.FooBar)) sys.argv = old_argv self.assertEqual(['test.test_unittest', 'test.test_unittest2'], program.testNames) @@ -118,7 +125,7 @@ def test_NonExit(self): program = unittest.main(exit=False, argv=["foobar"], testRunner=unittest.TextTestRunner(stream=stream), - testLoader=self.FooBarLoader()) + testLoader=self.TestLoader(self.FooBar)) self.assertTrue(hasattr(program, 'result')) out = stream.getvalue() self.assertIn('\nFAIL: testFail ', out) @@ -130,13 +137,13 @@ def test_NonExit(self): def test_Exit(self): stream = BufferedWriter() - self.assertRaises( - SystemExit, - unittest.main, - argv=["foobar"], - testRunner=unittest.TextTestRunner(stream=stream), - exit=True, - testLoader=self.FooBarLoader()) + with self.assertRaises(SystemExit) as cm: + unittest.main( + argv=["foobar"], + testRunner=unittest.TextTestRunner(stream=stream), + exit=True, + testLoader=self.TestLoader(self.FooBar)) + self.assertEqual(cm.exception.code, 1) out = stream.getvalue() self.assertIn('\nFAIL: testFail ', out) self.assertIn('\nERROR: testError ', out) @@ -147,12 +154,11 @@ def test_Exit(self): def test_ExitAsDefault(self): stream = BufferedWriter() - self.assertRaises( - SystemExit, - unittest.main, - argv=["foobar"], - testRunner=unittest.TextTestRunner(stream=stream), - testLoader=self.FooBarLoader()) + with self.assertRaises(SystemExit): + unittest.main( + argv=["foobar"], + testRunner=unittest.TextTestRunner(stream=stream), + testLoader=self.TestLoader(self.FooBar)) out = stream.getvalue() self.assertIn('\nFAIL: testFail ', out) self.assertIn('\nERROR: testError ', out) @@ -161,6 +167,17 @@ def test_ExitAsDefault(self): 'expected failures=1, unexpected successes=1)\n') self.assertTrue(out.endswith(expected)) + def test_ExitEmptySuite(self): + stream = BufferedWriter() + with self.assertRaises(SystemExit) as cm: + unittest.main( + argv=["empty"], + testRunner=unittest.TextTestRunner(stream=stream), + testLoader=self.TestLoader(self.Empty)) + self.assertEqual(cm.exception.code, 5) + out = stream.getvalue() + self.assertIn('\nNO TESTS RAN\n', out) + class InitialisableProgram(unittest.TestProgram): exit = False diff --git a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py index 37d0fe12409ea4..db551b7890ca3e 100644 --- a/Lib/test/test_unittest/test_result.py +++ b/Lib/test/test_unittest/test_result.py @@ -451,6 +451,7 @@ def testFailFastSetByRunner(self): stream = BufferedWriter() runner = unittest.TextTestRunner(stream=stream, failfast=True) def test(result): + result.testsRun += 1 self.assertTrue(result.failfast) result = runner.run(test) stream.flush() diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py index 
ceb4c8acde532c..f3b2c0cffd4513 100644 --- a/Lib/test/test_unittest/test_runner.py +++ b/Lib/test/test_unittest/test_runner.py @@ -577,6 +577,16 @@ def test(self): 'inner setup', 'inner test', 'inner cleanup', 'end outer test', 'outer cleanup']) + def test_run_empty_suite_error_message(self): + class EmptyTest(unittest.TestCase): + pass + + suite = unittest.defaultTestLoader.loadTestsFromTestCase(EmptyTest) + runner = getRunner() + runner.run(suite) + + self.assertIn("\nNO TESTS RAN\n", runner.stream.getvalue()) + class TestModuleCleanUp(unittest.TestCase): def test_add_and_do_ModuleCleanup(self): diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index b7c6f6dd8f1b99..99c9e24994732f 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -524,16 +524,17 @@ def http_open(self, req): return MockResponse(200, "OK", msg, "", req.get_full_url()) -class MockHTTPSHandler(urllib.request.HTTPSHandler): - # Useful for testing the Proxy-Authorization request by verifying the - # properties of httpcon +if hasattr(http.client, 'HTTPSConnection'): + class MockHTTPSHandler(urllib.request.HTTPSHandler): + # Useful for testing the Proxy-Authorization request by verifying the + # properties of httpcon - def __init__(self, debuglevel=None, context=None, check_hostname=None): - super(MockHTTPSHandler, self).__init__(debuglevel, context, check_hostname) - self.httpconn = MockHTTPClass() + def __init__(self, debuglevel=None, context=None, check_hostname=None): + super(MockHTTPSHandler, self).__init__(debuglevel, context, check_hostname) + self.httpconn = MockHTTPClass() - def https_open(self, req): - return self.do_open(self.httpconn, req) + def https_open(self, req): + return self.do_open(self.httpconn, req) class MockHTTPHandlerCheckAuth(urllib.request.BaseHandler): @@ -1075,6 +1076,7 @@ def test_http_handler_local_debuglevel(self): o.open("http://www.example.com") self.assertEqual(h._debuglevel, 5) + @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.') def test_https_handler_global_debuglevel(self): with mock.patch.object(http.client.HTTPSConnection, 'debuglevel', 7): o = OpenerDirector() @@ -1083,6 +1085,7 @@ def test_https_handler_global_debuglevel(self): o.open("https://www.example.com") self.assertEqual(h._debuglevel, 7) + @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.') def test_https_handler_local_debuglevel(self): o = OpenerDirector() h = MockHTTPSHandler(debuglevel=4) @@ -1456,6 +1459,7 @@ def test_proxy_https(self): self.assertEqual([(handlers[0], "https_open")], [tup[0:2] for tup in o.calls]) + @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.') def test_proxy_https_proxy_authorization(self): o = OpenerDirector() ph = urllib.request.ProxyHandler(dict(https='proxy.example.com:3128')) diff --git a/Lib/trace.py b/Lib/trace.py index 213e46517d683d..fb9a423ea09fce 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -49,6 +49,7 @@ """ __all__ = ['Trace', 'CoverageResults'] +import io import linecache import os import sys @@ -716,7 +717,7 @@ def parse_ignore_dir(s): sys.argv = [opts.progname, *opts.arguments] sys.path[0] = os.path.dirname(opts.progname) - with open(opts.progname, 'rb') as fp: + with io.open_code(opts.progname) as fp: code = compile(fp.read(), opts.progname, 'exec') # try to emulate __main__ namespace as much as possible globs = { diff --git a/Lib/traceback.py b/Lib/traceback.py index 
9e720ac9948fce..ba4a9ffd001b53 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -852,12 +852,16 @@ def format_exception_only(self): yield _format_final_exc_line(stype, self._str) else: yield from self._format_syntax_error(stype) - if isinstance(self.__notes__, collections.abc.Sequence): + + if ( + isinstance(self.__notes__, collections.abc.Sequence) + and not isinstance(self.__notes__, (str, bytes)) + ): for note in self.__notes__: note = _safe_string(note, 'note') yield from [l + '\n' for l in note.split('\n')] elif self.__notes__ is not None: - yield _safe_string(self.__notes__, '__notes__', func=repr) + yield "{}\n".format(_safe_string(self.__notes__, '__notes__', func=repr)) def _format_syntax_error(self, stype): """Format SyntaxError exceptions (internal helper).""" diff --git a/Lib/turtle.py b/Lib/turtle.py index 1b369327bc8eff..2de406e0f517af 100644 --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -135,7 +135,7 @@ 'pu', 'radians', 'right', 'reset', 'resizemode', 'rt', 'seth', 'setheading', 'setpos', 'setposition', 'settiltangle', 'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle', - 'speed', 'st', 'stamp', 'tilt', 'tiltangle', 'towards', + 'speed', 'st', 'stamp', 'teleport', 'tilt', 'tiltangle', 'towards', 'turtlesize', 'undo', 'undobufferentries', 'up', 'width', 'write', 'xcor', 'ycor'] _tg_utilities = ['write_docstringdict', 'done'] @@ -1614,6 +1614,13 @@ def _goto(self, end): """move turtle to position end.""" self._position = end + def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: + """To be overwritten by child class RawTurtle. + Includes no TPen references.""" + new_x = x if x is not None else self._position[0] + new_y = y if y is not None else self._position[1] + self._position = Vec2D(new_x, new_y) + def forward(self, distance): """Move the turtle forward by the specified distance. @@ -2293,6 +2300,15 @@ def fillcolor(self, *args): else: return self._color(self._fillcolor) + def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: + """To be overwritten by child class RawTurtle. + Includes no TNavigator references. + """ + pendown = self.isdown() + if pendown: + self.pen(pendown=False) + self.pen(pendown=pendown) + def showturtle(self): """Makes the turtle visible. @@ -2710,6 +2726,54 @@ def _cc(self, args): if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)): raise TurtleGraphicsError("bad color sequence: %s" % str(args)) return "#%02x%02x%02x" % (r, g, b) + + def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: + """Instantly move turtle to an absolute position. + + Arguments: + x -- a number or None + y -- a number None + fill_gap -- a boolean This argument must be specified by name. + + call: teleport(x, y) # two coordinates + --or: teleport(x) # teleport to x position, keeping y as is + --or: teleport(y=y) # teleport to y position, keeping x as is + --or: teleport(x, y, fill_gap=True) + # teleport but fill the gap in between + + Move turtle to an absolute position. Unlike goto(x, y), a line will not + be drawn. The turtle's orientation does not change. If currently + filling, the polygon(s) teleported from will be filled after leaving, + and filling will begin again after teleporting. This can be disabled + with fill_gap=True, which makes the imaginary line traveled during + teleporting act as a fill barrier like in goto(x, y). 
+ + Example (for a Turtle instance named turtle): + >>> tp = turtle.pos() + >>> tp + (0.00,0.00) + >>> turtle.teleport(60) + >>> turtle.pos() + (60.00,0.00) + >>> turtle.teleport(y=10) + >>> turtle.pos() + (60.00,10.00) + >>> turtle.teleport(20, 30) + >>> turtle.pos() + (20.00,30.00) + """ + pendown = self.isdown() + was_filling = self.filling() + if pendown: + self.pen(pendown=False) + if was_filling and not fill_gap: + self.end_fill() + new_x = x if x is not None else self._position[0] + new_y = y if y is not None else self._position[1] + self._position = Vec2D(new_x, new_y) + self.pen(pendown=pendown) + if was_filling and not fill_gap: + self.begin_fill() def clone(self): """Create and return a clone of the turtle. diff --git a/Lib/unittest/main.py b/Lib/unittest/main.py index 0792750ffd9e0d..51b81a6c3728bb 100644 --- a/Lib/unittest/main.py +++ b/Lib/unittest/main.py @@ -9,6 +9,7 @@ from .signals import installHandler __unittest = True +_NO_TESTS_EXITCODE = 5 MAIN_EXAMPLES = """\ Examples: @@ -279,6 +280,12 @@ def runTests(self): testRunner = self.testRunner self.result = testRunner.run(self.test) if self.exit: - sys.exit(not self.result.wasSuccessful()) + if self.result.testsRun == 0: + sys.exit(_NO_TESTS_EXITCODE) + elif self.result.wasSuccessful(): + sys.exit(0) + else: + sys.exit(1) + main = TestProgram diff --git a/Lib/unittest/runner.py b/Lib/unittest/runner.py index a51c5c562df09d..e3c020e0ace96d 100644 --- a/Lib/unittest/runner.py +++ b/Lib/unittest/runner.py @@ -274,6 +274,8 @@ def run(self, test): infos.append("failures=%d" % failed) if errored: infos.append("errors=%d" % errored) + elif run == 0: + self.stream.write("NO TESTS RAN") else: self.stream.write("OK") if skipped: diff --git a/Lib/uuid.py b/Lib/uuid.py index 698be34873b9dc..697f3b45597023 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -401,7 +401,7 @@ def _get_command_stdout(command, *args): # over locally administered ones since the former are globally unique, but # we'll return the first of the latter found if that's all the machine has. # -# See https://en.wikipedia.org/wiki/MAC_address#Universal_vs._local +# See https://en.wikipedia.org/wiki/MAC_address#Universal_vs._local_(U/L_bit) def _is_universal(mac): return not (mac & (1 << 41)) @@ -615,7 +615,7 @@ def _random_getnode(): # significant bit of the first octet". This works out to be the 41st bit # counting from 1 being the least significant bit, or 1<<40. 
# - # See https://en.wikipedia.org/wiki/MAC_address#Unicast_vs._multicast + # See https://en.wikipedia.org/w/index.php?title=MAC_address&oldid=1128764812#Universal_vs._local_(U/L_bit) import random return random.getrandbits(48) | (1 << 40) diff --git a/Makefile.pre.in b/Makefile.pre.in index 774a29ebf59793..b285ef9e832db5 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2043,16 +2043,26 @@ LIBSUBDIRS= asyncio \ zoneinfo \ __phello__ TESTSUBDIRS= idlelib/idle_test \ - test test/audiodata \ - test/capath test/cjkencodings \ - test/data test/decimaltestdata \ - test/dtracedata test/eintrdata \ - test/encoded_modules test/imghdrdata \ - test/libregrtest test/sndhdrdata \ - test/subprocessdata test/support \ + test \ + test/audiodata \ + test/capath \ + test/cjkencodings \ + test/crashers \ + test/data \ + test/decimaltestdata \ + test/dtracedata \ + test/encoded_modules \ + test/imghdrdata \ + test/leakers \ + test/libregrtest \ + test/sndhdrdata \ + test/subprocessdata \ + test/support \ test/test_asyncio \ + test/test_capi \ test/test_ctypes \ - test/test_email test/test_email/data \ + test/test_email \ + test/test_email/data \ test/test_import \ test/test_import/data \ test/test_import/data/circular_imports \ @@ -2111,16 +2121,39 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_lib2to3/data/fixers \ test/test_lib2to3/data/fixers/myfixes \ test/test_peg_generator \ + test/test_sqlite3 \ test/test_tkinter \ + test/test_tomllib \ + test/test_tomllib/data \ + test/test_tomllib/data/invalid \ + test/test_tomllib/data/invalid/array \ + test/test_tomllib/data/invalid/array-of-tables \ + test/test_tomllib/data/invalid/boolean \ + test/test_tomllib/data/invalid/dates-and-times \ + test/test_tomllib/data/invalid/dotted-keys \ + test/test_tomllib/data/invalid/inline-table \ + test/test_tomllib/data/invalid/keys-and-vals \ + test/test_tomllib/data/invalid/literal-str \ + test/test_tomllib/data/invalid/multiline-basic-str \ + test/test_tomllib/data/invalid/multiline-literal-str \ + test/test_tomllib/data/invalid/table \ + test/test_tomllib/data/valid \ + test/test_tomllib/data/valid/array \ + test/test_tomllib/data/valid/dates-and-times \ + test/test_tomllib/data/valid/multiline-basic-str \ test/test_tools \ test/test_ttk \ - test/test_warnings test/test_warnings/data \ + test/test_unittest \ + test/test_unittest/testmock \ + test/test_warnings \ + test/test_warnings/data \ test/test_zipfile \ - test/test_zoneinfo test/test_zoneinfo/data \ - test/test_unittest test/test_unittest/testmock \ + test/test_zoneinfo \ + test/test_zoneinfo/data \ test/tracedmodules \ test/typinganndata \ - test/xmltestdata test/xmltestdata/c14n-20 \ + test/xmltestdata \ + test/xmltestdata/c14n-20 \ test/ziptestdata COMPILEALL_OPTS=-j0 diff --git a/Misc/ACKS b/Misc/ACKS index 633e9d90a36f16..42ec059a7c4ec2 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -299,6 +299,7 @@ Dave Chambers Pascal Chambon Nicholas Chammas Ofey Chan +Juhi Chandalia John Chandler Hye-Shik Chang Jeffrey Chang @@ -1487,7 +1488,7 @@ John Redford Kalyan Reddy Terry J. Reedy Gareth Rees -John Reese +Amethyst Reese Steve Reeves Lennart Regebro John Regehr @@ -1512,6 +1513,7 @@ Vlad Riscutia Wes Rishel Daniel Riti Juan M. 
Bello Rivas +Stefano Rivera Llandy Riveron Del Risco Mohd Sanad Zaki Rizvi Davide Rizzo diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst b/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst new file mode 100644 index 00000000000000..13c054fdd68276 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst @@ -0,0 +1 @@ +Fix :func:`!pause_reading` to work when called from :func:`!connection_made` in :mod:`asyncio`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-06-23-32-11.gh-issue-98003.xWE0Yu.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-06-23-32-11.gh-issue-98003.xWE0Yu.rst new file mode 100644 index 00000000000000..f9e71bc1344bb3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2022-10-06-23-32-11.gh-issue-98003.xWE0Yu.rst @@ -0,0 +1,3 @@ +Complex function calls are now faster and consume no C stack +space. + diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst index e875b2ae7ac990..9d75de1565a170 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst @@ -1 +1,4 @@ -Add :opcode:`LOAD_SUPER_ATTR` to speed up ``super().meth()`` and ``super().attr`` calls. +Add :opcode:`LOAD_SUPER_ATTR` (and a specialization for ``super().method()``) to +speed up ``super().method()`` and ``super().attr``. This makes +``super().method()`` roughly 2.3x faster and brings it within 20% of the +performance of a simple method call. Patch by Vladimir Matveev and Carl Meyer. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst new file mode 100644 index 00000000000000..af733a8207a2c1 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst @@ -0,0 +1 @@ +Do not wrap a single exception raised from a ``try-except*`` construct in an :exc:`ExceptionGroup`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst new file mode 100644 index 00000000000000..c48348798e7142 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst @@ -0,0 +1,3 @@ +Optimized asyncio Task creation by deferring expensive string formatting +(task name generation) from Task creation to the first time ``get_name`` is +called. This makes asyncio benchmarks up to 5% faster. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-26-17-56-18.gh-issue-103895.ESB6tn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-26-17-56-18.gh-issue-103895.ESB6tn.rst new file mode 100644 index 00000000000000..6fed304c9132b3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-26-17-56-18.gh-issue-103895.ESB6tn.rst @@ -0,0 +1,3 @@ +Improve handling of edge cases in showing ``Exception.__notes__``. Ensures +that the messages always end with a newline and that string/bytes are not +exploded over multiple lines. Patch by Carey Metcalfe. 
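Editor's note: the Exception.__notes__ entry above pairs with the Lib/traceback.py hunk earlier in this patch: a str or bytes assigned directly to ``__notes__`` (instead of a sequence of notes) is now rendered once via ``repr()`` and terminated with a newline, rather than being iterated character by character. A small sketch of the resulting behaviour, assuming the patched formatter:

```python
import traceback

e = ValueError(42)
e.__notes__ = "please do not explode me"   # a plain str, not a list of notes

# With the patched formatter the whole string appears once, as its repr,
# followed by a newline, instead of one character per output line.
report = "".join(traceback.format_exception_only(type(e), e))
assert report.endswith("'please do not explode me'\n")
```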
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-01-08-08-05.gh-issue-102213.nfH-4C.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-08-08-05.gh-issue-102213.nfH-4C.rst new file mode 100644 index 00000000000000..997bef226e713f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-08-08-05.gh-issue-102213.nfH-4C.rst @@ -0,0 +1 @@ +Fix performance loss when accessing an object's attributes with ``__getattr__`` defined. diff --git a/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst b/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst new file mode 100644 index 00000000000000..619505cf6ee5b8 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst @@ -0,0 +1 @@ +Clarifying documentation about the url parameter to urllib.request.urlopen and urllib.request.Requst needing to be encoded properly. diff --git a/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst b/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst new file mode 100644 index 00000000000000..6dc0a1cb5a3e4f --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst @@ -0,0 +1,2 @@ +Mention the new way of typing ``**kwargs`` with ``Unpack`` and ``TypedDict`` +introduced in :pep:`692`. diff --git a/Misc/NEWS.d/next/IDLE/2023-04-30-20-01-18.gh-issue-88496.y65vUb.rst b/Misc/NEWS.d/next/IDLE/2023-04-30-20-01-18.gh-issue-88496.y65vUb.rst new file mode 100644 index 00000000000000..4f390d189d23b5 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2023-04-30-20-01-18.gh-issue-88496.y65vUb.rst @@ -0,0 +1 @@ +Fix IDLE test hang on macOS. diff --git a/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst b/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst new file mode 100644 index 00000000000000..6e690f996569a4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst @@ -0,0 +1 @@ +Make :func:`asyncio.subprocess.Process.communicate` close the subprocess's stdin even when called with ``input=None``. diff --git a/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst b/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst new file mode 100644 index 00000000000000..50a3d6a4629182 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst @@ -0,0 +1,2 @@ +Make :func:`tempfile.mkdtemp` return absolute paths when its *dir* +parameter is relative. diff --git a/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst b/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst new file mode 100644 index 00000000000000..f67bffcb0ddc6c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst @@ -0,0 +1 @@ +Remove the long-deprecated ``imp`` module. diff --git a/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst b/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst new file mode 100644 index 00000000000000..a8d66ea48c3278 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst @@ -0,0 +1,3 @@ +The :mod:`unittest` runner will now exit with status code 5 if no tests +were run. It is common for test runner misconfiguration to fail to find +any tests, this should be an error. 
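Editor's note: the unittest entry above matches the Lib/unittest/main.py and runner.py hunks earlier in this patch: an empty run now prints "NO TESTS RAN" and exits with status 5 instead of 0, so CI scripts can tell "all passed" apart from "nothing was collected". A hedged sketch of how a caller might act on the new exit status (the module name below is a placeholder; the stable contract is the return code, not the private ``_NO_TESTS_EXITCODE`` constant):

```python
import subprocess
import sys

# "my_test_module" is hypothetical; substitute a real test module or discovery target.
proc = subprocess.run([sys.executable, "-m", "unittest", "my_test_module"])

if proc.returncode == 5:      # new: the runner found and ran no tests
    print("no tests were run - treating this as a configuration error")
elif proc.returncode == 0:
    print("all tests passed")
else:                         # nonzero: failures or errors
    print("test run failed")
```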
diff --git a/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst b/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst new file mode 100644 index 00000000000000..dcac1a28ca5847 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst @@ -0,0 +1,3 @@ +Add *entrypoint* keyword-only parameter to +:meth:`sqlite3.Connection.load_extension`, for overriding the SQLite +extension entry point. Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst b/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst new file mode 100644 index 00000000000000..264564d018ceb4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst @@ -0,0 +1,4 @@ +Add :meth:`~sqlite3.Connection.getconfig` and +:meth:`~sqlite3.Connection.setconfig` to :class:`~sqlite3.Connection` to +make configuration changes to a database connection. Patch by Erlend E. +Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst b/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst new file mode 100644 index 00000000000000..69986c2a15b39e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst @@ -0,0 +1 @@ +Fixed a bug where :mod:`pdb` crashes when reading source file with different encoding by replacing :func:`io.open` with :func:`io.open_code`. The new method would also call into the hook set by :func:`PyFile_SetOpenCodeHook`. diff --git a/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst b/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst new file mode 100644 index 00000000000000..b3b5085250f078 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst @@ -0,0 +1 @@ +Added Enum for months and days in the calendar module. diff --git a/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst b/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst new file mode 100644 index 00000000000000..a5b99a2f1360f0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst @@ -0,0 +1,2 @@ +Add :mod:`socket` constants for source-specific multicast. +Patch by Reese Hyde. diff --git a/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst b/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst new file mode 100644 index 00000000000000..99e10f140f5049 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst @@ -0,0 +1,3 @@ +Use :meth:`datetime.datetime.fromisocalendar` in the implementation of +:meth:`datetime.datetime.strptime`, which should now accept only valid ISO +dates. (Patch by Paul Ganssle) diff --git a/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst b/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst new file mode 100644 index 00000000000000..3bd370dabf4ed5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst @@ -0,0 +1,2 @@ +Deprecated :meth:`datetime.datetime.utcnow` and +:meth:`datetime.datetime.utcfromtimestamp`. 
(Patch by Paul Ganssle) diff --git a/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst b/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst new file mode 100644 index 00000000000000..c37d795f3eb33d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst @@ -0,0 +1,2 @@ +The C.UTF-8 locale is no longer converted to en_US.UTF-8, enabling the use +of UTF-8 encoding on systems which have no locales installed. diff --git a/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst b/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst new file mode 100644 index 00000000000000..80238a65e32a41 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst @@ -0,0 +1 @@ +Make :mod:`dis` display the value of oparg of :opcode:`KW_NAMES`. diff --git a/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst b/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst new file mode 100644 index 00000000000000..b840f9f5769f08 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst @@ -0,0 +1 @@ +Update the bundled copy of pip to version 23.1.2. diff --git a/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst b/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst new file mode 100644 index 00000000000000..8c92ee40831619 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst @@ -0,0 +1,2 @@ +Isolate :mod:`!_multibytecodec` and codecs extension modules. Patches by +Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst b/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst new file mode 100644 index 00000000000000..a05a6f5cbcdb99 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst @@ -0,0 +1 @@ +Module-level attributes ``January`` and ``February`` are deprecated from :mod:`calendar`. diff --git a/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst b/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst new file mode 100644 index 00000000000000..eaaca5b41ba5e2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst @@ -0,0 +1,2 @@ +Substitute CTRL-D with CTRL-Z in :mod:`sqlite3` CLI banner when running on +Windows. diff --git a/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst b/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst new file mode 100644 index 00000000000000..71b2d87249c47b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst @@ -0,0 +1 @@ +Use :func:`io.open_code` for files to be executed instead of raw :func:`open` diff --git a/Misc/NEWS.d/next/Library/2023-04-28-18-04-23.gh-issue-88773.xXCNJw.rst b/Misc/NEWS.d/next/Library/2023-04-28-18-04-23.gh-issue-88773.xXCNJw.rst new file mode 100644 index 00000000000000..f14c9533f3af87 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-28-18-04-23.gh-issue-88773.xXCNJw.rst @@ -0,0 +1 @@ +Added :func:`turtle.teleport` to the :mod:`turtle` module to move a turtle to a new point without tracing a line, visible or invisible. Patch by Liam Gersten. 
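Editor's note: the turtle.teleport entry above corresponds to the Lib/turtle.py additions earlier in this patch. A brief usage sketch following the new docstring (like any turtle example, it needs a Tk display to run):

```python
import turtle

t = turtle.Turtle()
t.begin_fill()
t.forward(80)
t.left(90)
t.forward(80)

# Jump without drawing a line; with the default fill_gap=False the polygon
# drawn so far is filled first and filling starts again at the new position.
t.teleport(-100, -100)

# With fill_gap=True the invisible jump acts as a fill barrier, as if the
# turtle had travelled there with goto().
t.teleport(x=0, fill_gap=True)    # only x given: the current y is kept

t.end_fill()
turtle.done()
```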
diff --git a/Misc/NEWS.d/next/Library/2023-04-28-19-08-50.gh-issue-103977.msF70A.rst b/Misc/NEWS.d/next/Library/2023-04-28-19-08-50.gh-issue-103977.msF70A.rst new file mode 100644 index 00000000000000..ff4005774a95d2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-28-19-08-50.gh-issue-103977.msF70A.rst @@ -0,0 +1 @@ +Improve import time of :mod:`platform` module. diff --git a/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst b/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst new file mode 100644 index 00000000000000..5bd005ffacb800 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst @@ -0,0 +1,3 @@ +Fix a potential ``[Errno 13] Permission denied`` when using :func:`shutil.copystat` +within Windows Subsystem for Linux (WSL) on a mounted filesystem by adding +``errno.EACCES`` to the list of ignored errors within the internal implementation. diff --git a/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst b/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst new file mode 100644 index 00000000000000..f274d3b898f15d --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst @@ -0,0 +1 @@ +update curses textbox to additionally handle backspace using the ``curses.ascii.DEL`` key press. diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 2476dca6f58ebf..82dbc087322aa9 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -2069,8 +2069,10 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, Py_XSETREF(self->task_coro, coro); if (name == Py_None) { - name = PyUnicode_FromFormat("Task-%" PRIu64, - ++state->task_name_counter); + // optimization: defer task name formatting + // store the task counter as PyLong in the name + // for deferred formatting in get_name + name = PyLong_FromUnsignedLongLong(++state->task_name_counter); } else if (!PyUnicode_CheckExact(name)) { name = PyObject_Str(name); } else { @@ -2449,6 +2451,13 @@ _asyncio_Task_get_name_impl(TaskObj *self) /*[clinic end generated code: output=0ecf1570c3b37a8f input=a4a6595d12f4f0f8]*/ { if (self->task_name) { + if (PyLong_CheckExact(self->task_name)) { + PyObject *name = PyUnicode_FromFormat("Task-%S", self->task_name); + if (name == NULL) { + return NULL; + } + Py_SETREF(self->task_name, name); + } return Py_NewRef(self->task_name); } diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 6f92ca08dd537b..c7ed6bd2229c79 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -126,6 +126,8 @@ bytes(cdata) #include "pycore_long.h" // _PyLong_GetZero() +ctypes_state global_state; + PyObject *PyExc_ArgError = NULL; /* This dict maps ctypes types to POINTER types */ @@ -150,13 +152,32 @@ typedef struct { PyObject *dict; } DictRemoverObject; +static int +_DictRemover_traverse(DictRemoverObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->key); + Py_VISIT(self->dict); + return 0; +} + +static int +_DictRemover_clear(DictRemoverObject *self) +{ + Py_CLEAR(self->key); + Py_CLEAR(self->dict); + return 0; +} + static void _DictRemover_dealloc(PyObject *myself) { + PyTypeObject *tp = Py_TYPE(myself); DictRemoverObject *self = (DictRemoverObject *)myself; - Py_XDECREF(self->key); - Py_XDECREF(self->dict); - Py_TYPE(self)->tp_free(myself); + PyObject_GC_UnTrack(myself); + (void)_DictRemover_clear(self); + tp->tp_free(myself); + Py_DECREF(tp); } static PyObject * @@ -173,47 
+194,23 @@ _DictRemover_call(PyObject *myself, PyObject *args, PyObject *kw) Py_RETURN_NONE; } -static PyTypeObject DictRemover_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.DictRemover", /* tp_name */ - sizeof(DictRemoverObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - _DictRemover_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - _DictRemover_call, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ -/* XXX should participate in GC? */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - PyDoc_STR("deletes a key from a dictionary"), /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ - 0, /* tp_free */ +PyDoc_STRVAR(dictremover_doc, "deletes a key from a dictionary"); + +static PyType_Slot dictremover_slots[] = { + {Py_tp_dealloc, _DictRemover_dealloc}, + {Py_tp_traverse, _DictRemover_traverse}, + {Py_tp_clear, _DictRemover_clear}, + {Py_tp_call, _DictRemover_call}, + {Py_tp_doc, (void *)dictremover_doc}, + {0, NULL}, +}; + +static PyType_Spec dictremover_spec = { + .name = "_ctypes.DictRemover", + .basicsize = sizeof(DictRemoverObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = dictremover_slots, }; int @@ -224,7 +221,8 @@ PyDict_SetItemProxy(PyObject *dict, PyObject *key, PyObject *item) PyObject *proxy; int result; - obj = _PyObject_CallNoArgs((PyObject *)&DictRemover_Type); + ctypes_state *st = GLOBAL_STATE(); + obj = _PyObject_CallNoArgs((PyObject *)st->DictRemover_Type); if (obj == NULL) return -1; @@ -415,23 +413,45 @@ typedef struct { PyObject *keep; // If set, a reference to the original CDataObject. 
} StructParamObject; +static int +StructParam_traverse(StructParamObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + +static int +StructParam_clear(StructParamObject *self) +{ + Py_CLEAR(self->keep); + return 0; +} static void StructParam_dealloc(PyObject *myself) { StructParamObject *self = (StructParamObject *)myself; - Py_XDECREF(self->keep); + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(myself); + (void)StructParam_clear(self); PyMem_Free(self->ptr); - Py_TYPE(self)->tp_free(myself); + tp->tp_free(myself); + Py_DECREF(tp); } +static PyType_Slot structparam_slots[] = { + {Py_tp_traverse, StructParam_traverse}, + {Py_tp_clear, StructParam_clear}, + {Py_tp_dealloc, StructParam_dealloc}, + {0, NULL}, +}; -static PyTypeObject StructParam_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_name = "_ctypes.StructParam_Type", - .tp_basicsize = sizeof(StructParamObject), - .tp_dealloc = StructParam_dealloc, - .tp_flags = Py_TPFLAGS_DEFAULT, +static PyType_Spec structparam_spec = { + .name = "_ctypes.StructParam_Type", + .basicsize = sizeof(StructParamObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = structparam_slots, }; @@ -460,7 +480,9 @@ StructUnionType_paramfunc(CDataObject *self) /* Create a Python object which calls PyMem_Free(ptr) in its deallocator. The object will be destroyed at _ctypes_callproc() cleanup. */ - obj = (&StructParam_Type)->tp_alloc(&StructParam_Type, 0); + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *tp = st->StructParam_Type; + obj = tp->tp_alloc(tp, 0); if (obj == NULL) { PyMem_Free(ptr); return NULL; @@ -800,7 +822,8 @@ CDataType_from_param(PyObject *type, PyObject *value) if (res) { return Py_NewRef(value); } - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { PyCArgObject *p = (PyCArgObject *)value; PyObject *ob = p->obj; const char *ob_name; @@ -1683,7 +1706,8 @@ c_wchar_p_from_param(PyObject *type, PyObject *value) return Py_NewRef(value); } } - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { /* byref(c_char(...)) */ PyCArgObject *a = (PyCArgObject *)value; StgDictObject *dict = PyObject_stgdict(a->obj); @@ -1746,7 +1770,8 @@ c_char_p_from_param(PyObject *type, PyObject *value) return Py_NewRef(value); } } - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { /* byref(c_char(...)) */ PyCArgObject *a = (PyCArgObject *)value; StgDictObject *dict = PyObject_stgdict(a->obj); @@ -1847,7 +1872,8 @@ c_void_p_from_param(PyObject *type, PyObject *value) return Py_NewRef(value); } /* byref(...) */ - if (PyCArg_CheckExact(value)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, value)) { /* byref(c_xxx()) */ PyCArgObject *a = (PyCArgObject *)value; if (a->tag == 'P') { @@ -5635,12 +5661,22 @@ _ctypes_add_types(PyObject *mod) } \ } while (0) +#define CREATE_TYPE(MOD, TP, SPEC) do { \ + PyObject *type = PyType_FromMetaclass(NULL, MOD, SPEC, NULL); \ + if (type == NULL) { \ + return -1; \ + } \ + TP = (PyTypeObject *)type; \ +} while (0) + + ctypes_state *st = GLOBAL_STATE(); + /* Note: ob_type is the metatype (the 'type'), defaults to PyType_Type, tp_base is the base type, defaults to 'object' aka PyBaseObject_Type. 
*/ - TYPE_READY(&PyCArg_Type); - TYPE_READY(&PyCThunk_Type); + CREATE_TYPE(mod, st->PyCArg_Type, &carg_spec); + CREATE_TYPE(mod, st->PyCThunk_Type, &cthunk_spec); TYPE_READY(&PyCData_Type); /* StgDict is derived from PyDict_Type */ TYPE_READY_BASE(&PyCStgDict_Type, &PyDict_Type); @@ -5673,17 +5709,15 @@ _ctypes_add_types(PyObject *mod) * Simple classes */ - /* PyCField_Type is derived from PyBaseObject_Type */ - TYPE_READY(&PyCField_Type); + CREATE_TYPE(mod, st->PyCField_Type, &cfield_spec); /************************************************* * * Other stuff */ - DictRemover_Type.tp_new = PyType_GenericNew; - TYPE_READY(&DictRemover_Type); - TYPE_READY(&StructParam_Type); + CREATE_TYPE(mod, st->DictRemover_Type, &dictremover_spec); + CREATE_TYPE(mod, st->StructParam_Type, &structparam_spec); #ifdef MS_WIN32 TYPE_READY_BASE(&PyComError_Type, (PyTypeObject*)PyExc_Exception); @@ -5692,6 +5726,7 @@ _ctypes_add_types(PyObject *mod) #undef TYPE_READY #undef TYPE_READY_BASE #undef MOD_ADD_TYPE +#undef CREATE_TYPE return 0; } diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index bc8750091f65f3..8e694ba852c1d4 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -28,23 +28,11 @@ /**************************************************************/ -static void -CThunkObject_dealloc(PyObject *myself) -{ - CThunkObject *self = (CThunkObject *)myself; - PyObject_GC_UnTrack(self); - Py_XDECREF(self->converters); - Py_XDECREF(self->callable); - Py_XDECREF(self->restype); - if (self->pcl_write) - Py_ffi_closure_free(self->pcl_write); - PyObject_GC_Del(self); -} - static int CThunkObject_traverse(PyObject *myself, visitproc visit, void *arg) { CThunkObject *self = (CThunkObject *)myself; + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->converters); Py_VISIT(self->callable); Py_VISIT(self->restype); @@ -61,36 +49,35 @@ CThunkObject_clear(PyObject *myself) return 0; } -PyTypeObject PyCThunk_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.CThunkObject", - sizeof(CThunkObject), /* tp_basicsize */ - sizeof(ffi_type), /* tp_itemsize */ - CThunkObject_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("CThunkObject"), /* tp_doc */ - CThunkObject_traverse, /* tp_traverse */ - CThunkObject_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ +static void +CThunkObject_dealloc(PyObject *myself) +{ + CThunkObject *self = (CThunkObject *)myself; + PyTypeObject *tp = Py_TYPE(myself); + PyObject_GC_UnTrack(self); + (void)CThunkObject_clear(myself); + if (self->pcl_write) { + Py_ffi_closure_free(self->pcl_write); + } + PyObject_GC_Del(self); + Py_DECREF(tp); +} + +static PyType_Slot cthunk_slots[] = { + {Py_tp_doc, (void *)PyDoc_STR("CThunkObject")}, + {Py_tp_dealloc, CThunkObject_dealloc}, + {Py_tp_traverse, CThunkObject_traverse}, + {Py_tp_clear, CThunkObject_clear}, + {0, NULL}, +}; + +PyType_Spec cthunk_spec = { + .name = "_ctypes.CThunkObject", + .basicsize = sizeof(CThunkObject), + .itemsize = sizeof(ffi_type), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | 
Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = cthunk_slots, }; /**************************************************************/ @@ -320,7 +307,8 @@ static CThunkObject* CThunkObject_new(Py_ssize_t nargs) CThunkObject *p; Py_ssize_t i; - p = PyObject_GC_NewVar(CThunkObject, &PyCThunk_Type, nargs); + ctypes_state *st = GLOBAL_STATE(); + p = PyObject_GC_NewVar(CThunkObject, st->PyCThunk_Type, nargs); if (p == NULL) { return NULL; } @@ -357,7 +345,10 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, if (p == NULL) return NULL; - assert(CThunk_CheckExact((PyObject *)p)); +#ifdef Py_DEBUG + ctypes_state *st = GLOBAL_STATE(); + assert(CThunk_CheckExact(st, (PyObject *)p)); +#endif p->pcl_write = Py_ffi_closure_alloc(sizeof(ffi_closure), &p->pcl_exec); if (p->pcl_write == NULL) { diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 4438727332bc11..93bc784df5386f 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -469,21 +469,41 @@ PyCArgObject * PyCArgObject_new(void) { PyCArgObject *p; - p = PyObject_New(PyCArgObject, &PyCArg_Type); + ctypes_state *st = GLOBAL_STATE(); + p = PyObject_GC_New(PyCArgObject, st->PyCArg_Type); if (p == NULL) return NULL; p->pffi_type = NULL; p->tag = '\0'; p->obj = NULL; memset(&p->value, 0, sizeof(p->value)); + PyObject_GC_Track(p); return p; } +static int +PyCArg_traverse(PyCArgObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->obj); + return 0; +} + +static int +PyCArg_clear(PyCArgObject *self) +{ + Py_CLEAR(self->obj); + return 0; +} + static void PyCArg_dealloc(PyCArgObject *self) { - Py_XDECREF(self->obj); - PyObject_Free(self); + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(self); + (void)PyCArg_clear(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); } static int @@ -567,36 +587,21 @@ static PyMemberDef PyCArgType_members[] = { { NULL }, }; -PyTypeObject PyCArg_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "CArgObject", - sizeof(PyCArgObject), - 0, - (destructor)PyCArg_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)PyCArg_repr, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - 0, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - PyCArgType_members, /* tp_members */ +static PyType_Slot carg_slots[] = { + {Py_tp_dealloc, PyCArg_dealloc}, + {Py_tp_traverse, PyCArg_traverse}, + {Py_tp_clear, PyCArg_clear}, + {Py_tp_repr, PyCArg_repr}, + {Py_tp_members, PyCArgType_members}, + {0, NULL}, +}; + +PyType_Spec carg_spec = { + .name = "_ctypes.CArgObject", + .basicsize = sizeof(PyCArgObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = carg_slots, }; /****************************************************************/ @@ -669,7 +674,8 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) return 0; } - if (PyCArg_CheckExact(obj)) { + ctypes_state *st = GLOBAL_STATE(); + if (PyCArg_CheckExact(st, obj)) { PyCArgObject *carg = (PyCArgObject *)obj; pa->ffi_type = carg->pffi_type; pa->keep = Py_NewRef(obj); diff --git a/Modules/_ctypes/cfield.c 
b/Modules/_ctypes/cfield.c index 796a1bec966de1..128506a9eed920 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -61,7 +61,9 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, #define CONT_BITFIELD 2 #define EXPAND_BITFIELD 3 - self = (CFieldObject *)PyCField_Type.tp_alloc((PyTypeObject *)&PyCField_Type, 0); + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *tp = st->PyCField_Type; + self = (CFieldObject *)tp->tp_alloc(tp, 0); if (self == NULL) return NULL; dict = PyType_stgdict(desc); @@ -256,6 +258,7 @@ static PyGetSetDef PyCField_getset[] = { static int PyCField_traverse(CFieldObject *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->proto); return 0; } @@ -270,9 +273,11 @@ PyCField_clear(CFieldObject *self) static void PyCField_dealloc(PyObject *self) { + PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - PyCField_clear((CFieldObject *)self); + (void)PyCField_clear((CFieldObject *)self); Py_TYPE(self)->tp_free((PyObject *)self); + Py_DECREF(tp); } static PyObject * @@ -296,46 +301,24 @@ PyCField_repr(CFieldObject *self) return result; } -PyTypeObject PyCField_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.CField", /* tp_name */ - sizeof(CFieldObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - PyCField_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)PyCField_repr, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("Structure/Union member"), /* tp_doc */ - (traverseproc)PyCField_traverse, /* tp_traverse */ - (inquiry)PyCField_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - PyCField_getset, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - (descrgetfunc)PyCField_get, /* tp_descr_get */ - (descrsetfunc)PyCField_set, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot cfield_slots[] = { + {Py_tp_dealloc, PyCField_dealloc}, + {Py_tp_repr, PyCField_repr}, + {Py_tp_doc, (void *)PyDoc_STR("Structure/Union member")}, + {Py_tp_traverse, PyCField_traverse}, + {Py_tp_clear, PyCField_clear}, + {Py_tp_getset, PyCField_getset}, + {Py_tp_descr_get, PyCField_get}, + {Py_tp_descr_set, PyCField_set}, + {0, NULL}, +}; + +PyType_Spec cfield_spec = { + .name = "_ctypes.CField", + .basicsize = sizeof(CFieldObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = cfield_slots, }; diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index a7029b6e6da2b8..252d9da7dbb56d 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -32,6 +32,22 @@ #endif #endif +typedef struct { + PyTypeObject *DictRemover_Type; + PyTypeObject *PyCArg_Type; + PyTypeObject *PyCField_Type; + PyTypeObject *PyCThunk_Type; + PyTypeObject *StructParam_Type; +} ctypes_state; + +extern ctypes_state global_state; + +#define GLOBAL_STATE() (&global_state) + +extern PyType_Spec carg_spec; +extern PyType_Spec cfield_spec; +extern PyType_Spec cthunk_spec; + typedef struct tagPyCArgObject PyCArgObject; typedef struct tagCDataObject 
CDataObject; typedef PyObject *(* GETFUNC)(void *, Py_ssize_t size); @@ -88,8 +104,7 @@ typedef struct { ffi_type *ffi_restype; ffi_type *atypes[1]; } CThunkObject; -extern PyTypeObject PyCThunk_Type; -#define CThunk_CheckExact(v) Py_IS_TYPE(v, &PyCThunk_Type) +#define CThunk_CheckExact(st, v) Py_IS_TYPE(v, st->PyCThunk_Type) typedef struct { /* First part identical to tagCDataObject */ @@ -141,7 +156,6 @@ extern PyTypeObject PyCSimpleType_Type; #define PyCSimpleTypeObject_CheckExact(v) Py_IS_TYPE(v, &PyCSimpleType_Type) #define PyCSimpleTypeObject_Check(v) PyObject_TypeCheck(v, &PyCSimpleType_Type) -extern PyTypeObject PyCField_Type; extern struct fielddesc *_ctypes_get_fielddesc(const char *fmt); @@ -334,8 +348,7 @@ struct tagPyCArgObject { Py_ssize_t size; /* for the 'V' tag */ }; -extern PyTypeObject PyCArg_Type; -#define PyCArg_CheckExact(v) Py_IS_TYPE(v, &PyCArg_Type) +#define PyCArg_CheckExact(st, v) Py_IS_TYPE(v, st->PyCArg_Type) extern PyCArgObject *PyCArgObject_new(void); extern PyObject * diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index 83a52757d60979..b1b2bac1455e67 100644 --- a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -225,6 +225,8 @@ MakeFields(PyObject *type, CFieldObject *descr, if (fieldlist == NULL) return -1; + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *cfield_tp = st->PyCField_Type; for (i = 0; i < PySequence_Fast_GET_SIZE(fieldlist); ++i) { PyObject *pair = PySequence_Fast_GET_ITEM(fieldlist, i); /* borrowed */ PyObject *fname, *ftype, *bits; @@ -240,7 +242,7 @@ MakeFields(PyObject *type, CFieldObject *descr, Py_DECREF(fieldlist); return -1; } - if (!Py_IS_TYPE(fdescr, &PyCField_Type)) { + if (!Py_IS_TYPE(fdescr, cfield_tp)) { PyErr_SetString(PyExc_TypeError, "unexpected type"); Py_DECREF(fdescr); Py_DECREF(fieldlist); @@ -257,13 +259,13 @@ MakeFields(PyObject *type, CFieldObject *descr, } continue; } - new_descr = (CFieldObject *)PyCField_Type.tp_alloc((PyTypeObject *)&PyCField_Type, 0); + new_descr = (CFieldObject *)cfield_tp->tp_alloc(cfield_tp, 0); if (new_descr == NULL) { Py_DECREF(fdescr); Py_DECREF(fieldlist); return -1; } - assert(Py_IS_TYPE(new_descr, &PyCField_Type)); + assert(Py_IS_TYPE(new_descr, cfield_tp)); new_descr->size = fdescr->size; new_descr->offset = fdescr->offset + offset; new_descr->index = fdescr->index + index; @@ -304,6 +306,8 @@ MakeAnonFields(PyObject *type) if (anon_names == NULL) return -1; + ctypes_state *st = GLOBAL_STATE(); + PyTypeObject *cfield_tp = st->PyCField_Type; for (i = 0; i < PySequence_Fast_GET_SIZE(anon_names); ++i) { PyObject *fname = PySequence_Fast_GET_ITEM(anon_names, i); /* borrowed */ CFieldObject *descr = (CFieldObject *)PyObject_GetAttr(type, fname); @@ -311,7 +315,7 @@ MakeAnonFields(PyObject *type) Py_DECREF(anon_names); return -1; } - if (!Py_IS_TYPE(descr, &PyCField_Type)) { + if (!Py_IS_TYPE(descr, cfield_tp)) { PyErr_Format(PyExc_AttributeError, "'%U' is specified in _anonymous_ but not in " "_fields_", diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index f317dc14e15bf1..8f86fc91966205 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -5144,6 +5144,13 @@ datetime_datetime_now_impl(PyTypeObject *type, PyObject *tz) static PyObject * datetime_utcnow(PyObject *cls, PyObject *dummy) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "datetime.utcnow() is deprecated and scheduled for removal in a " + "future version. 
Use timezone-aware objects to represent datetimes " + "in UTC: datetime.now(datetime.UTC).", 2)) + { + return NULL; + } return datetime_best_possible(cls, _PyTime_gmtime, Py_None); } @@ -5180,6 +5187,13 @@ datetime_fromtimestamp(PyObject *cls, PyObject *args, PyObject *kw) static PyObject * datetime_utcfromtimestamp(PyObject *cls, PyObject *args) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "datetime.utcfromtimestamp() is deprecated and scheduled for removal " + "in a future version. Use timezone-aware objects to represent " + "datetimes in UTC: datetime.fromtimestamp(timestamp, datetime.UTC).", 2)) + { + return NULL; + } PyObject *timestamp; PyObject *result = NULL; diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c index 7f4f1d939fb7e9..a3bfbc9ac5a1b1 100644 --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -671,13 +671,11 @@ static PyTypeObject* static_types[] = { PyStatus _PyIO_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - // Set type base classes #ifdef HAVE_WINDOWS_CONSOLE_IO - PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type; + if (_Py_IsMainInterpreter(interp)) { + // Set type base classes + PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type; + } #endif for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) { diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h index 4c3fd1bd27411b..182754cca36d61 100644 --- a/Modules/_sqlite/clinic/connection.c.h +++ b/Modules/_sqlite/clinic/connection.c.h @@ -846,30 +846,63 @@ pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *a #if defined(PY_SQLITE_ENABLE_LOAD_EXTENSION) PyDoc_STRVAR(pysqlite_connection_load_extension__doc__, -"load_extension($self, name, /)\n" +"load_extension($self, name, /, *, entrypoint=None)\n" "--\n" "\n" "Load SQLite extension module."); #define PYSQLITE_CONNECTION_LOAD_EXTENSION_METHODDEF \ - {"load_extension", (PyCFunction)pysqlite_connection_load_extension, METH_O, pysqlite_connection_load_extension__doc__}, + {"load_extension", _PyCFunction_CAST(pysqlite_connection_load_extension), METH_FASTCALL|METH_KEYWORDS, pysqlite_connection_load_extension__doc__}, static PyObject * pysqlite_connection_load_extension_impl(pysqlite_Connection *self, - const char *extension_name); + const char *extension_name, + const char *entrypoint); static PyObject * -pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *arg) +pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(entrypoint), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "entrypoint", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "load_extension", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; const char *extension_name; + const char *entrypoint = NULL; - if (!PyUnicode_Check(arg)) { - _PyArg_BadArgument("load_extension", "argument", "str", arg); + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("load_extension", "argument 1", "str", args[0]); goto exit; } Py_ssize_t extension_name_length; - extension_name = PyUnicode_AsUTF8AndSize(arg, &extension_name_length); + extension_name = PyUnicode_AsUTF8AndSize(args[0], &extension_name_length); if (extension_name == NULL) { goto exit; } @@ -877,7 +910,29 @@ pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = pysqlite_connection_load_extension_impl(self, extension_name); + if (!noptargs) { + goto skip_optional_kwonly; + } + if (args[1] == Py_None) { + entrypoint = NULL; + } + else if (PyUnicode_Check(args[1])) { + Py_ssize_t entrypoint_length; + entrypoint = PyUnicode_AsUTF8AndSize(args[1], &entrypoint_length); + if (entrypoint == NULL) { + goto exit; + } + if (strlen(entrypoint) != (size_t)entrypoint_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + } + else { + _PyArg_BadArgument("load_extension", "argument 'entrypoint'", "str or None", args[1]); + goto exit; + } +skip_optional_kwonly: + return_value = pysqlite_connection_load_extension_impl(self, extension_name, entrypoint); exit: return return_value; @@ -1513,6 +1568,85 @@ getlimit(pysqlite_Connection *self, PyObject *arg) return return_value; } +PyDoc_STRVAR(setconfig__doc__, +"setconfig($self, op, enable=True, /)\n" +"--\n" +"\n" +"Set a boolean connection configuration option.\n" +"\n" +" op\n" +" The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes."); + +#define SETCONFIG_METHODDEF \ + {"setconfig", _PyCFunction_CAST(setconfig), METH_FASTCALL, setconfig__doc__}, + +static PyObject * +setconfig_impl(pysqlite_Connection *self, int op, int enable); + +static PyObject * +setconfig(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int op; + int enable = 1; + + if (!_PyArg_CheckPositional("setconfig", nargs, 1, 2)) { + goto exit; + } + op = _PyLong_AsInt(args[0]); + if (op == -1 && PyErr_Occurred()) { + goto exit; + } + if (nargs < 2) { + goto skip_optional; + } + enable = PyObject_IsTrue(args[1]); + if (enable < 0) { + goto exit; + } +skip_optional: + return_value = setconfig_impl(self, op, enable); + +exit: + return return_value; +} + +PyDoc_STRVAR(getconfig__doc__, +"getconfig($self, op, /)\n" +"--\n" +"\n" +"Query a boolean connection configuration option.\n" +"\n" +" op\n" +" The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes."); + +#define GETCONFIG_METHODDEF \ + {"getconfig", (PyCFunction)getconfig, METH_O, getconfig__doc__}, + +static int +getconfig_impl(pysqlite_Connection *self, int op); + +static PyObject * +getconfig(pysqlite_Connection *self, PyObject *arg) +{ + PyObject *return_value = NULL; + int op; + int _return_value; + + op = _PyLong_AsInt(arg); + if (op == -1 && PyErr_Occurred()) { + goto exit; + } + _return_value = getconfig_impl(self, op); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + #ifndef CREATE_WINDOW_FUNCTION_METHODDEF #define 
CREATE_WINDOW_FUNCTION_METHODDEF #endif /* !defined(CREATE_WINDOW_FUNCTION_METHODDEF) */ @@ -1532,4 +1666,4 @@ getlimit(pysqlite_Connection *self, PyObject *arg) #ifndef DESERIALIZE_METHODDEF #define DESERIALIZE_METHODDEF #endif /* !defined(DESERIALIZE_METHODDEF) */ -/*[clinic end generated code: output=f10306e10427488b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8b03149c115ee6da input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index fb61ef82ef869b..aec3aa8bbf4ed8 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -30,6 +30,8 @@ #include "prepare_protocol.h" #include "util.h" +#include <stdbool.h> + #if SQLITE_VERSION_NUMBER >= 3014000 #define HAVE_TRACE_V2 #endif @@ -1601,14 +1603,17 @@ _sqlite3.Connection.load_extension as pysqlite_connection_load_extension name as extension_name: str / + * + entrypoint: str(accept={str, NoneType}) = None Load SQLite extension module. [clinic start generated code]*/ static PyObject * pysqlite_connection_load_extension_impl(pysqlite_Connection *self, - const char *extension_name) -/*[clinic end generated code: output=47eb1d7312bc97a7 input=edd507389d89d621]*/ + const char *extension_name, + const char *entrypoint) +/*[clinic end generated code: output=7e61a7add9de0286 input=c36b14ea702e04f5]*/ { int rc; char* errmsg; @@ -1621,7 +1626,7 @@ pysqlite_connection_load_extension_impl(pysqlite_Connection *self, return NULL; } - rc = sqlite3_load_extension(self->db, extension_name, 0, &errmsg); + rc = sqlite3_load_extension(self->db, extension_name, entrypoint, &errmsg); if (rc != 0) { PyErr_SetString(self->OperationalError, errmsg); return NULL; @@ -2340,6 +2345,119 @@ getlimit_impl(pysqlite_Connection *self, int category) return setlimit_impl(self, category, -1); } +static inline bool +is_int_config(const int op) +{ + switch (op) { + case SQLITE_DBCONFIG_ENABLE_FKEY: + case SQLITE_DBCONFIG_ENABLE_TRIGGER: +#if SQLITE_VERSION_NUMBER >= 3012002 + case SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: +#endif +#if SQLITE_VERSION_NUMBER >= 3013000 + case SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION: +#endif +#if SQLITE_VERSION_NUMBER >= 3016000 + case SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: +#endif +#if SQLITE_VERSION_NUMBER >= 3020000 + case SQLITE_DBCONFIG_ENABLE_QPSG: +#endif +#if SQLITE_VERSION_NUMBER >= 3022000 + case SQLITE_DBCONFIG_TRIGGER_EQP: +#endif +#if SQLITE_VERSION_NUMBER >= 3024000 + case SQLITE_DBCONFIG_RESET_DATABASE: +#endif +#if SQLITE_VERSION_NUMBER >= 3026000 + case SQLITE_DBCONFIG_DEFENSIVE: +#endif +#if SQLITE_VERSION_NUMBER >= 3028000 + case SQLITE_DBCONFIG_WRITABLE_SCHEMA: +#endif +#if SQLITE_VERSION_NUMBER >= 3029000 + case SQLITE_DBCONFIG_DQS_DDL: + case SQLITE_DBCONFIG_DQS_DML: + case SQLITE_DBCONFIG_LEGACY_ALTER_TABLE: +#endif +#if SQLITE_VERSION_NUMBER >= 3030000 + case SQLITE_DBCONFIG_ENABLE_VIEW: +#endif +#if SQLITE_VERSION_NUMBER >= 3031000 + case SQLITE_DBCONFIG_LEGACY_FILE_FORMAT: + case SQLITE_DBCONFIG_TRUSTED_SCHEMA: +#endif + return true; + default: + return false; + } +} + +/*[clinic input] +_sqlite3.Connection.setconfig as setconfig + + op: int + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + enable: bool = True + / + +Set a boolean connection configuration option. 
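/*
 * Illustration only (not part of the patch): the setconfig()/getconfig()
 * methods documented above are thin wrappers around sqlite3_db_config().
 * For the boolean SQLITE_DBCONFIG_* verbs that call takes the value to
 * apply (0 or 1, or -1 to leave the option unchanged) plus an int* that
 * receives the value actually in effect.  `db` is assumed to be an open
 * sqlite3 handle; the helper names are hypothetical.
 */
#include <sqlite3.h>

static int
query_bool_option(sqlite3 *db, int op)
{
    int current = 0;
    /* -1 means "do not change", so this is a pure query. */
    if (sqlite3_db_config(db, op, -1, &current) != SQLITE_OK) {
        return -1;
    }
    return current;                 /* 0 (disabled) or 1 (enabled) */
}

static int
set_bool_option(sqlite3 *db, int op, int enable)
{
    int actual = 0;
    if (sqlite3_db_config(db, op, enable ? 1 : 0, &actual) != SQLITE_OK) {
        return -1;
    }
    /* SQLite reports the resulting value; a mismatch means the option could
     * not be changed, which setconfig_impl() below turns into an
     * OperationalError. */
    return (actual == (enable ? 1 : 0)) ? 0 : -1;
}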
+[clinic start generated code]*/ + +static PyObject * +setconfig_impl(pysqlite_Connection *self, int op, int enable) +/*[clinic end generated code: output=c60b13e618aff873 input=a10f1539c2d7da6b]*/ +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return NULL; + } + if (!is_int_config(op)) { + return PyErr_Format(PyExc_ValueError, "unknown config 'op': %d", op); + } + + int actual; + int rc = sqlite3_db_config(self->db, op, enable, &actual); + if (rc != SQLITE_OK) { + (void)_pysqlite_seterror(self->state, self->db); + return NULL; + } + if (enable != actual) { + PyErr_SetString(self->state->OperationalError, "Unable to set config"); + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +_sqlite3.Connection.getconfig as getconfig -> bool + + op: int + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + / + +Query a boolean connection configuration option. +[clinic start generated code]*/ + +static int +getconfig_impl(pysqlite_Connection *self, int op) +/*[clinic end generated code: output=25ac05044c7b78a3 input=b0526d7e432e3f2f]*/ +{ + if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { + return -1; + } + if (!is_int_config(op)) { + PyErr_Format(PyExc_ValueError, "unknown config 'op': %d", op); + return -1; + } + + int current; + int rc = sqlite3_db_config(self->db, op, -1, &current); + if (rc != SQLITE_OK) { + (void)_pysqlite_seterror(self->state, self->db); + return -1; + } + return current; +} static PyObject * get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) @@ -2421,6 +2539,8 @@ static PyMethodDef connection_methods[] = { DESERIALIZE_METHODDEF CREATE_WINDOW_FUNCTION_METHODDEF BLOBOPEN_METHODDEF + SETCONFIG_METHODDEF + GETCONFIG_METHODDEF {NULL, NULL} }; diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 6db3d51fd20220..9c42faa232c70d 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -499,6 +499,49 @@ add_integer_constants(PyObject *module) { #if SQLITE_VERSION_NUMBER >= 3008007 ADD_INT(SQLITE_LIMIT_WORKER_THREADS); #endif + + /* + * Database connection configuration options. 
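/*
 * Hypothetical usage sketch, not part of the patch: once the module exports
 * the SQLITE_DBCONFIG_* constants added below, they can be passed as `op`
 * to the new Connection.setconfig()/getconfig() methods.  From an embedding
 * application the call could look like this (`con` is assumed to be a
 * sqlite3.Connection object, `sqlite3_mod` the imported sqlite3 module).
 */
#include <Python.h>

static int
enable_foreign_keys(PyObject *con, PyObject *sqlite3_mod)
{
    PyObject *op = PyObject_GetAttrString(sqlite3_mod, "SQLITE_DBCONFIG_ENABLE_FKEY");
    if (op == NULL) {
        return -1;
    }
    /* Equivalent to: con.setconfig(sqlite3.SQLITE_DBCONFIG_ENABLE_FKEY) */
    PyObject *res = PyObject_CallMethod(con, "setconfig", "O", op);
    Py_DECREF(op);
    if (res == NULL) {
        return -1;
    }
    Py_DECREF(res);
    return 0;
}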
+ * See https://www.sqlite.org/c3ref/c_dbconfig_defensive.html + */ + ADD_INT(SQLITE_DBCONFIG_ENABLE_FKEY); + ADD_INT(SQLITE_DBCONFIG_ENABLE_TRIGGER); +#if SQLITE_VERSION_NUMBER >= 3012002 + ADD_INT(SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER); +#endif +#if SQLITE_VERSION_NUMBER >= 3013000 + ADD_INT(SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION); +#endif +#if SQLITE_VERSION_NUMBER >= 3016000 + ADD_INT(SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE); +#endif +#if SQLITE_VERSION_NUMBER >= 3020000 + ADD_INT(SQLITE_DBCONFIG_ENABLE_QPSG); +#endif +#if SQLITE_VERSION_NUMBER >= 3022000 + ADD_INT(SQLITE_DBCONFIG_TRIGGER_EQP); +#endif +#if SQLITE_VERSION_NUMBER >= 3024000 + ADD_INT(SQLITE_DBCONFIG_RESET_DATABASE); +#endif +#if SQLITE_VERSION_NUMBER >= 3026000 + ADD_INT(SQLITE_DBCONFIG_DEFENSIVE); +#endif +#if SQLITE_VERSION_NUMBER >= 3028000 + ADD_INT(SQLITE_DBCONFIG_WRITABLE_SCHEMA); +#endif +#if SQLITE_VERSION_NUMBER >= 3029000 + ADD_INT(SQLITE_DBCONFIG_DQS_DDL); + ADD_INT(SQLITE_DBCONFIG_DQS_DML); + ADD_INT(SQLITE_DBCONFIG_LEGACY_ALTER_TABLE); +#endif +#if SQLITE_VERSION_NUMBER >= 3030000 + ADD_INT(SQLITE_DBCONFIG_ENABLE_VIEW); +#endif +#if SQLITE_VERSION_NUMBER >= 3031000 + ADD_INT(SQLITE_DBCONFIG_LEGACY_FILE_FORMAT); + ADD_INT(SQLITE_DBCONFIG_TRUSTED_SCHEMA); +#endif #undef ADD_INT return 0; } diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index e34854f7025798..cf8990a2df0a9b 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -884,15 +884,3 @@ PyInit__test_module_state_shared(void) } return module; } - - -/*** Helper for imp test ***/ - -static PyModuleDef imp_dummy_def = TEST_MODULE_DEF("imp_dummy", main_slots, testexport_methods); - -PyMODINIT_FUNC -PyInit_imp_dummy(void) -{ - return PyModuleDef_Init(&imp_dummy_def); -} - diff --git a/Modules/cjkcodecs/_codecs_hk.c b/Modules/cjkcodecs/_codecs_hk.c index 43593b873733e6..e7273bf18e3494 100644 --- a/Modules/cjkcodecs/_codecs_hk.c +++ b/Modules/cjkcodecs/_codecs_hk.c @@ -6,6 +6,10 @@ #define USING_IMPORTED_MAPS +#define CJK_MOD_SPECIFIC_STATE \ + const encode_map *big5_encmap; \ + const decode_map *big5_decmap; + #include "cjkcodecs.h" #include "mappings_hk.h" @@ -13,16 +17,12 @@ * BIG5HKSCS codec */ -static const encode_map *big5_encmap = NULL; -static const decode_map *big5_decmap = NULL; - CODEC_INIT(big5hkscs) { - static int initialized = 0; - - if (!initialized && IMPORT_MAP(tw, big5, &big5_encmap, &big5_decmap)) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(tw, big5, &st->big5_encmap, &st->big5_decmap)) { return -1; - initialized = 1; + } return 0; } @@ -81,7 +81,7 @@ ENCODER(big5hkscs) } } } - else if (TRYMAP_ENC(big5, code, c)) + else if (TRYMAP_ENC_ST(big5, code, c)) ; else return 1; @@ -122,7 +122,7 @@ DECODER(big5hkscs) REQUIRE_INBUF(2); if (0xc6 > c || c > 0xc8 || (c < 0xc7 && INBYTE2 < 0xa1)) { - if (TRYMAP_DEC(big5, decoded, c, INBYTE2)) { + if (TRYMAP_DEC_ST(big5, decoded, c, INBYTE2)) { OUTCHAR(decoded); NEXT_IN(2); continue; diff --git a/Modules/cjkcodecs/_codecs_iso2022.c b/Modules/cjkcodecs/_codecs_iso2022.c index cf34752e16a527..86bb73b982a551 100644 --- a/Modules/cjkcodecs/_codecs_iso2022.c +++ b/Modules/cjkcodecs/_codecs_iso2022.c @@ -10,6 +10,27 @@ #define EMULATE_JISX0213_2000_ENCODE_INVALID MAP_UNMAPPABLE #define EMULATE_JISX0213_2000_DECODE_INVALID MAP_UNMAPPABLE +#define CJK_MOD_SPECIFIC_STATE \ + /* kr */ \ + const encode_map *cp949_encmap; \ + const decode_map *ksx1001_decmap; \ + \ + /* jp */ \ + const encode_map *jisxcommon_encmap; \ + const decode_map *jisx0208_decmap; \ + const 
decode_map *jisx0212_decmap; \ + const encode_map *jisx0213_bmp_encmap; \ + const decode_map *jisx0213_1_bmp_decmap; \ + const decode_map *jisx0213_2_bmp_decmap; \ + const encode_map *jisx0213_emp_encmap; \ + const decode_map *jisx0213_1_emp_decmap; \ + const decode_map *jisx0213_2_emp_decmap; \ + \ + /* cn */ \ + const encode_map *gbcommon_encmap; \ + const decode_map *gb2312_decmap; + + #include "cjkcodecs.h" #include "alg_jisx0201.h" #include "emu_jisx0213_2000.h" @@ -90,7 +111,7 @@ #define STATE_CLEARFLAG(f) do { ((state)->c[4]) &= ~(f); } while (0) #define STATE_CLEARFLAGS() do { ((state)->c[4]) = 0; } while (0) -#define ISO2022_CONFIG ((const struct iso2022_config *)config) +#define ISO2022_CONFIG ((const struct iso2022_config *)(codec->config)) #define CONFIG_ISSET(flag) (ISO2022_CONFIG->flags & (flag)) #define CONFIG_DESIGNATIONS (ISO2022_CONFIG->designations) @@ -101,9 +122,12 @@ /*-*- internal data structures -*-*/ -typedef int (*iso2022_init_func)(void); -typedef Py_UCS4 (*iso2022_decode_func)(const unsigned char *data); -typedef DBCHAR (*iso2022_encode_func)(const Py_UCS4 *data, Py_ssize_t *length); +typedef int (*iso2022_init_func)(const MultibyteCodec *codec); +typedef Py_UCS4 (*iso2022_decode_func)(const MultibyteCodec *codec, + const unsigned char *data); +typedef DBCHAR (*iso2022_encode_func)(const MultibyteCodec *codec, + const Py_UCS4 *data, + Py_ssize_t *length); struct iso2022_designation { unsigned char mark; @@ -124,9 +148,11 @@ struct iso2022_config { CODEC_INIT(iso2022) { const struct iso2022_designation *desig; - for (desig = CONFIG_DESIGNATIONS; desig->mark; desig++) - if (desig->initializer != NULL && desig->initializer() != 0) + for (desig = CONFIG_DESIGNATIONS; desig->mark; desig++) { + if (desig->initializer != NULL && desig->initializer(codec) != 0) { return -1; + } + } return 0; } @@ -182,7 +208,7 @@ ENCODER(iso2022) encoded = MAP_UNMAPPABLE; for (dsg = CONFIG_DESIGNATIONS; dsg->mark; dsg++) { Py_ssize_t length = 1; - encoded = dsg->encoder(&c, &length); + encoded = dsg->encoder(codec, &c, &length); if (encoded == MAP_MULTIPLE_AVAIL) { /* this implementation won't work for pair * of non-bmp characters. 
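/*
 * A condensed sketch (not part of the patch; the "example" names and the
 * "xx" map locale are hypothetical) of the per-module state pattern the
 * cjkcodecs changes above introduce: each codec file lists its map pointers
 * in CJK_MOD_SPECIFIC_STATE before including cjkcodecs.h, fills them in its
 * init hook via codec->modstate, and reads them through the *_ST macro
 * variants instead of file-static globals.
 */
#define CJK_MOD_SPECIFIC_STATE \
    const encode_map *example_encmap; \
    const decode_map *example_decmap;

#include "cjkcodecs.h"

CODEC_INIT(example)
{
    cjkcodecs_module_state *st = codec->modstate;
    if (IMPORT_MAP(xx, example, &st->example_encmap, &st->example_decmap)) {
        return -1;
    }
    return 0;
}

DECODER(example)
{
    while (inleft > 0) {
        unsigned char c = INBYTE1;
        Py_UCS4 decoded;

        REQUIRE_INBUF(2);
        /* TRYMAP_DEC_ST resolves the map through codec->modstate. */
        if (TRYMAP_DEC_ST(example, decoded, c, INBYTE2)) {
            OUTCHAR(decoded);
            NEXT_IN(2);
        }
        else {
            return 1;   /* unmappable byte pair */
        }
    }
    return 0;
}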
*/ @@ -193,7 +219,7 @@ ENCODER(iso2022) } else length = 2; - encoded = dsg->encoder(&c, &length); + encoded = dsg->encoder(codec, &c, &length); if (encoded != MAP_UNMAPPABLE) { insize = length; break; @@ -288,7 +314,7 @@ DECODER_RESET(iso2022) } static Py_ssize_t -iso2022processesc(const void *config, MultibyteCodec_State *state, +iso2022processesc(const MultibyteCodec *codec, MultibyteCodec_State *state, const unsigned char **inbuf, Py_ssize_t *inleft) { unsigned char charset, designation; @@ -388,7 +414,7 @@ iso2022processesc(const void *config, MultibyteCodec_State *state, } static Py_ssize_t -iso2022processg2(const void *config, MultibyteCodec_State *state, +iso2022processg2(const MultibyteCodec *codec, MultibyteCodec_State *state, const unsigned char **inbuf, Py_ssize_t *inleft, _PyUnicodeWriter *writer) { @@ -442,14 +468,14 @@ DECODER(iso2022) case ESC: REQUIRE_INBUF(2); if (IS_ISO2022ESC(INBYTE2)) { - err = iso2022processesc(config, state, + err = iso2022processesc(codec, state, inbuf, &inleft); if (err != 0) return err; } else if (CONFIG_ISSET(USE_G2) && INBYTE2 == 'N') {/* SS2 */ REQUIRE_INBUF(3); - err = iso2022processg2(config, state, + err = iso2022processg2(codec, state, inbuf, &inleft, writer); if (err != 0) return err; @@ -517,7 +543,7 @@ DECODER(iso2022) } REQUIRE_INBUF(dsg->width); - decoded = dsg->decoder(*inbuf); + decoded = dsg->decoder(codec, *inbuf); if (decoded == MAP_UNMAPPABLE) return dsg->width; @@ -538,64 +564,38 @@ DECODER(iso2022) return 0; } -/*-*- mapping table holders -*-*/ - -#define ENCMAP(enc) static const encode_map *enc##_encmap = NULL; -#define DECMAP(enc) static const decode_map *enc##_decmap = NULL; - -/* kr */ -ENCMAP(cp949) -DECMAP(ksx1001) - -/* jp */ -ENCMAP(jisxcommon) -DECMAP(jisx0208) -DECMAP(jisx0212) -ENCMAP(jisx0213_bmp) -DECMAP(jisx0213_1_bmp) -DECMAP(jisx0213_2_bmp) -ENCMAP(jisx0213_emp) -DECMAP(jisx0213_1_emp) -DECMAP(jisx0213_2_emp) - -/* cn */ -ENCMAP(gbcommon) -DECMAP(gb2312) - -/* tw */ - /*-*- mapping access functions -*-*/ static int -ksx1001_init(void) +ksx1001_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(kr, cp949, &cp949_encmap, NULL) || - IMPORT_MAP(kr, ksx1001, NULL, &ksx1001_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(kr, cp949, &st->cp949_encmap, NULL) || + IMPORT_MAP(kr, ksx1001, NULL, &st->ksx1001_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -ksx1001_decoder(const unsigned char *data) +ksx1001_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(ksx1001, u, data[0], data[1])) + if (TRYMAP_DEC_ST(ksx1001, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -ksx1001_encoder(const Py_UCS4 *data, Py_ssize_t *length) +ksx1001_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { - if (TRYMAP_ENC(cp949, coded, *data)) { + if (TRYMAP_ENC_ST(cp949, coded, *data)) { if (!(coded & 0x8000)) return coded; } @@ -604,39 +604,39 @@ ksx1001_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -jisx0208_init(void) +jisx0208_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(jp, jisxcommon, &jisxcommon_encmap, NULL) || - IMPORT_MAP(jp, jisx0208, NULL, &jisx0208_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(jp, jisxcommon, &st->jisxcommon_encmap, NULL) || + IMPORT_MAP(jp, jisx0208, 
NULL, &st->jisx0208_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -jisx0208_decoder(const unsigned char *data) +jisx0208_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */ return 0xff3c; - else if (TRYMAP_DEC(jisx0208, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -jisx0208_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0208_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { if (*data == 0xff3c) /* F/W REVERSE SOLIDUS */ return 0x2140; - else if (TRYMAP_ENC(jisxcommon, coded, *data)) { + else if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) { if (!(coded & 0x8000)) return coded; } @@ -645,35 +645,35 @@ jisx0208_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -jisx0212_init(void) +jisx0212_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(jp, jisxcommon, &jisxcommon_encmap, NULL) || - IMPORT_MAP(jp, jisx0212, NULL, &jisx0212_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(jp, jisxcommon, &st->jisxcommon_encmap, NULL) || + IMPORT_MAP(jp, jisx0212, NULL, &st->jisx0212_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -jisx0212_decoder(const unsigned char *data) +jisx0212_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(jisx0212, u, data[0], data[1])) + if (TRYMAP_DEC_ST(jisx0212, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -jisx0212_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0212_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { - if (TRYMAP_ENC(jisxcommon, coded, *data)) { + if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) { if (coded & 0x8000) return coded & 0x7fff; } @@ -682,44 +682,37 @@ jisx0212_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -jisx0213_init(void) +jisx0213_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - jisx0208_init() || - IMPORT_MAP(jp, jisx0213_bmp, - &jisx0213_bmp_encmap, NULL) || - IMPORT_MAP(jp, jisx0213_1_bmp, - NULL, &jisx0213_1_bmp_decmap) || - IMPORT_MAP(jp, jisx0213_2_bmp, - NULL, &jisx0213_2_bmp_decmap) || - IMPORT_MAP(jp, jisx0213_emp, - &jisx0213_emp_encmap, NULL) || - IMPORT_MAP(jp, jisx0213_1_emp, - NULL, &jisx0213_1_emp_decmap) || - IMPORT_MAP(jp, jisx0213_2_emp, - NULL, &jisx0213_2_emp_decmap) || - IMPORT_MAP(jp, jisx0213_pair, &jisx0213_pair_encmap, - &jisx0213_pair_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (jisx0208_init(codec) || + IMPORT_MAP(jp, jisx0213_bmp, &st->jisx0213_bmp_encmap, NULL) || + IMPORT_MAP(jp, jisx0213_1_bmp, NULL, &st->jisx0213_1_bmp_decmap) || + IMPORT_MAP(jp, jisx0213_2_bmp, NULL, &st->jisx0213_2_bmp_decmap) || + IMPORT_MAP(jp, jisx0213_emp, &st->jisx0213_emp_encmap, NULL) || + IMPORT_MAP(jp, jisx0213_1_emp, NULL, &st->jisx0213_1_emp_decmap) || + IMPORT_MAP(jp, jisx0213_2_emp, NULL, &st->jisx0213_2_emp_decmap) || + IMPORT_MAP(jp, jisx0213_pair, + &jisx0213_pair_encmap, &jisx0213_pair_decmap)) + { return -1; - initialized = 1; + } return 0; } #define config ((void *)2000) static Py_UCS4 -jisx0213_2000_1_decoder(const unsigned char *data) +jisx0213_2000_1_decoder(const 
MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - EMULATE_JISX0213_2000_DECODE_PLANE1(u, data[0], data[1]) + EMULATE_JISX0213_2000_DECODE_PLANE1(config, u, data[0], data[1]) else if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */ return 0xff3c; - else if (TRYMAP_DEC(jisx0208, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_bmp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_emp, u, data[0], data[1])) u |= 0x20000; else if (TRYMAP_DEC(jisx0213_pair, u, data[0], data[1])) ; @@ -729,13 +722,13 @@ jisx0213_2000_1_decoder(const unsigned char *data) } static Py_UCS4 -jisx0213_2000_2_decoder(const unsigned char *data) +jisx0213_2000_2_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(u, data[0], data[1]) - if (TRYMAP_DEC(jisx0213_2_bmp, u, data[0], data[1])) + EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(config, u, data[0], data[1]) + if (TRYMAP_DEC_ST(jisx0213_2_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_2_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_2_emp, u, data[0], data[1])) u |= 0x20000; else return MAP_UNMAPPABLE; @@ -744,16 +737,16 @@ jisx0213_2000_2_decoder(const unsigned char *data) #undef config static Py_UCS4 -jisx0213_2004_1_decoder(const unsigned char *data) +jisx0213_2004_1_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */ return 0xff3c; - else if (TRYMAP_DEC(jisx0208, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_bmp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_1_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_1_emp, u, data[0], data[1])) u |= 0x20000; else if (TRYMAP_DEC(jisx0213_pair, u, data[0], data[1])) ; @@ -763,12 +756,12 @@ jisx0213_2004_1_decoder(const unsigned char *data) } static Py_UCS4 -jisx0213_2004_2_decoder(const unsigned char *data) +jisx0213_2004_2_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(jisx0213_2_bmp, u, data[0], data[1])) + if (TRYMAP_DEC_ST(jisx0213_2_bmp, u, data[0], data[1])) ; - else if (TRYMAP_DEC(jisx0213_2_emp, u, data[0], data[1])) + else if (TRYMAP_DEC_ST(jisx0213_2_emp, u, data[0], data[1])) u |= 0x20000; else return MAP_UNMAPPABLE; @@ -776,7 +769,8 @@ jisx0213_2004_2_decoder(const unsigned char *data) } static DBCHAR -jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config) +jisx0213_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length, const void *config) { DBCHAR coded; @@ -784,19 +778,19 @@ jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config) case 1: /* first character */ if (*data >= 0x10000) { if ((*data) >> 16 == 0x20000 >> 16) { - EMULATE_JISX0213_2000_ENCODE_EMP(coded, *data) - else if (TRYMAP_ENC(jisx0213_emp, coded, (*data) & 0xffff)) + EMULATE_JISX0213_2000_ENCODE_EMP(config, coded, *data) + else if (TRYMAP_ENC_ST(jisx0213_emp, coded, (*data) & 0xffff)) return coded; } return MAP_UNMAPPABLE; } - EMULATE_JISX0213_2000_ENCODE_BMP(coded, *data) - else if (TRYMAP_ENC(jisx0213_bmp, coded, *data)) { + EMULATE_JISX0213_2000_ENCODE_BMP(config, coded, *data) + else if 
(TRYMAP_ENC_ST(jisx0213_bmp, coded, *data)) { if (coded == MULTIC) return MAP_MULTIPLE_AVAIL; } - else if (TRYMAP_ENC(jisxcommon, coded, *data)) { + else if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) { if (coded & 0x8000) return MAP_UNMAPPABLE; } @@ -827,9 +821,10 @@ jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config) } static DBCHAR -jisx0213_2000_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2000_1_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, (void *)2000); + DBCHAR coded = jisx0213_encoder(codec, data, length, (void *)2000); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -839,12 +834,13 @@ jisx0213_2000_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2000_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2000_1_encoder_paironly(const MultibyteCodec *codec, + const Py_UCS4 *data, Py_ssize_t *length) { DBCHAR coded; Py_ssize_t ilength = *length; - coded = jisx0213_encoder(data, length, (void *)2000); + coded = jisx0213_encoder(codec, data, length, (void *)2000); switch (ilength) { case 1: if (coded == MAP_MULTIPLE_AVAIL) @@ -862,9 +858,10 @@ jisx0213_2000_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2000_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2000_2_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, (void *)2000); + DBCHAR coded = jisx0213_encoder(codec, data, length, (void *)2000); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -874,9 +871,10 @@ jisx0213_2000_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2004_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2004_1_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, NULL); + DBCHAR coded = jisx0213_encoder(codec, data, length, NULL); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -886,12 +884,13 @@ jisx0213_2004_1_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2004_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2004_1_encoder_paironly(const MultibyteCodec *codec, + const Py_UCS4 *data, Py_ssize_t *length) { DBCHAR coded; Py_ssize_t ilength = *length; - coded = jisx0213_encoder(data, length, NULL); + coded = jisx0213_encoder(codec, data, length, NULL); switch (ilength) { case 1: if (coded == MAP_MULTIPLE_AVAIL) @@ -909,9 +908,10 @@ jisx0213_2004_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length) } static DBCHAR -jisx0213_2004_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0213_2004_2_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { - DBCHAR coded = jisx0213_encoder(data, length, NULL); + DBCHAR coded = jisx0213_encoder(codec, data, length, NULL); if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL) return coded; else if (coded & 0x8000) @@ -921,7 +921,7 @@ jisx0213_2004_2_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static Py_UCS4 -jisx0201_r_decoder(const unsigned char *data) +jisx0201_r_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; JISX0201_R_DECODE_CHAR(*data, u) @@ -931,7 +931,8 @@ jisx0201_r_decoder(const 
unsigned char *data) } static DBCHAR -jisx0201_r_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0201_r_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; JISX0201_R_ENCODE(*data, coded) @@ -941,7 +942,7 @@ jisx0201_r_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static Py_UCS4 -jisx0201_k_decoder(const unsigned char *data) +jisx0201_k_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; JISX0201_K_DECODE_CHAR(*data ^ 0x80, u) @@ -951,7 +952,8 @@ jisx0201_k_decoder(const unsigned char *data) } static DBCHAR -jisx0201_k_encoder(const Py_UCS4 *data, Py_ssize_t *length) +jisx0201_k_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; JISX0201_K_ENCODE(*data, coded) @@ -961,35 +963,35 @@ jisx0201_k_encoder(const Py_UCS4 *data, Py_ssize_t *length) } static int -gb2312_init(void) +gb2312_init(const MultibyteCodec *codec) { - static int initialized = 0; - - if (!initialized && ( - IMPORT_MAP(cn, gbcommon, &gbcommon_encmap, NULL) || - IMPORT_MAP(cn, gb2312, NULL, &gb2312_decmap))) + cjkcodecs_module_state *st = codec->modstate; + if (IMPORT_MAP(cn, gbcommon, &st->gbcommon_encmap, NULL) || + IMPORT_MAP(cn, gb2312, NULL, &st->gb2312_decmap)) + { return -1; - initialized = 1; + } return 0; } static Py_UCS4 -gb2312_decoder(const unsigned char *data) +gb2312_decoder(const MultibyteCodec *codec, const unsigned char *data) { Py_UCS4 u; - if (TRYMAP_DEC(gb2312, u, data[0], data[1])) + if (TRYMAP_DEC_ST(gb2312, u, data[0], data[1])) return u; else return MAP_UNMAPPABLE; } static DBCHAR -gb2312_encoder(const Py_UCS4 *data, Py_ssize_t *length) +gb2312_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { DBCHAR coded; assert(*length == 1); if (*data < 0x10000) { - if (TRYMAP_ENC(gbcommon, coded, *data)) { + if (TRYMAP_ENC_ST(gbcommon, coded, *data)) { if (!(coded & 0x8000)) return coded; } @@ -999,13 +1001,14 @@ gb2312_encoder(const Py_UCS4 *data, Py_ssize_t *length) static Py_UCS4 -dummy_decoder(const unsigned char *data) +dummy_decoder(const MultibyteCodec *codec, const unsigned char *data) { return MAP_UNMAPPABLE; } static DBCHAR -dummy_encoder(const Py_UCS4 *data, Py_ssize_t *length) +dummy_encoder(const MultibyteCodec *codec, const Py_UCS4 *data, + Py_ssize_t *length) { return MAP_UNMAPPABLE; } diff --git a/Modules/cjkcodecs/_codecs_jp.c b/Modules/cjkcodecs/_codecs_jp.c index 7a8b78a23592ea..f7127487aa5f59 100644 --- a/Modules/cjkcodecs/_codecs_jp.c +++ b/Modules/cjkcodecs/_codecs_jp.c @@ -164,7 +164,7 @@ ENCODER(euc_jis_2004) insize = 1; if (c <= 0xFFFF) { - EMULATE_JISX0213_2000_ENCODE_BMP(code, c) + EMULATE_JISX0213_2000_ENCODE_BMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_bmp, code, c)) { if (code == MULTIC) { if (inlen - *inpos < 2) { @@ -215,7 +215,7 @@ ENCODER(euc_jis_2004) return 1; } else if (c >> 16 == EMPBASE >> 16) { - EMULATE_JISX0213_2000_ENCODE_EMP(code, c) + EMULATE_JISX0213_2000_ENCODE_EMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_emp, code, c & 0xffff)) ; else @@ -271,7 +271,7 @@ DECODER(euc_jis_2004) c3 = INBYTE3 ^ 0x80; /* JIS X 0213 Plane 2 or JIS X 0212 (see NOTES) */ - EMULATE_JISX0213_2000_DECODE_PLANE2(writer, c2, c3) + EMULATE_JISX0213_2000_DECODE_PLANE2(codec->config, writer, c2, c3) else if (TRYMAP_DEC(jisx0213_2_bmp, decoded, c2, c3)) OUTCHAR(decoded); else if (TRYMAP_DEC(jisx0213_2_emp, code, c2, c3)) { @@ -293,7 +293,7 @@ DECODER(euc_jis_2004) c2 = INBYTE2 ^ 0x80; /* JIS X 0213 Plane 1 */ - 
EMULATE_JISX0213_2000_DECODE_PLANE1(writer, c, c2) + EMULATE_JISX0213_2000_DECODE_PLANE1(codec->config, writer, c, c2) else if (c == 0x21 && c2 == 0x40) OUTCHAR(0xff3c); else if (c == 0x22 && c2 == 0x32) @@ -582,7 +582,7 @@ ENCODER(shift_jis_2004) if (code == NOCHAR) { if (c <= 0xffff) { - EMULATE_JISX0213_2000_ENCODE_BMP(code, c) + EMULATE_JISX0213_2000_ENCODE_BMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_bmp, code, c)) { if (code == MULTIC) { if (inlen - *inpos < 2) { @@ -625,7 +625,7 @@ ENCODER(shift_jis_2004) return 1; } else if (c >> 16 == EMPBASE >> 16) { - EMULATE_JISX0213_2000_ENCODE_EMP(code, c) + EMULATE_JISX0213_2000_ENCODE_EMP(codec->config, code, c) else if (TRYMAP_ENC(jisx0213_emp, code, c&0xffff)) ; else @@ -686,7 +686,7 @@ DECODER(shift_jis_2004) if (c1 < 0x5e) { /* Plane 1 */ c1 += 0x21; - EMULATE_JISX0213_2000_DECODE_PLANE1(writer, + EMULATE_JISX0213_2000_DECODE_PLANE1(codec->config, writer, c1, c2) else if (TRYMAP_DEC(jisx0208, decoded, c1, c2)) OUTCHAR(decoded); @@ -708,7 +708,7 @@ DECODER(shift_jis_2004) else c1 -= 0x3d; - EMULATE_JISX0213_2000_DECODE_PLANE2(writer, + EMULATE_JISX0213_2000_DECODE_PLANE2(codec->config, writer, c1, c2) else if (TRYMAP_DEC(jisx0213_2_bmp, decoded, c1, c2)) OUTCHAR(decoded); diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h index 1b0355310eddab..e553ff3e17b898 100644 --- a/Modules/cjkcodecs/cjkcodecs.h +++ b/Modules/cjkcodecs/cjkcodecs.h @@ -60,11 +60,17 @@ struct pair_encodemap { DBCHAR code; }; -typedef struct { +#ifndef CJK_MOD_SPECIFIC_STATE +#define CJK_MOD_SPECIFIC_STATE +#endif + +typedef struct _cjk_mod_state { int num_mappings; int num_codecs; struct dbcs_map *mapping_list; MultibyteCodec *codec_list; + + CJK_MOD_SPECIFIC_STATE } cjkcodecs_module_state; static inline cjkcodecs_module_state * @@ -76,33 +82,33 @@ get_module_state(PyObject *mod) } #define CODEC_INIT(encoding) \ - static int encoding##_codec_init(const void *config) + static int encoding##_codec_init(const MultibyteCodec *codec) #define ENCODER_INIT(encoding) \ static int encoding##_encode_init( \ - MultibyteCodec_State *state, const void *config) + MultibyteCodec_State *state, const MultibyteCodec *codec) #define ENCODER(encoding) \ static Py_ssize_t encoding##_encode( \ - MultibyteCodec_State *state, const void *config, \ + MultibyteCodec_State *state, const MultibyteCodec *codec, \ int kind, const void *data, \ Py_ssize_t *inpos, Py_ssize_t inlen, \ unsigned char **outbuf, Py_ssize_t outleft, int flags) #define ENCODER_RESET(encoding) \ static Py_ssize_t encoding##_encode_reset( \ - MultibyteCodec_State *state, const void *config, \ + MultibyteCodec_State *state, const MultibyteCodec *codec, \ unsigned char **outbuf, Py_ssize_t outleft) #define DECODER_INIT(encoding) \ static int encoding##_decode_init( \ - MultibyteCodec_State *state, const void *config) + MultibyteCodec_State *state, const MultibyteCodec *codec) #define DECODER(encoding) \ static Py_ssize_t encoding##_decode( \ - MultibyteCodec_State *state, const void *config, \ + MultibyteCodec_State *state, const MultibyteCodec *codec, \ const unsigned char **inbuf, Py_ssize_t inleft, \ _PyUnicodeWriter *writer) #define DECODER_RESET(encoding) \ static Py_ssize_t encoding##_decode_reset( \ - MultibyteCodec_State *state, const void *config) + MultibyteCodec_State *state, const MultibyteCodec *codec) #define NEXT_IN(i) \ do { \ @@ -205,6 +211,9 @@ get_module_state(PyObject *mod) (m)->bottom]) != NOCHAR) #define TRYMAP_ENC(charset, assi, uni) \ 
_TRYMAP_ENC(&charset##_encmap[(uni) >> 8], assi, (uni) & 0xff) +#define TRYMAP_ENC_ST(charset, assi, uni) \ + _TRYMAP_ENC(&(codec->modstate->charset##_encmap)[(uni) >> 8], \ + assi, (uni) & 0xff) #define _TRYMAP_DEC(m, assi, val) \ ((m)->map != NULL && \ @@ -213,6 +222,8 @@ get_module_state(PyObject *mod) ((assi) = (m)->map[(val) - (m)->bottom]) != UNIINV) #define TRYMAP_DEC(charset, assi, c1, c2) \ _TRYMAP_DEC(&charset##_decmap[c1], assi, c2) +#define TRYMAP_DEC_ST(charset, assi, c1, c2) \ + _TRYMAP_DEC(&(codec->modstate->charset##_decmap)[c1], assi, c2) #define BEGIN_MAPPINGS_LIST(NUM) \ static int \ @@ -271,9 +282,12 @@ add_codecs(cjkcodecs_module_state *st) \ #define CODEC_STATELESS_WINIT(enc) \ NEXT_CODEC = (MultibyteCodec){#enc, NULL, enc##_codec_init, _STATELESS_METHODS(enc)}; -#define END_CODECS_LIST \ - assert(st->num_codecs == idx); \ - return 0; \ +#define END_CODECS_LIST \ + assert(st->num_codecs == idx); \ + for (int i = 0; i < st->num_codecs; i++) { \ + st->codec_list[i].modstate = st; \ + } \ + return 0; \ } diff --git a/Modules/cjkcodecs/emu_jisx0213_2000.h b/Modules/cjkcodecs/emu_jisx0213_2000.h index a5d5a7063d37e6..c30c948a2b1279 100644 --- a/Modules/cjkcodecs/emu_jisx0213_2000.h +++ b/Modules/cjkcodecs/emu_jisx0213_2000.h @@ -5,8 +5,8 @@ # define EMULATE_JISX0213_2000_ENCODE_INVALID 1 #endif -#define EMULATE_JISX0213_2000_ENCODE_BMP(assi, c) \ - if (config == (void *)2000 && ( \ +#define EMULATE_JISX0213_2000_ENCODE_BMP(config, assi, c) \ + if ((config) == (void *)2000 && ( \ (c) == 0x9B1C || (c) == 0x4FF1 || \ (c) == 0x525D || (c) == 0x541E || \ (c) == 0x5653 || (c) == 0x59F8 || \ @@ -14,12 +14,12 @@ (c) == 0x7626 || (c) == 0x7E6B)) { \ return EMULATE_JISX0213_2000_ENCODE_INVALID; \ } \ - else if (config == (void *)2000 && (c) == 0x9B1D) { \ + else if ((config) == (void *)2000 && (c) == 0x9B1D) { \ (assi) = 0x8000 | 0x7d3b; \ } -#define EMULATE_JISX0213_2000_ENCODE_EMP(assi, c) \ - if (config == (void *)2000 && (c) == 0x20B9F) { \ +#define EMULATE_JISX0213_2000_ENCODE_EMP(config, assi, c) \ + if ((config) == (void *)2000 && (c) == 0x20B9F) { \ return EMULATE_JISX0213_2000_ENCODE_INVALID; \ } @@ -27,8 +27,8 @@ # define EMULATE_JISX0213_2000_DECODE_INVALID 2 #endif -#define EMULATE_JISX0213_2000_DECODE_PLANE1(assi, c1, c2) \ - if (config == (void *)2000 && \ +#define EMULATE_JISX0213_2000_DECODE_PLANE1(config, assi, c1, c2) \ + if ((config) == (void *)2000 && \ (((c1) == 0x2E && (c2) == 0x21) || \ ((c1) == 0x2F && (c2) == 0x7E) || \ ((c1) == 0x4F && (c2) == 0x54) || \ @@ -42,13 +42,13 @@ return EMULATE_JISX0213_2000_DECODE_INVALID; \ } -#define EMULATE_JISX0213_2000_DECODE_PLANE2(writer, c1, c2) \ - if (config == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ +#define EMULATE_JISX0213_2000_DECODE_PLANE2(config, writer, c1, c2) \ + if ((config) == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ OUTCHAR(0x9B1D); \ } -#define EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(assi, c1, c2) \ - if (config == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ +#define EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(config, assi, c1, c2) \ + if ((config) == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \ (assi) = 0x9B1D; \ } diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 8976ad331aaa2a..233fc3020fd6a8 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -272,7 +272,7 @@ multibytecodec_encerror(const MultibyteCodec *codec, for (;;) { Py_ssize_t outleft = (Py_ssize_t)(buf->outbuf_end - buf->outbuf); - r = 
codec->encode(state, codec->config, + r = codec->encode(state, codec, kind, data, &inpos, 1, &buf->outbuf, outleft, 0); if (r == MBERR_TOOSMALL) { @@ -521,7 +521,7 @@ multibytecodec_encode(const MultibyteCodec *codec, * error callbacks can relocate the cursor anywhere on buffer*/ Py_ssize_t outleft = (Py_ssize_t)(buf.outbuf_end - buf.outbuf); - r = codec->encode(state, codec->config, + r = codec->encode(state, codec, kind, data, &buf.inpos, buf.inlen, &buf.outbuf, outleft, flags); @@ -538,7 +538,7 @@ multibytecodec_encode(const MultibyteCodec *codec, Py_ssize_t outleft; outleft = (Py_ssize_t)(buf.outbuf_end - buf.outbuf); - r = codec->encreset(state, codec->config, &buf.outbuf, + r = codec->encreset(state, codec, &buf.outbuf, outleft); if (r == 0) break; @@ -616,7 +616,7 @@ _multibytecodec_MultibyteCodec_encode_impl(MultibyteCodecObject *self, } if (self->codec->encinit != NULL && - self->codec->encinit(&state, self->codec->config) != 0) + self->codec->encinit(&state, self->codec) != 0) goto errorexit; r = multibytecodec_encode(self->codec, &state, input, NULL, errorcb, @@ -680,7 +680,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self, buf.inbuf_end = buf.inbuf_top + datalen; if (self->codec->decinit != NULL && - self->codec->decinit(&state, self->codec->config) != 0) + self->codec->decinit(&state, self->codec) != 0) goto errorexit; while (buf.inbuf < buf.inbuf_end) { @@ -688,7 +688,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self, inleft = (Py_ssize_t)(buf.inbuf_end - buf.inbuf); - r = self->codec->decode(&state, self->codec->config, + r = self->codec->decode(&state, self->codec, &buf.inbuf, inleft, &buf.writer); if (r == 0) break; @@ -888,7 +888,7 @@ decoder_feed_buffer(MultibyteStatefulDecoderContext *ctx, inleft = (Py_ssize_t)(buf->inbuf_end - buf->inbuf); - r = ctx->codec->decode(&ctx->state, ctx->codec->config, + r = ctx->codec->decode(&ctx->state, ctx->codec, &buf->inbuf, inleft, &buf->writer); if (r == 0 || r == MBERR_TOOFEW) break; @@ -1015,7 +1015,7 @@ _multibytecodec_MultibyteIncrementalEncoder_reset_impl(MultibyteIncrementalEncod Py_ssize_t r; if (self->codec->encreset != NULL) { outbuf = buffer; - r = self->codec->encreset(&self->state, self->codec->config, + r = self->codec->encreset(&self->state, self->codec, &outbuf, sizeof(buffer)); if (r != 0) return NULL; @@ -1063,7 +1063,7 @@ mbiencoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->encinit != NULL && - self->codec->encinit(&self->state, self->codec->config) != 0) + self->codec->encinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1292,7 +1292,7 @@ _multibytecodec_MultibyteIncrementalDecoder_reset_impl(MultibyteIncrementalDecod /*[clinic end generated code: output=da423b1782c23ed1 input=3b63b3be85b2fb45]*/ { if (self->codec->decreset != NULL && - self->codec->decreset(&self->state, self->codec->config) != 0) + self->codec->decreset(&self->state, self->codec) != 0) return NULL; self->pendingsize = 0; @@ -1338,7 +1338,7 @@ mbidecoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->decinit != NULL && - self->codec->decinit(&self->state, self->codec->config) != 0) + self->codec->decinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1600,7 +1600,7 @@ _multibytecodec_MultibyteStreamReader_reset_impl(MultibyteStreamReaderObject *se /*[clinic end generated code: output=138490370a680abc 
input=5d4140db84b5e1e2]*/ { if (self->codec->decreset != NULL && - self->codec->decreset(&self->state, self->codec->config) != 0) + self->codec->decreset(&self->state, self->codec) != 0) return NULL; self->pendingsize = 0; @@ -1654,7 +1654,7 @@ mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->decinit != NULL && - self->codec->decinit(&self->state, self->codec->config) != 0) + self->codec->decinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1877,7 +1877,7 @@ mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (self->errors == NULL) goto errorexit; if (self->codec->encinit != NULL && - self->codec->encinit(&self->state, self->codec->config) != 0) + self->codec->encinit(&self->state, self->codec) != 0) goto errorexit; Py_DECREF(codec); @@ -1971,7 +1971,7 @@ _multibytecodec___create_codec(PyObject *module, PyObject *arg) codec_capsule *data = PyCapsule_GetPointer(arg, CODEC_CAPSULE); const MultibyteCodec *codec = data->codec; - if (codec->codecinit != NULL && codec->codecinit(codec->config) != 0) + if (codec->codecinit != NULL && codec->codecinit(codec) != 0) return NULL; module_state *state = get_module_state(module); diff --git a/Modules/cjkcodecs/multibytecodec.h b/Modules/cjkcodecs/multibytecodec.h index 327cb51129d945..f59362205d26fc 100644 --- a/Modules/cjkcodecs/multibytecodec.h +++ b/Modules/cjkcodecs/multibytecodec.h @@ -27,28 +27,31 @@ typedef struct { unsigned char c[8]; } MultibyteCodec_State; -typedef int (*mbcodec_init)(const void *config); +struct _cjk_mod_state; +struct _multibyte_codec; + +typedef int (*mbcodec_init)(const struct _multibyte_codec *codec); typedef Py_ssize_t (*mbencode_func)(MultibyteCodec_State *state, - const void *config, + const struct _multibyte_codec *codec, int kind, const void *data, Py_ssize_t *inpos, Py_ssize_t inlen, unsigned char **outbuf, Py_ssize_t outleft, int flags); typedef int (*mbencodeinit_func)(MultibyteCodec_State *state, - const void *config); + const struct _multibyte_codec *codec); typedef Py_ssize_t (*mbencodereset_func)(MultibyteCodec_State *state, - const void *config, + const struct _multibyte_codec *codec, unsigned char **outbuf, Py_ssize_t outleft); typedef Py_ssize_t (*mbdecode_func)(MultibyteCodec_State *state, - const void *config, + const struct _multibyte_codec *codec, const unsigned char **inbuf, Py_ssize_t inleft, _PyUnicodeWriter *writer); typedef int (*mbdecodeinit_func)(MultibyteCodec_State *state, - const void *config); + const struct _multibyte_codec *codec); typedef Py_ssize_t (*mbdecodereset_func)(MultibyteCodec_State *state, - const void *config); + const struct _multibyte_codec *codec); -typedef struct { +typedef struct _multibyte_codec { const char *encoding; const void *config; mbcodec_init codecinit; @@ -58,6 +61,7 @@ typedef struct { mbdecode_func decode; mbdecodeinit_func decinit; mbdecodereset_func decreset; + struct _cjk_mod_state *modstate; } MultibyteCodec; typedef struct { diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index eddc1a33a953e6..4a2381d9611776 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -2096,7 +2096,7 @@ math_trunc(PyObject *module, PyObject *x) return PyFloat_Type.tp_as_number->nb_int(x); } - if (Py_TYPE(x)->tp_dict == NULL) { + if (_PyType_IsReady(Py_TYPE(x))) { if (PyType_Ready(Py_TYPE(x)) < 0) return NULL; } @@ -2314,7 +2314,7 @@ math_log(PyObject *module, PyObject * const *args, Py_ssize_t nargs) PyDoc_STRVAR(math_log_doc, "log(x, 
[base=math.e])\n\ Return the logarithm of x to the given base.\n\n\ -If the base not specified, returns the natural logarithm (base e) of x."); +If the base is not specified, returns the natural logarithm (base e) of x."); /*[clinic input] math.log2 diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index dd150107e4a9de..dcb5e7a0e0408c 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4789,6 +4789,8 @@ os__path_isdir_impl(PyObject *module, PyObject *path) FILE_BASIC_INFO info; path_t _path = PATH_T_INITIALIZE("isdir", "path", 0, 1); int result; + BOOL slow_path = TRUE; + FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { path_cleanup(&_path); @@ -4800,43 +4802,60 @@ os__path_isdir_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + if (_path.wide) { + if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { + slow_path = FALSE; + result = statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; + } else if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY)) { + slow_path = FALSE; + result = 0; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; + } } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, - sizeof(info))) - { - result = info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; } else { - result = 0; + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); } - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; + if (hfile != INVALID_HANDLE_VALUE) { + if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, + sizeof(info))) + { + result = info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; } else { - result = S_ISDIR(st.st_mode); + result = 0; + } + if (close_file) { + CloseHandle(hfile); + } + } + else { + STRUCT_STAT st; + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (STAT(_path.wide, &st)) { + result = 0; + } + else { + result = S_ISDIR(st.st_mode); + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS @@ -4867,6 +4886,8 @@ os__path_isfile_impl(PyObject *module, PyObject *path) FILE_BASIC_INFO info; path_t _path = PATH_T_INITIALIZE("isfile", "path", 0, 1); int result; + BOOL slow_path = TRUE; + FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { path_cleanup(&_path); @@ -4878,43 +4899,60 @@ os__path_isfile_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + if (_path.wide) { + if 
(_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { + slow_path = FALSE; + result = !(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); + } else if (statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY) { + slow_path = FALSE; + result = 0; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; + } } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, - sizeof(info))) - { - result = !(info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; } else { - result = 0; - } - if (close_file) { - CloseHandle(hfile); + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; + if (hfile != INVALID_HANDLE_VALUE) { + if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, + sizeof(info))) + { + result = !(info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); } else { - result = S_ISREG(st.st_mode); + result = 0; + } + if (close_file) { + CloseHandle(hfile); + } + } + else { + STRUCT_STAT st; + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (STAT(_path.wide, &st)) { + result = 0; + } + else { + result = S_ISREG(st.st_mode); + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS @@ -4944,6 +4982,8 @@ os__path_exists_impl(PyObject *module, PyObject *path) BOOL close_file = TRUE; path_t _path = PATH_T_INITIALIZE("exists", "path", 0, 1); int result; + BOOL slow_path = TRUE; + FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { path_cleanup(&_path); @@ -4955,36 +4995,50 @@ os__path_exists_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); - } - if (hfile != INVALID_HANDLE_VALUE) { - result = 1; - if (close_file) { - CloseHandle(hfile); + if (_path.wide) { + if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { + slow_path = FALSE; + result = 1; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; } } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; + } + else { + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + } + if (hfile != INVALID_HANDLE_VALUE) { + result = 1; + if (close_file) { + CloseHandle(hfile); } - else { - result = 1; + } + else { + STRUCT_STAT st; + 
switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (STAT(_path.wide, &st)) { + result = 0; + } + else { + result = 1; + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS @@ -5015,6 +5069,8 @@ os__path_islink_impl(PyObject *module, PyObject *path) FILE_ATTRIBUTE_TAG_INFO info; path_t _path = PATH_T_INITIALIZE("islink", "path", 0, 1); int result; + BOOL slow_path = TRUE; + FILE_STAT_BASIC_INFORMATION statInfo; if (!path_converter(path, &_path)) { path_cleanup(&_path); @@ -5026,45 +5082,62 @@ os__path_islink_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; + if (_path.wide) { + if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, + &statInfo, sizeof(statInfo))) { + slow_path = FALSE; + if (statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) { + result = (statInfo.ReparseTag == IO_REPARSE_TAG_SYMLINK); + } + else { + result = 0; + } + } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { + slow_path = FALSE; + result = 0; + } } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, - FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, - NULL); - } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info, - sizeof(info))) - { - result = (info.ReparseTag == IO_REPARSE_TAG_SYMLINK); + if (slow_path) { + if (_path.fd != -1) { + hfile = _Py_get_osfhandle_noraise(_path.fd); + close_file = FALSE; } else { - result = 0; + hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, + FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, + NULL); } - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (LSTAT(_path.wide, &st)) { - result = 0; + if (hfile != INVALID_HANDLE_VALUE) { + if (GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info, + sizeof(info))) + { + result = (info.ReparseTag == IO_REPARSE_TAG_SYMLINK); } else { - result = S_ISLNK(st.st_mode); + result = 0; + } + if (close_file) { + CloseHandle(hfile); + } + } + else { + STRUCT_STAT st; + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + if (LSTAT(_path.wide, &st)) { + result = 0; + } + else { + result = S_ISLNK(st.st_mode); + } + break; + default: + result = 0; } - break; - default: - result = 0; } } Py_END_ALLOW_THREADS diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 656cd546d46d31..f11d4b1a6e0591 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -8418,6 +8418,18 @@ socket_exec(PyObject *m) #ifdef IP_BIND_ADDRESS_NO_PORT ADD_INT_MACRO(m, IP_BIND_ADDRESS_NO_PORT); #endif +#ifdef IP_UNBLOCK_SOURCE + ADD_INT_MACRO(m, IP_UNBLOCK_SOURCE); +#endif +#ifdef IP_BLOCK_SOURCE + ADD_INT_MACRO(m, IP_BLOCK_SOURCE); +#endif +#ifdef IP_ADD_SOURCE_MEMBERSHIP + ADD_INT_MACRO(m, IP_ADD_SOURCE_MEMBERSHIP); +#endif +#ifdef IP_DROP_SOURCE_MEMBERSHIP + ADD_INT_MACRO(m, IP_DROP_SOURCE_MEMBERSHIP); +#endif /* IPv6 [gs]etsockopt options, defined in RFC2553 */ #ifdef IPV6_JOIN_GROUP diff --git a/Modules/symtablemodule.c 
b/Modules/symtablemodule.c index 4ef1d8cde07db6..91538b4fb15cbd 100644 --- a/Modules/symtablemodule.c +++ b/Modules/symtablemodule.c @@ -66,12 +66,6 @@ static PyMethodDef symtable_methods[] = { {NULL, NULL} /* sentinel */ }; -static int -symtable_init_stentry_type(PyObject *m) -{ - return PyType_Ready(&PySTEntry_Type); -} - static int symtable_init_constants(PyObject *m) { @@ -105,7 +99,6 @@ symtable_init_constants(PyObject *m) } static PyModuleDef_Slot symtable_slots[] = { - {Py_mod_exec, symtable_init_stentry_type}, {Py_mod_exec, symtable_init_constants}, {0, NULL} }; diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 2d8dab6f378006..27b2ad4f2cb38f 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3090,25 +3090,6 @@ _Py_COMP_DIAG_POP } -PyStatus -_PyBytes_InitTypes(PyInterpreterState *interp) -{ - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyBytes_Type) < 0) { - return _PyStatus_ERR("Can't initialize bytes type"); - } - - if (PyType_Ready(&PyBytesIter_Type) < 0) { - return _PyStatus_ERR("Can't initialize bytes iterator type"); - } - - return _PyStatus_OK(); -} - - /*********************** Bytes Iterator ****************************/ typedef struct { diff --git a/Objects/call.c b/Objects/call.c index bd027e41f8a9a5..cf6e357a990441 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -8,16 +8,6 @@ #include "pycore_tuple.h" // _PyTuple_ITEMS() -static PyObject *const * -_PyStack_UnpackDict(PyThreadState *tstate, - PyObject *const *args, Py_ssize_t nargs, - PyObject *kwargs, PyObject **p_kwnames); - -static void -_PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, - PyObject *kwnames); - - static PyObject * null_error(PyThreadState *tstate) { @@ -965,7 +955,7 @@ _PyStack_AsDict(PyObject *const *values, PyObject *kwnames) The newly allocated argument vector supports PY_VECTORCALL_ARGUMENTS_OFFSET. 
When done, you must call _PyStack_UnpackDict_Free(stack, nargs, kwnames) */ -static PyObject *const * +PyObject *const * _PyStack_UnpackDict(PyThreadState *tstate, PyObject *const *args, Py_ssize_t nargs, PyObject *kwargs, PyObject **p_kwnames) @@ -1034,7 +1024,7 @@ _PyStack_UnpackDict(PyThreadState *tstate, return stack; } -static void +void _PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, PyObject *kwnames) { @@ -1042,6 +1032,12 @@ _PyStack_UnpackDict_Free(PyObject *const *stack, Py_ssize_t nargs, for (Py_ssize_t i = 0; i < n; i++) { Py_DECREF(stack[i]); } + _PyStack_UnpackDict_FreeNoDecRef(stack, kwnames); +} + +void +_PyStack_UnpackDict_FreeNoDecRef(PyObject *const *stack, PyObject *kwnames) +{ PyMem_Free((PyObject **)stack - 1); Py_DECREF(kwnames); } diff --git a/Objects/classobject.c b/Objects/classobject.c index 2cb192e725d40d..71c4a4e5d0f8ab 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -181,7 +181,7 @@ method_getattro(PyObject *obj, PyObject *name) PyObject *descr = NULL; { - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) return NULL; } @@ -395,7 +395,7 @@ instancemethod_getattro(PyObject *self, PyObject *name) PyTypeObject *tp = Py_TYPE(self); PyObject *descr = NULL; - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) return NULL; } diff --git a/Objects/exceptions.c b/Objects/exceptions.c index a355244cf997e6..6c9dfbd9b415cf 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -1421,7 +1421,12 @@ _PyExc_PrepReraiseStar(PyObject *orig, PyObject *excs) if (res < 0) { goto done; } - result = _PyExc_CreateExceptionGroup("", raised_list); + if (PyList_GET_SIZE(raised_list) > 1) { + result = _PyExc_CreateExceptionGroup("", raised_list); + } + else { + result = Py_NewRef(PyList_GetItem(raised_list, 0)); + } if (result == NULL) { goto done; } @@ -3591,10 +3596,6 @@ static struct static_exception static_exceptions[] = { int _PyExc_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return 0; - } - for (size_t i=0; i < Py_ARRAY_LENGTH(static_exceptions); i++) { PyTypeObject *exc = static_exceptions[i].exc; if (_PyStaticType_InitBuiltin(exc) < 0) { diff --git a/Objects/floatobject.c b/Objects/floatobject.c index d641311f1126cd..9c2315781bed36 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -1990,20 +1990,10 @@ _PyFloat_InitState(PyInterpreterState *interp) PyStatus _PyFloat_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyFloat_Type) < 0) { - return _PyStatus_ERR("Can't initialize float type"); - } - /* Init float info */ - if (FloatInfoType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin(&FloatInfoType, - &floatinfo_desc) < 0) { - return _PyStatus_ERR("can't init float info type"); - } + if (_PyStructSequence_InitBuiltin(&FloatInfoType, + &floatinfo_desc) < 0) { + return _PyStatus_ERR("can't init float info type"); } return _PyStatus_OK(); diff --git a/Objects/longobject.c b/Objects/longobject.c index d98bbbb6d6ff46..f84809b8a8986a 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -6351,19 +6351,9 @@ PyLong_GetInfo(void) PyStatus _PyLong_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyLong_Type) < 0) { - return _PyStatus_ERR("Can't initialize int type"); - } - /* initialize int_info */ - if (Int_InfoType.tp_name == NULL) { - if 
(_PyStructSequence_InitBuiltin(&Int_InfoType, &int_info_desc) < 0) { - return _PyStatus_ERR("can't init int info type"); - } + if (_PyStructSequence_InitBuiltin(&Int_InfoType, &int_info_desc) < 0) { + return _PyStatus_ERR("can't init int info type"); } return _PyStatus_OK(); diff --git a/Objects/object.c b/Objects/object.c index 65c296e9340601..4ce10cf1192d3f 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -890,7 +890,7 @@ PyObject_Hash(PyObject *v) * an explicit call to PyType_Ready, we implicitly call * PyType_Ready here and then check the tp_hash slot again */ - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) return -1; if (tp->tp_hash != NULL) @@ -1385,7 +1385,7 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name, } Py_INCREF(name); - if (tp->tp_dict == NULL) { + if (!_PyType_IsReady(tp)) { if (PyType_Ready(tp) < 0) goto done; } @@ -1507,8 +1507,9 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name, return -1; } - if (tp->tp_dict == NULL && PyType_Ready(tp) < 0) + if (!_PyType_IsReady(tp) && PyType_Ready(tp) < 0) { return -1; + } Py_INCREF(name); Py_INCREF(tp); @@ -2101,10 +2102,6 @@ static PyTypeObject* static_types[] = { PyStatus _PyTypes_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - // All other static types (unless initialized elsewhere) for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) { PyTypeObject *type = static_types[i]; diff --git a/Objects/structseq.c b/Objects/structseq.c index 2a5343815866d3..88a71bc52958f5 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -31,6 +31,7 @@ get_type_attr_as_size(PyTypeObject *tp, PyObject *name) PyErr_Format(PyExc_TypeError, "Missed attribute '%U' of type %s", name, tp->tp_name); + return -1; } return PyLong_AsSsize_t(v); } @@ -509,6 +510,13 @@ _PyStructSequence_InitBuiltinWithFlags(PyTypeObject *type, PyStructSequence_Desc *desc, unsigned long tp_flags) { + if (type->tp_flags & Py_TPFLAGS_READY) { + if (_PyStaticType_InitBuiltin(type) < 0) { + goto failed_init_builtin; + } + return 0; + } + PyMemberDef *members; Py_ssize_t n_members, n_unnamed_members; @@ -517,18 +525,25 @@ _PyStructSequence_InitBuiltinWithFlags(PyTypeObject *type, return -1; } initialize_static_fields(type, desc, members, tp_flags); + + Py_INCREF(type); // XXX It should be immortal. 
if (_PyStaticType_InitBuiltin(type) < 0) { PyMem_Free(members); - PyErr_Format(PyExc_RuntimeError, - "Can't initialize builtin type %s", - desc->name); - return -1; + goto failed_init_builtin; } - if (initialize_static_type(type, desc, n_members, n_unnamed_members) < 0) { + + if (initialize_structseq_dict( + desc, type->tp_dict, n_members, n_unnamed_members) < 0) { PyMem_Free(members); return -1; } return 0; + +failed_init_builtin: + PyErr_Format(PyExc_RuntimeError, + "Can't initialize builtin type %s", + desc->name); + return -1; } int diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 61fab4078d66ba..991edcc86677de 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -960,24 +960,6 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) } -PyStatus -_PyTuple_InitTypes(PyInterpreterState *interp) -{ - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (PyType_Ready(&PyTuple_Type) < 0) { - return _PyStatus_ERR("Can't initialize tuple type"); - } - - if (PyType_Ready(&PyTupleIter_Type) < 0) { - return _PyStatus_ERR("Can't initialize tuple iterator type"); - } - - return _PyStatus_OK(); -} - static void maybe_freelist_clear(PyInterpreterState *, int); void diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 07c900932b4c24..e807cc90faa16a 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -6706,7 +6706,6 @@ type_ready_mro(PyTypeObject *type) and static builtin types must have static builtin bases. */ if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { assert(type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE); - int isbuiltin = type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN; PyObject *mro = type->tp_mro; Py_ssize_t n = PyTuple_GET_SIZE(mro); for (Py_ssize_t i = 0; i < n; i++) { @@ -6718,7 +6717,8 @@ type_ready_mro(PyTypeObject *type) type->tp_name, base->tp_name); return -1; } - assert(!isbuiltin || (base->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); + assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) || + (base->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); } } return 0; @@ -6948,8 +6948,12 @@ type_ready_post_checks(PyTypeObject *type) static int type_ready(PyTypeObject *type) { + _PyObject_ASSERT((PyObject *)type, + (type->tp_flags & Py_TPFLAGS_READYING) == 0); + type->tp_flags |= Py_TPFLAGS_READYING; + if (type_ready_pre_checks(type) < 0) { - return -1; + goto error; } #ifdef Py_TRACE_REFS @@ -6963,41 +6967,49 @@ type_ready(PyTypeObject *type) /* Initialize tp_dict: _PyType_IsReady() tests if tp_dict != NULL */ if (type_ready_set_dict(type) < 0) { - return -1; + goto error; } if (type_ready_set_bases(type) < 0) { - return -1; + goto error; } if (type_ready_mro(type) < 0) { - return -1; + goto error; } if (type_ready_set_new(type) < 0) { - return -1; + goto error; } if (type_ready_fill_dict(type) < 0) { - return -1; + goto error; } if (type_ready_inherit(type) < 0) { - return -1; + goto error; } if (type_ready_preheader(type) < 0) { - return -1; + goto error; } if (type_ready_set_hash(type) < 0) { - return -1; + goto error; } if (type_ready_add_subclasses(type) < 0) { - return -1; + goto error; } if (type_ready_managed_dict(type) < 0) { - return -1; + goto error; } if (type_ready_post_checks(type) < 0) { - return -1; + goto error; } + + /* All done -- set the ready flag */ + type->tp_flags = (type->tp_flags & ~Py_TPFLAGS_READYING) | Py_TPFLAGS_READY; + + assert(_PyType_CheckConsistency(type)); return 0; -} +error: + type->tp_flags &= ~Py_TPFLAGS_READYING; + return -1; +} int PyType_Ready(PyTypeObject *type) @@ -7006,31 +7018,29 @@ 
PyType_Ready(PyTypeObject *type) assert(_PyType_CheckConsistency(type)); return 0; } - _PyObject_ASSERT((PyObject *)type, - (type->tp_flags & Py_TPFLAGS_READYING) == 0); - - type->tp_flags |= Py_TPFLAGS_READYING; + assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); /* Historically, all static types were immutable. See bpo-43908 */ if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { type->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; } - if (type_ready(type) < 0) { - type->tp_flags &= ~Py_TPFLAGS_READYING; - return -1; - } - - /* All done -- set the ready flag */ - type->tp_flags = (type->tp_flags & ~Py_TPFLAGS_READYING) | Py_TPFLAGS_READY; - assert(_PyType_CheckConsistency(type)); - return 0; + return type_ready(type); } int _PyStaticType_InitBuiltin(PyTypeObject *self) { + assert(!(self->tp_flags & Py_TPFLAGS_HEAPTYPE)); + + if (self->tp_flags & Py_TPFLAGS_READY) { + assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); + assert(_PyType_CheckConsistency(self)); + return 0; + } + self->tp_flags |= _Py_TPFLAGS_STATIC_BUILTIN; + self->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; assert(NEXT_GLOBAL_VERSION_TAG <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG); self->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++; @@ -7038,7 +7048,7 @@ _PyStaticType_InitBuiltin(PyTypeObject *self) static_builtin_state_init(self); - int res = PyType_Ready(self); + int res = type_ready(self); if (res < 0) { static_builtin_state_clear(self); } @@ -8296,17 +8306,23 @@ _Py_slot_tp_getattr_hook(PyObject *self, PyObject *name) if (getattribute == NULL || (Py_IS_TYPE(getattribute, &PyWrapperDescr_Type) && ((PyWrapperDescrObject *)getattribute)->d_wrapped == - (void *)PyObject_GenericGetAttr)) - res = PyObject_GenericGetAttr(self, name); - else { + (void *)PyObject_GenericGetAttr)) { + res = _PyObject_GenericGetAttrWithDict(self, name, NULL, 1); + /* if res == NULL with no exception set, then it must be an + AttributeError suppressed by us. */ + if (res == NULL && !PyErr_Occurred()) { + res = call_attribute(self, getattr, name); + } + } else { Py_INCREF(getattribute); res = call_attribute(self, getattribute, name); Py_DECREF(getattribute); + if (res == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + res = call_attribute(self, getattr, name); + } } - if (res == NULL && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - res = call_attribute(self, getattr, name); - } + Py_DECREF(getattr); return res; } @@ -9380,22 +9396,19 @@ super_repr(PyObject *self) su->type ? su->type->tp_name : "NULL"); } -// if `method` is non-NULL, we are looking for a method descriptor, -// and setting `*method` to 1 means we found one. +/* Do a super lookup without executing descriptors or falling back to getattr +on the super object itself. + +May return NULL with or without an exception set, like PyDict_GetItemWithError. 
*/ static PyObject * -do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj, - PyTypeObject *su_obj_type, PyObject *name, int *method) +_super_lookup_descr(PyTypeObject *su_type, PyTypeObject *su_obj_type, PyObject *name) { PyObject *mro, *res; Py_ssize_t i, n; - int temp_su = 0; - - if (su_obj_type == NULL) - goto skip; mro = su_obj_type->tp_mro; if (mro == NULL) - goto skip; + return NULL; assert(PyTuple_Check(mro)); n = PyTuple_GET_SIZE(mro); @@ -9407,7 +9420,7 @@ do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj, } i++; /* skip su->type (if any) */ if (i >= n) - goto skip; + return NULL; /* keep a strong reference to mro because su_obj_type->tp_mro can be replaced during PyDict_GetItemWithError(dict, name) */ @@ -9420,22 +9433,6 @@ do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj, res = PyDict_GetItemWithError(dict, name); if (res != NULL) { Py_INCREF(res); - if (method && _PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) { - *method = 1; - } - else { - descrgetfunc f = Py_TYPE(res)->tp_descr_get; - if (f != NULL) { - PyObject *res2; - res2 = f(res, - /* Only pass 'obj' param if this is instance-mode super - (See SF ID #743627) */ - (su_obj == (PyObject *)su_obj_type) ? NULL : su_obj, - (PyObject *)su_obj_type); - Py_SETREF(res, res2); - } - } - Py_DECREF(mro); return res; } @@ -9447,6 +9444,45 @@ do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj, i++; } while (i < n); Py_DECREF(mro); + return NULL; +} + +// if `method` is non-NULL, we are looking for a method descriptor, +// and setting `*method = 1` means we found one. +static PyObject * +do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj, + PyTypeObject *su_obj_type, PyObject *name, int *method) +{ + PyObject *res; + int temp_su = 0; + + if (su_obj_type == NULL) { + goto skip; + } + + res = _super_lookup_descr(su_type, su_obj_type, name); + if (res != NULL) { + if (method && _PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) { + *method = 1; + } + else { + descrgetfunc f = Py_TYPE(res)->tp_descr_get; + if (f != NULL) { + PyObject *res2; + res2 = f(res, + /* Only pass 'obj' param if this is instance-mode super + (See SF ID #743627) */ + (su_obj == (PyObject *)su_obj_type) ? 
NULL : su_obj, + (PyObject *)su_obj_type); + Py_SETREF(res, res2); + } + } + + return res; + } + else if (PyErr_Occurred()) { + return NULL; + } skip: if (su == NULL) { @@ -9544,6 +9580,18 @@ _PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *me return res; } +PyObject * +_PySuper_LookupDescr(PyTypeObject *su_type, PyObject *su_obj, PyObject *name) +{ + PyTypeObject *su_obj_type = supercheck(su_type, su_obj); + if (su_obj_type == NULL) { + return NULL; + } + PyObject *res = _super_lookup_descr(su_type, su_obj_type, name); + Py_DECREF(su_obj_type); + return res; +} + static PyObject * super_descr_get(PyObject *self, PyObject *obj, PyObject *type) { diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index fd056e38f3f86b..7537c12e92680c 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -14573,10 +14573,6 @@ _PyUnicode_InitGlobalObjects(PyInterpreterState *interp) PyStatus _PyUnicode_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - if (_PyStaticType_InitBuiltin(&EncodingMapType) < 0) { goto error; } diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index 55c0f6fdd620f4..0aaaed64c4037c 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -965,17 +965,43 @@ _PyPegen_check_legacy_stmt(Parser *p, expr_ty name) { return 0; } -expr_ty -_PyPegen_check_fstring_conversion(Parser *p, Token* symbol, expr_ty conv) { - if (symbol->lineno != conv->lineno || symbol->end_col_offset != conv->col_offset) { +static ResultTokenWithMetadata * +result_token_with_metadata(Parser *p, void *result, PyObject *metadata) +{ + ResultTokenWithMetadata *res = _PyArena_Malloc(p->arena, sizeof(ResultTokenWithMetadata)); + if (res == NULL) { + return NULL; + } + res->metadata = metadata; + res->result = result; + return res; +} + +ResultTokenWithMetadata * +_PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv) +{ + if (conv_token->lineno != conv->lineno || conv_token->end_col_offset != conv->col_offset) { return RAISE_SYNTAX_ERROR_KNOWN_RANGE( - symbol, conv, + conv_token, conv, "f-string: conversion type must come right after the exclamanation mark" ); } - return conv; + return result_token_with_metadata(p, conv, conv_token->metadata); } +ResultTokenWithMetadata * +_PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset, + int end_lineno, int end_col_offset, PyArena *arena) +{ + if (!spec) { + return NULL; + } + expr_ty res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno, end_col_offset, p->arena); + if (!res) { + return NULL; + } + return result_token_with_metadata(p, res, colon->metadata); +} const char * _PyPegen_get_expr_name(expr_ty e) @@ -1197,27 +1223,6 @@ _PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq // Fstring stuff -static expr_ty -decode_fstring_buffer(Parser *p, int lineno, int col_offset, int end_lineno, - int end_col_offset) -{ - tokenizer_mode *tok_mode = &(p->tok->tok_mode_stack[p->tok->tok_mode_stack_index]); - assert(tok_mode->last_expr_buffer != NULL); - assert(tok_mode->last_expr_size >= 0 && tok_mode->last_expr_end >= 0); - - PyObject *res = PyUnicode_DecodeUTF8( - tok_mode->last_expr_buffer, - tok_mode->last_expr_size - tok_mode->last_expr_end, - NULL - ); - if (!res || _PyArena_AddPyObject(p->arena, res) < 0) { - Py_XDECREF(res); - return NULL; - } - - return _PyAST_Constant(res, NULL, lineno, col_offset, end_lineno, end_col_offset, 
p->arena); -} - static expr_ty _PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant) { assert(PyUnicode_CheckExact(constant->v.Constant.value)); @@ -1386,19 +1391,20 @@ expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok) { return _PyAST_Constant(s, kind, tok->lineno, tok->col_offset, tok->end_lineno, tok->end_col_offset, p->arena); } -expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, expr_ty conversion, - expr_ty format, int lineno, int col_offset, int end_lineno, int end_col_offset, - PyArena *arena) { +expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, ResultTokenWithMetadata *conversion, + ResultTokenWithMetadata *format, Token *closing_brace, int lineno, int col_offset, + int end_lineno, int end_col_offset, PyArena *arena) { int conversion_val = -1; if (conversion != NULL) { - assert(conversion->kind == Name_kind); - Py_UCS4 first = PyUnicode_READ_CHAR(conversion->v.Name.id, 0); + expr_ty conversion_expr = (expr_ty) conversion->result; + assert(conversion_expr->kind == Name_kind); + Py_UCS4 first = PyUnicode_READ_CHAR(conversion_expr->v.Name.id, 0); - if (PyUnicode_GET_LENGTH(conversion->v.Name.id) > 1 || + if (PyUnicode_GET_LENGTH(conversion_expr->v.Name.id) > 1 || !(first == 's' || first == 'r' || first == 'a')) { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion, + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion_expr, "f-string: invalid conversion character %R: expected 's', 'r', or 'a'", - conversion->v.Name.id); + conversion_expr->v.Name.id); return NULL; } @@ -1410,7 +1416,7 @@ expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, ex } expr_ty formatted_value = _PyAST_FormattedValue( - expression, conversion_val, format, + expression, conversion_val, format ? (expr_ty) format->result : NULL, lineno, col_offset, end_lineno, end_col_offset, arena ); @@ -1418,22 +1424,26 @@ expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, ex if (debug) { /* Find the non whitespace token after the "=" */ int debug_end_line, debug_end_offset; + PyObject *debug_metadata; if (conversion) { - debug_end_line = conversion->lineno; - debug_end_offset = conversion->col_offset; + debug_end_line = ((expr_ty) conversion->result)->lineno; + debug_end_offset = ((expr_ty) conversion->result)->col_offset; + debug_metadata = conversion->metadata; } else if (format) { - debug_end_line = format->lineno; - debug_end_offset = format->col_offset + 1; // HACK: ?? 
+ debug_end_line = ((expr_ty) format->result)->lineno; + debug_end_offset = ((expr_ty) format->result)->col_offset + 1; + debug_metadata = format->metadata; } else { debug_end_line = end_lineno; debug_end_offset = end_col_offset; + debug_metadata = closing_brace->metadata; } - expr_ty debug_text = decode_fstring_buffer(p, lineno, col_offset + 1, - debug_end_line, debug_end_offset - 1); + expr_ty debug_text = _PyAST_Constant(debug_metadata, NULL, lineno, col_offset + 1, debug_end_line, + debug_end_offset - 1, p->arena); if (!debug_text) { return NULL; } diff --git a/Parser/parser.c b/Parser/parser.c index 771366844fc489..6eb985a7d3e123 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -738,8 +738,8 @@ static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p); static arg_ty lambda_param_rule(Parser *p); static expr_ty fstring_middle_rule(Parser *p); static expr_ty fstring_replacement_field_rule(Parser *p); -static expr_ty fstring_conversion_rule(Parser *p); -static expr_ty fstring_full_format_spec_rule(Parser *p); +static ResultTokenWithMetadata* fstring_conversion_rule(Parser *p); +static ResultTokenWithMetadata* fstring_full_format_spec_rule(Parser *p); static expr_ty fstring_format_spec_rule(Parser *p); static expr_ty string_rule(Parser *p); static expr_ty strings_rule(Parser *p); @@ -15639,11 +15639,11 @@ fstring_replacement_field_rule(Parser *p) } D(fprintf(stderr, "%*c> fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'")); Token * _literal; - Token * _literal_1; void *a; void *conversion; void *debug_expr; void *format; + Token * rbrace; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && @@ -15655,7 +15655,7 @@ fstring_replacement_field_rule(Parser *p) && (format = fstring_full_format_spec_rule(p), !p->error_indicator) // fstring_full_format_spec? && - (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + (rbrace = _PyPegen_expect_token(p, 26)) // token='}' ) { D(fprintf(stderr, "%*c+ fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'")); @@ -15668,7 +15668,7 @@ fstring_replacement_field_rule(Parser *p) UNUSED(_end_lineno); // Only used by EXTRA macro int _end_col_offset = _token->end_col_offset; UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyPegen_formatted_value ( p , a , debug_expr , conversion , format , EXTRA ); + _res = _PyPegen_formatted_value ( p , a , debug_expr , conversion , format , rbrace , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; @@ -15706,7 +15706,7 @@ fstring_replacement_field_rule(Parser *p) } // fstring_conversion: "!" NAME -static expr_ty +static ResultTokenWithMetadata* fstring_conversion_rule(Parser *p) { if (p->level++ == MAXSTACK) { @@ -15717,7 +15717,7 @@ fstring_conversion_rule(Parser *p) p->level--; return NULL; } - expr_ty _res = NULL; + ResultTokenWithMetadata* _res = NULL; int _mark = p->mark; { // "!" 
NAME if (p->error_indicator) { @@ -15753,7 +15753,7 @@ fstring_conversion_rule(Parser *p) } // fstring_full_format_spec: ':' fstring_format_spec* -static expr_ty +static ResultTokenWithMetadata* fstring_full_format_spec_rule(Parser *p) { if (p->level++ == MAXSTACK) { @@ -15764,7 +15764,7 @@ fstring_full_format_spec_rule(Parser *p) p->level--; return NULL; } - expr_ty _res = NULL; + ResultTokenWithMetadata* _res = NULL; int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; @@ -15781,10 +15781,10 @@ fstring_full_format_spec_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> fstring_full_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*")); - Token * _literal; + Token * colon; asdl_seq * spec; if ( - (_literal = _PyPegen_expect_token(p, 11)) // token=':' + (colon = _PyPegen_expect_token(p, 11)) // token=':' && (spec = _loop0_112_rule(p)) // fstring_format_spec* ) @@ -15799,7 +15799,7 @@ fstring_full_format_spec_rule(Parser *p) UNUSED(_end_lineno); // Only used by EXTRA macro int _end_col_offset = _token->end_col_offset; UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = spec ? _PyAST_JoinedStr ( ( asdl_expr_seq* ) spec , EXTRA ) : NULL; + _res = _PyPegen_setup_full_format_spec ( p , colon , ( asdl_expr_seq* ) spec , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; diff --git a/Parser/pegen.c b/Parser/pegen.c index 262bfabfba7a25..da410ea84ecb8e 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -155,6 +155,16 @@ initialize_token(Parser *p, Token *parser_token, struct token *new_token, int to return -1; } + parser_token->metadata = NULL; + if (new_token->metadata != NULL) { + if (_PyArena_AddPyObject(p->arena, new_token->metadata) < 0) { + Py_DECREF(parser_token->metadata); + return -1; + } + parser_token->metadata = new_token->metadata; + new_token->metadata = NULL; + } + parser_token->level = new_token->level; parser_token->lineno = new_token->lineno; parser_token->col_offset = p->tok->lineno == p->starting_lineno ? 
p->starting_col_offset + new_token->col_offset @@ -198,6 +208,7 @@ int _PyPegen_fill_token(Parser *p) { struct token new_token; + new_token.metadata = NULL; int type = _PyTokenizer_Get(p->tok, &new_token); // Record and skip '# type: ignore' comments @@ -206,14 +217,14 @@ _PyPegen_fill_token(Parser *p) char *tag = PyMem_Malloc(len + 1); if (tag == NULL) { PyErr_NoMemory(); - return -1; + goto error; } strncpy(tag, new_token.start, len); tag[len] = '\0'; // Ownership of tag passes to the growable array if (!growable_comment_array_add(&p->type_ignore_comments, p->tok->lineno, tag)) { PyErr_NoMemory(); - return -1; + goto error; } type = _PyTokenizer_Get(p->tok, &new_token); } @@ -234,11 +245,14 @@ _PyPegen_fill_token(Parser *p) // Check if we are at the limit of the token array capacity and resize if needed if ((p->fill == p->size) && (_resize_tokens_array(p) != 0)) { - return -1; + goto error; } Token *t = p->tokens[p->fill]; return initialize_token(p, t, &new_token, type); +error: + Py_XDECREF(new_token.metadata); + return -1; } #if defined(Py_DEBUG) diff --git a/Parser/pegen.h b/Parser/pegen.h index 6962013c2d18b4..8800e9f97f5e04 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -39,6 +39,7 @@ typedef struct { int level; int lineno, col_offset, end_lineno, end_col_offset; Memo *memo; + PyObject *metadata; } Token; typedef struct { @@ -118,6 +119,11 @@ typedef struct { int is_keyword; } KeywordOrStarred; +typedef struct { + void *result; + PyObject *metadata; +} ResultTokenWithMetadata; + // Internal parser functions #if defined(Py_DEBUG) void _PyPegen_clear_memo_statistics(void); @@ -310,7 +316,8 @@ StarEtc *_PyPegen_star_etc(Parser *, arg_ty, asdl_seq *, arg_ty); arguments_ty _PyPegen_make_arguments(Parser *, asdl_arg_seq *, SlashWithDefault *, asdl_arg_seq *, asdl_seq *, StarEtc *); arguments_ty _PyPegen_empty_arguments(Parser *); -expr_ty _PyPegen_formatted_value(Parser *, expr_ty, Token *, expr_ty, expr_ty, int, int, int, int, PyArena *); +expr_ty _PyPegen_formatted_value(Parser *, expr_ty, Token *, ResultTokenWithMetadata *, ResultTokenWithMetadata *, Token *, + int, int, int, int, PyArena *); AugOperator *_PyPegen_augoperator(Parser*, operator_ty type); stmt_ty _PyPegen_function_def_decorators(Parser *, asdl_expr_seq *, stmt_ty); stmt_ty _PyPegen_class_def_decorators(Parser *, asdl_expr_seq *, stmt_ty); @@ -329,7 +336,9 @@ expr_ty _PyPegen_ensure_real(Parser *p, expr_ty); asdl_seq *_PyPegen_join_sequences(Parser *, asdl_seq *, asdl_seq *); int _PyPegen_check_barry_as_flufl(Parser *, Token *); int _PyPegen_check_legacy_stmt(Parser *p, expr_ty t); -expr_ty _PyPegen_check_fstring_conversion(Parser *p, Token *, expr_ty t); +ResultTokenWithMetadata *_PyPegen_check_fstring_conversion(Parser *p, Token *, expr_ty t); +ResultTokenWithMetadata *_PyPegen_setup_full_format_spec(Parser *, Token *, asdl_expr_seq *, int, int, + int, int, PyArena *); mod_ty _PyPegen_make_module(Parser *, asdl_stmt_seq *); void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); expr_ty _PyPegen_get_last_comprehension_item(comprehension_ty comprehension); diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c index e26bad20a27575..1f227da0194e3c 100644 --- a/Parser/pegen_errors.c +++ b/Parser/pegen_errors.c @@ -165,6 +165,7 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) { int ret = 0; struct token new_token; + new_token.metadata = NULL; for (;;) { switch (_PyTokenizer_Get(p->tok, &new_token)) { @@ -192,6 +193,7 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) { exit: + 
Py_XDECREF(new_token.metadata); // If we're in an f-string, we want the syntax error in the expression part // to propagate, so that tokenizer errors (like expecting '}') that happen afterwards // do not swallow it. diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 5244ab7d4f7e02..8de0572a1fc459 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -111,7 +111,7 @@ tok_new(void) tok->interactive_underflow = IUNDERFLOW_NORMAL; tok->str = NULL; tok->report_warnings = 1; - tok->tok_mode_stack[0] = (tokenizer_mode){.kind =TOK_REGULAR_MODE, .f_string_quote='\0', .f_string_quote_size = 0}; + tok->tok_mode_stack[0] = (tokenizer_mode){.kind =TOK_REGULAR_MODE, .f_string_quote='\0', .f_string_quote_size = 0, .f_string_debug=0}; tok->tok_mode_stack_index = 0; tok->tok_report_warnings = 1; #ifdef Py_DEBUG @@ -371,10 +371,8 @@ remember_fstring_buffers(struct tok_state *tok) for (index = tok->tok_mode_stack_index; index >= 0; --index) { mode = &(tok->tok_mode_stack[index]); - if (mode->kind == TOK_FSTRING_MODE) { - mode->f_string_start_offset = mode->f_string_start - tok->buf; - mode->f_string_multi_line_start_offset = mode->f_string_multi_line_start - tok->buf; - } + mode->f_string_start_offset = mode->f_string_start - tok->buf; + mode->f_string_multi_line_start_offset = mode->f_string_multi_line_start - tok->buf; } } @@ -387,13 +385,33 @@ restore_fstring_buffers(struct tok_state *tok) for (index = tok->tok_mode_stack_index; index >= 0; --index) { mode = &(tok->tok_mode_stack[index]); - if (mode->kind == TOK_FSTRING_MODE) { - mode->f_string_start = tok->buf + mode->f_string_start_offset; - mode->f_string_multi_line_start = tok->buf + mode->f_string_multi_line_start_offset; - } + mode->f_string_start = tok->buf + mode->f_string_start_offset; + mode->f_string_multi_line_start = tok->buf + mode->f_string_multi_line_start_offset; } } +static int +set_fstring_expr(struct tok_state* tok, struct token *token, char c) { + assert(token != NULL); + assert(c == '}' || c == ':' || c == '!'); + tokenizer_mode *tok_mode = TOK_GET_MODE(tok); + + if (!tok_mode->f_string_debug || token->metadata) { + return 0; + } + + PyObject *res = PyUnicode_DecodeUTF8( + tok_mode->last_expr_buffer, + tok_mode->last_expr_size - tok_mode->last_expr_end, + NULL + ); + if (!res) { + return -1; + } + token->metadata = res; + return 0; +} + static int update_fstring_expr(struct tok_state *tok, char cur) { @@ -1081,6 +1099,7 @@ tok_underflow_interactive(struct tok_state *tok) { restore_fstring_buffers(tok); } else { + remember_fstring_buffers(tok); ADVANCE_LINENO(); PyMem_Free(tok->buf); tok->buf = newtok; @@ -1088,6 +1107,7 @@ tok_underflow_interactive(struct tok_state *tok) { tok->line_start = tok->buf; tok->inp = strchr(tok->buf, '\0'); tok->end = tok->inp + 1; + restore_fstring_buffers(tok); } if (tok->done != E_OK) { if (tok->prompt != NULL) { @@ -2226,6 +2246,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t the_current_tok->last_expr_buffer = NULL; the_current_tok->last_expr_size = 0; the_current_tok->last_expr_end = -1; + the_current_tok->f_string_debug = 0; switch (*tok->start) { case 'F': @@ -2352,10 +2373,12 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t * by the `{` case, so for ensuring that we are on the 0th level, we need * to adjust it manually */ int cursor = current_tok->curly_bracket_depth - (c != '{'); - if (cursor == 0 && !update_fstring_expr(tok, c)) { return MAKE_TOKEN(ENDMARKER); } + if (cursor == 0 && c != '{' && 
set_fstring_expr(tok, token, c)) { + return MAKE_TOKEN(ERRORTOKEN); + } if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) { current_tok->kind = TOK_FSTRING_MODE; @@ -2447,6 +2470,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t if (c == '}' && current_tok->curly_bracket_depth == current_tok->curly_bracket_expr_start_depth) { current_tok->curly_bracket_expr_start_depth--; current_tok->kind = TOK_FSTRING_MODE; + current_tok->f_string_debug = 0; } } break; @@ -2460,6 +2484,10 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t return MAKE_TOKEN(syntaxerror(tok, "invalid non-printable character U+%s", hex)); } + if( c == '=' && INSIDE_FSTRING_EXPR(current_tok)) { + current_tok->f_string_debug = 1; + } + /* Punctuation character */ p_start = tok->start; p_end = tok->cur; diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h index 2b94aecce626c3..8b4213c4ce3b5a 100644 --- a/Parser/tokenizer.h +++ b/Parser/tokenizer.h @@ -31,6 +31,7 @@ struct token { int level; int lineno, col_offset, end_lineno, end_col_offset; const char *start, *end; + PyObject *metadata; }; enum tokenizer_mode_kind_t { @@ -58,6 +59,7 @@ typedef struct _tokenizer_mode { Py_ssize_t last_expr_size; Py_ssize_t last_expr_end; char* last_expr_buffer; + int f_string_debug; } tokenizer_mode; /* Tokenizer state */ diff --git a/Python/assemble.c b/Python/assemble.c index e5a361b230cf1c..369dd8dcde9b9b 100644 --- a/Python/assemble.c +++ b/Python/assemble.c @@ -1,10 +1,10 @@ #include <stdbool.h> #include "Python.h" -#include "pycore_flowgraph.h" +#include "pycore_code.h" // write_location_entry_start() #include "pycore_compile.h" +#include "pycore_opcode.h" // _PyOpcode_Caches[] and opcode category macros #include "pycore_pymem.h" // _PyMem_IsPtrFreed() -#include "pycore_code.h" // write_location_entry_start() #define DEFAULT_CODE_SIZE 128 @@ -22,8 +22,8 @@ } typedef _PyCompilerSrcLocation location; -typedef _PyCfgInstruction cfg_instr; -typedef _PyCfgBasicblock basicblock; +typedef _PyCompile_Instruction instruction; +typedef _PyCompile_InstructionSequence instr_sequence; static inline bool same_location(location a, location b) @@ -117,7 +117,8 @@ assemble_emit_exception_table_item(struct assembler *a, int value, int msb) #define MAX_SIZE_OF_ENTRY 20 static int -assemble_emit_exception_table_entry(struct assembler *a, int start, int end, basicblock *handler) +assemble_emit_exception_table_entry(struct assembler *a, int start, int end, + _PyCompile_ExceptHandlerInfo *handler) { Py_ssize_t len = PyBytes_GET_SIZE(a->a_except_table); if (a->a_except_table_off + MAX_SIZE_OF_ENTRY >= len) { @@ -125,13 +126,13 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas } int size = end-start; assert(end > start); - int target = handler->b_offset; - int depth = handler->b_startdepth - 1; - if (handler->b_preserve_lasti) { + int target = handler->h_offset; + int depth = handler->h_startdepth - 1; + if (handler->h_preserve_lasti) { depth -= 1; } assert(depth >= 0); - int depth_lasti = (depth<<1) | handler->b_preserve_lasti; + int depth_lasti = (depth<<1) | handler->h_preserve_lasti; assemble_emit_exception_table_item(a, start, (1<<7)); assemble_emit_exception_table_item(a, size, 0); assemble_emit_exception_table_item(a, target, 0); @@ -140,29 +141,26 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas } static int -assemble_exception_table(struct assembler *a, basicblock *entryblock) 
+assemble_exception_table(struct assembler *a, instr_sequence *instrs) { - basicblock *b; int ioffset = 0; - basicblock *handler = NULL; + _PyCompile_ExceptHandlerInfo handler; + handler.h_offset = -1; int start = -1; - for (b = entryblock; b != NULL; b = b->b_next) { - ioffset = b->b_offset; - for (int i = 0; i < b->b_iused; i++) { - cfg_instr *instr = &b->b_instr[i]; - if (instr->i_except != handler) { - if (handler != NULL) { - RETURN_IF_ERROR( - assemble_emit_exception_table_entry(a, start, ioffset, handler)); - } - start = ioffset; - handler = instr->i_except; + for (int i = 0; i < instrs->s_used; i++) { + instruction *instr = &instrs->s_instrs[i]; + if (instr->i_except_handler_info.h_offset != handler.h_offset) { + if (handler.h_offset >= 0) { + RETURN_IF_ERROR( + assemble_emit_exception_table_entry(a, start, ioffset, &handler)); } - ioffset += _PyCfg_InstrSize(instr); + start = ioffset; + handler = instr->i_except_handler_info; } + ioffset += _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg); } - if (handler != NULL) { - RETURN_IF_ERROR(assemble_emit_exception_table_entry(a, start, ioffset, handler)); + if (handler.h_offset >= 0) { + RETURN_IF_ERROR(assemble_emit_exception_table_entry(a, start, ioffset, &handler)); } return SUCCESS; } @@ -316,31 +314,31 @@ assemble_emit_location(struct assembler* a, location loc, int isize) } static int -assemble_location_info(struct assembler *a, basicblock *entryblock, int firstlineno) +assemble_location_info(struct assembler *a, instr_sequence *instrs, + int firstlineno) { a->a_lineno = firstlineno; location loc = NO_LOCATION; int size = 0; - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int j = 0; j < b->b_iused; j++) { - if (!same_location(loc, b->b_instr[j].i_loc)) { + for (int i = 0; i < instrs->s_used; i++) { + instruction *instr = &instrs->s_instrs[i]; + if (!same_location(loc, instr->i_loc)) { RETURN_IF_ERROR(assemble_emit_location(a, loc, size)); - loc = b->b_instr[j].i_loc; + loc = instr->i_loc; size = 0; - } - size += _PyCfg_InstrSize(&b->b_instr[j]); } + size += _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg); } RETURN_IF_ERROR(assemble_emit_location(a, loc, size)); return SUCCESS; } static void -write_instr(_Py_CODEUNIT *codestr, cfg_instr *instruction, int ilen) +write_instr(_Py_CODEUNIT *codestr, instruction *instr, int ilen) { - int opcode = instruction->i_opcode; + int opcode = instr->i_opcode; assert(!IS_PSEUDO_OPCODE(opcode)); - int oparg = instruction->i_oparg; + int oparg = instr->i_oparg; assert(HAS_ARG(opcode) || oparg == 0); int caches = _PyOpcode_Caches[opcode]; switch (ilen - caches) { @@ -380,12 +378,12 @@ write_instr(_Py_CODEUNIT *codestr, cfg_instr *instruction, int ilen) */ static int -assemble_emit_instr(struct assembler *a, cfg_instr *i) +assemble_emit_instr(struct assembler *a, instruction *instr) { Py_ssize_t len = PyBytes_GET_SIZE(a->a_bytecode); _Py_CODEUNIT *code; - int size = _PyCfg_InstrSize(i); + int size = _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg); if (a->a_offset + size >= len / (int)sizeof(_Py_CODEUNIT)) { if (len > PY_SSIZE_T_MAX / 2) { return ERROR; @@ -394,25 +392,24 @@ assemble_emit_instr(struct assembler *a, cfg_instr *i) } code = (_Py_CODEUNIT *)PyBytes_AS_STRING(a->a_bytecode) + a->a_offset; a->a_offset += size; - write_instr(code, i, size); + write_instr(code, instr, size); return SUCCESS; } static int -assemble_emit(struct assembler *a, basicblock *entryblock, int first_lineno, - PyObject *const_cache) +assemble_emit(struct assembler *a, instr_sequence 
*instrs, + int first_lineno, PyObject *const_cache) { RETURN_IF_ERROR(assemble_init(a, first_lineno)); - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int j = 0; j < b->b_iused; j++) { - RETURN_IF_ERROR(assemble_emit_instr(a, &b->b_instr[j])); - } + for (int i = 0; i < instrs->s_used; i++) { + instruction *instr = &instrs->s_instrs[i]; + RETURN_IF_ERROR(assemble_emit_instr(a, instr)); } - RETURN_IF_ERROR(assemble_location_info(a, entryblock, a->a_lineno)); + RETURN_IF_ERROR(assemble_location_info(a, instrs, a->a_lineno)); - RETURN_IF_ERROR(assemble_exception_table(a, entryblock)); + RETURN_IF_ERROR(assemble_exception_table(a, instrs)); RETURN_IF_ERROR(_PyBytes_Resize(&a->a_except_table, a->a_except_table_off)); RETURN_IF_ERROR(_PyCompile_ConstCacheMergeOne(const_cache, &a->a_except_table)); @@ -586,13 +583,13 @@ makecode(_PyCompile_CodeUnitMetadata *umd, struct assembler *a, PyObject *const_ PyCodeObject * _PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *umd, PyObject *const_cache, - PyObject *consts, int maxdepth, basicblock *entryblock, + PyObject *consts, int maxdepth, instr_sequence *instrs, int nlocalsplus, int code_flags, PyObject *filename) { PyCodeObject *co = NULL; struct assembler a; - int res = assemble_emit(&a, entryblock, umd->u_firstlineno, const_cache); + int res = assemble_emit(&a, instrs, umd->u_firstlineno, const_cache); if (res == SUCCESS) { co = makecode(umd, &a, const_cache, consts, maxdepth, nlocalsplus, code_flags, filename); diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index fcb4d7a9a975c6..8840bbabe4b584 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2316,7 +2316,7 @@ builtin_round_impl(PyObject *module, PyObject *number, PyObject *ndigits) { PyObject *round, *result; - if (Py_TYPE(number)->tp_dict == NULL) { + if (!_PyType_IsReady(Py_TYPE(number))) { if (PyType_Ready(Py_TYPE(number)) < 0) return NULL; } diff --git a/Python/bytecodes.c b/Python/bytecodes.c index dc66059cc81cc0..e83894e8902872 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1554,34 +1554,47 @@ dummy_func( PREDICT(JUMP_BACKWARD); } - inst(LOAD_SUPER_ATTR, (global_super, class, self -- res2 if (oparg & 1), res)) { + family(load_super_attr, INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR) = { + LOAD_SUPER_ATTR, + LOAD_SUPER_ATTR_METHOD, + }; + + inst(LOAD_SUPER_ATTR, (unused/9, global_super, class, self -- res2 if (oparg & 1), res)) { PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2); - if (global_super == (PyObject *)&PySuper_Type && PyType_Check(class)) { - int method = 0; - Py_DECREF(global_super); - res = _PySuper_Lookup((PyTypeObject *)class, self, name, oparg & 1 ? &method : NULL); - Py_DECREF(class); - if (res == NULL) { - Py_DECREF(self); - ERROR_IF(true, error); - } - // Works with CALL, pushes two values: either `meth | self` or `NULL | meth`. 
- if (method) { - res2 = res; - res = self; // transfer ownership - } else { - res2 = NULL; - Py_DECREF(self); - } - } else { - PyObject *stack[] = {class, self}; - PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); - DECREF_INPUTS(); - ERROR_IF(super == NULL, error); - res = PyObject_GetAttr(super, name); - Py_DECREF(super); - ERROR_IF(res == NULL, error); + int load_method = oparg & 1; + #if ENABLE_SPECIALIZATION + _PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + next_instr--; + _Py_Specialize_LoadSuperAttr(global_super, class, self, next_instr, name, load_method); + DISPATCH_SAME_OPARG(); } + STAT_INC(LOAD_SUPER_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + #endif /* ENABLE_SPECIALIZATION */ + + // we make no attempt to optimize here; specializations should + // handle any case whose performance we care about + PyObject *stack[] = {class, self}; + PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); + DECREF_INPUTS(); + ERROR_IF(super == NULL, error); + res = PyObject_GetAttr(super, name); + Py_DECREF(super); + ERROR_IF(res == NULL, error); + } + + inst(LOAD_SUPER_ATTR_METHOD, (unused/1, class_version/2, self_type_version/2, method/4, global_super, class, self -- res2, res)) { + DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); + DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); + DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR); + PyTypeObject *self_type = Py_TYPE(self); + DEOPT_IF(self_type->tp_version_tag != self_type_version, LOAD_SUPER_ATTR); + res2 = method; + res = self; // transfer ownership + Py_INCREF(res2); + Py_DECREF(global_super); + Py_DECREF(class); } family(load_attr, INLINE_CACHE_ENTRIES_LOAD_ATTR) = { @@ -3090,6 +3103,25 @@ dummy_func( } } else { + if (Py_TYPE(func) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)func)->vectorcall == _PyFunction_Vectorcall) { + assert(PyTuple_CheckExact(callargs)); + Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); + int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); + + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate, + (PyFunctionObject *)func, locals, + nargs, callargs, kwargs); + // Need to manually shrink the stack since we exit with DISPATCH_INLINED. + STACK_SHRINK(oparg + 3); + if (new_frame == NULL) { + goto error; + } + frame->return_offset = 0; + DISPATCH_INLINED(new_frame); + } result = PyObject_Call(func, callargs, kwargs); } DECREF_INPUTS(); diff --git a/Python/ceval.c b/Python/ceval.c index 5d5221b2e40990..958689debc87f8 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -212,6 +212,9 @@ static _PyInterpreterFrame * _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, PyObject *locals, PyObject* const* args, size_t argcount, PyObject *kwnames); +static _PyInterpreterFrame * +_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, + PyObject *locals, Py_ssize_t nargs, PyObject *callargs, PyObject *kwargs); static void _PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame); @@ -1501,6 +1504,49 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, return NULL; } +/* Same as _PyEvalFramePushAndInit but takes an args tuple and kwargs dict. + Steals references to func, callargs and kwargs. 
+*/ +static _PyInterpreterFrame * +_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, + PyObject *locals, Py_ssize_t nargs, PyObject *callargs, PyObject *kwargs) +{ + bool has_dict = (kwargs != NULL && PyDict_GET_SIZE(kwargs) > 0); + PyObject *kwnames = NULL; + PyObject *const *newargs; + if (has_dict) { + newargs = _PyStack_UnpackDict(tstate, _PyTuple_ITEMS(callargs), nargs, kwargs, &kwnames); + if (newargs == NULL) { + Py_DECREF(func); + goto error; + } + } + else { + newargs = &PyTuple_GET_ITEM(callargs, 0); + /* We need to incref all our args since the new frame steals the references. */ + for (Py_ssize_t i = 0; i < nargs; ++i) { + Py_INCREF(PyTuple_GET_ITEM(callargs, i)); + } + } + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( + tstate, (PyFunctionObject *)func, locals, + newargs, nargs, kwnames + ); + if (has_dict) { + _PyStack_UnpackDict_FreeNoDecRef(newargs, kwnames); + } + /* No need to decref func here because the reference has been stolen by + _PyEvalFramePushAndInit. + */ + Py_DECREF(callargs); + Py_XDECREF(kwargs); + return new_frame; +error: + Py_DECREF(callargs); + Py_XDECREF(kwargs); + return NULL; +} + PyObject * _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, PyObject *locals, diff --git a/Python/compile.c b/Python/compile.c index a0ad3687f586d8..e8789def867308 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -149,7 +149,18 @@ enum { COMPILER_SCOPE_COMPREHENSION, }; -typedef _PyCompilerInstruction instruction; + +int +_PyCompile_InstrSize(int opcode, int oparg) +{ + assert(!IS_PSEUDO_OPCODE(opcode)); + assert(HAS_ARG(opcode) || oparg == 0); + int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg); + int caches = _PyOpcode_Caches[opcode]; + return extended_args + 1 + caches; +} + +typedef _PyCompile_Instruction instruction; typedef _PyCompile_InstructionSequence instr_sequence; #define INITIAL_INSTR_SEQUENCE_SIZE 100 @@ -6968,10 +6979,6 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, goto error; } - if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) { - goto error; - } - /** Assembly **/ int nlocalsplus = prepare_localsplus(u, &g, code_flags); if (nlocalsplus < 0) { @@ -6990,15 +6997,15 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, if (_PyCfg_ResolveJumps(&g) < 0) { goto error; } + + /* Can't modify the bytecode after computing jump offsets. */ + if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) { goto error; } - - /* Can't modify the bytecode after computing jump offsets. */ - co = _PyAssemble_MakeCodeObject(&u->u_metadata, const_cache, consts, - maxdepth, g.g_entryblock, nlocalsplus, + maxdepth, &optimized_instrs, nlocalsplus, code_flags, filename); error: @@ -7039,11 +7046,18 @@ cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq) RETURN_IF_ERROR(instr_sequence_use_label(seq, b->b_label.id)); for (int i = 0; i < b->b_iused; i++) { cfg_instr *instr = &b->b_instr[i]; - int arg = HAS_TARGET(instr->i_opcode) ? 
- instr->i_target->b_label.id : - instr->i_oparg; RETURN_IF_ERROR( - instr_sequence_addop(seq, instr->i_opcode, arg, instr->i_loc)); + instr_sequence_addop(seq, instr->i_opcode, instr->i_oparg, instr->i_loc)); + + _PyCompile_ExceptHandlerInfo *hi = &seq->s_instrs[seq->s_used-1].i_except_handler_info; + if (instr->i_except != NULL) { + hi->h_offset = instr->i_except->b_offset; + hi->h_startdepth = instr->i_except->b_startdepth; + hi->h_preserve_lasti = instr->i_except->b_preserve_lasti; + } + else { + hi->h_offset = -1; + } } } return SUCCESS; diff --git a/Python/errors.c b/Python/errors.c index 0ff6a0d5985f0f..7fc267385c569b 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -1342,15 +1342,9 @@ static PyStructSequence_Desc UnraisableHookArgs_desc = { PyStatus _PyErr_InitTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return _PyStatus_OK(); - } - - if (UnraisableHookArgsType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin(&UnraisableHookArgsType, - &UnraisableHookArgs_desc) < 0) { - return _PyStatus_ERR("failed to initialize UnraisableHookArgs type"); - } + if (_PyStructSequence_InitBuiltin(&UnraisableHookArgsType, + &UnraisableHookArgs_desc) < 0) { + return _PyStatus_ERR("failed to initialize UnraisableHookArgs type"); } return _PyStatus_OK(); } diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 67cc5c5e88be10..6f83a910cab392 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -166,16 +166,10 @@ _PyBasicblock_InsertInstruction(basicblock *block, int pos, cfg_instr *instr) { return SUCCESS; } -int -_PyCfg_InstrSize(cfg_instr *instruction) +static int +instr_size(cfg_instr *instruction) { - int opcode = instruction->i_opcode; - assert(!IS_PSEUDO_OPCODE(opcode)); - int oparg = instruction->i_oparg; - assert(HAS_ARG(opcode) || oparg == 0); - int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg); - int caches = _PyOpcode_Caches[opcode]; - return extended_args + 1 + caches; + return _PyCompile_InstrSize(instruction->i_opcode, instruction->i_oparg); } static int @@ -183,7 +177,7 @@ blocksize(basicblock *b) { int size = 0; for (int i = 0; i < b->b_iused; i++) { - size += _PyCfg_InstrSize(&b->b_instr[i]); + size += instr_size(&b->b_instr[i]); } return size; } @@ -492,7 +486,7 @@ resolve_jump_offsets(basicblock *entryblock) bsize = b->b_offset; for (int i = 0; i < b->b_iused; i++) { cfg_instr *instr = &b->b_instr[i]; - int isize = _PyCfg_InstrSize(instr); + int isize = instr_size(instr); /* jump offsets are computed relative to * the instruction pointer after fetching * the jump instruction. @@ -508,7 +502,7 @@ resolve_jump_offsets(basicblock *entryblock) assert(!IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode)); instr->i_oparg -= bsize; } - if (_PyCfg_InstrSize(instr) != isize) { + if (instr_size(instr) != isize) { extended_arg_recompile = 1; } } @@ -520,7 +514,7 @@ resolve_jump_offsets(basicblock *entryblock) with a better solution. The issue is that in the first loop blocksize() is called - which calls _PyCfg_InstrSize() which requires i_oparg be set + which calls instr_size() which requires i_oparg be set appropriately. There is a bootstrap problem because i_oparg is calculated in the second loop above. 
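Note on the shared size rule: after this patch, compile.c (_PyCompile_InstrSize) and flowgraph.c (instr_size) compute an instruction's encoded size the same way: one code unit for the instruction itself, one extra unit per EXTENDED_ARG prefix needed when the oparg does not fit in a single byte, plus the opcode's inline cache entries from _PyOpcode_Caches. A minimal standalone C sketch of that arithmetic follows; it is not part of the patch, and num_caches is a hypothetical stand-in for the _PyOpcode_Caches[opcode] lookup.

    #include <stdio.h>

    /* Sketch: size = EXTENDED_ARG prefixes + 1 + inline cache entries.
       num_caches stands in for the _PyOpcode_Caches[opcode] lookup (assumption). */
    static int
    sketch_instr_size(int oparg, int num_caches)
    {
        int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg);
        return extended_args + 1 + num_caches;
    }

    int
    main(void)
    {
        printf("%d\n", sketch_instr_size(5, 0));    /* 1: oparg fits in one byte */
        printf("%d\n", sketch_instr_size(300, 0));  /* 2: one EXTENDED_ARG prefix */
        printf("%d\n", sketch_instr_size(300, 9));  /* 11: plus 9 cache code units */
        return 0;
    }

For instance, LOAD_SUPER_ATTR in this patch reserves 9 inline cache entries (INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR), so each such instruction occupies 10 code units even with a one-byte oparg.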
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 0b5a91f7a1d94a..069a7ced0a4c25 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -2168,48 +2168,74 @@ } TARGET(LOAD_SUPER_ATTR) { + PREDICTED(LOAD_SUPER_ATTR); + static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 9, "incorrect cache size"); PyObject *self = stack_pointer[-1]; PyObject *class = stack_pointer[-2]; PyObject *global_super = stack_pointer[-3]; PyObject *res2 = NULL; PyObject *res; - #line 1558 "Python/bytecodes.c" + #line 1563 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2); - if (global_super == (PyObject *)&PySuper_Type && PyType_Check(class)) { - int method = 0; - Py_DECREF(global_super); - res = _PySuper_Lookup((PyTypeObject *)class, self, name, oparg & 1 ? &method : NULL); - Py_DECREF(class); - if (res == NULL) { - Py_DECREF(self); - if (true) goto pop_3_error; - } - // Works with CALL, pushes two values: either `meth | self` or `NULL | meth`. - if (method) { - res2 = res; - res = self; // transfer ownership - } else { - res2 = NULL; - Py_DECREF(self); - } - } else { - PyObject *stack[] = {class, self}; - PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); - #line 2199 "Python/generated_cases.c.h" - Py_DECREF(global_super); - Py_DECREF(class); - Py_DECREF(self); - #line 1580 "Python/bytecodes.c" - if (super == NULL) goto pop_3_error; - res = PyObject_GetAttr(super, name); - Py_DECREF(super); - if (res == NULL) goto pop_3_error; - } - #line 2209 "Python/generated_cases.c.h" + int load_method = oparg & 1; + #if ENABLE_SPECIALIZATION + _PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr; + if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { + next_instr--; + _Py_Specialize_LoadSuperAttr(global_super, class, self, next_instr, name, load_method); + DISPATCH_SAME_OPARG(); + } + STAT_INC(LOAD_SUPER_ATTR, deferred); + DECREMENT_ADAPTIVE_COUNTER(cache->counter); + #endif /* ENABLE_SPECIALIZATION */ + + // we make no attempt to optimize here; specializations should + // handle any case whose performance we care about + PyObject *stack[] = {class, self}; + PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); + #line 2197 "Python/generated_cases.c.h" + Py_DECREF(global_super); + Py_DECREF(class); + Py_DECREF(self); + #line 1581 "Python/bytecodes.c" + if (super == NULL) goto pop_3_error; + res = PyObject_GetAttr(super, name); + Py_DECREF(super); + if (res == NULL) goto pop_3_error; + #line 2206 "Python/generated_cases.c.h" STACK_SHRINK(2); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = res2; } + next_instr += 9; + DISPATCH(); + } + + TARGET(LOAD_SUPER_ATTR_METHOD) { + PyObject *self = stack_pointer[-1]; + PyObject *class = stack_pointer[-2]; + PyObject *global_super = stack_pointer[-3]; + PyObject *res2; + PyObject *res; + uint32_t class_version = read_u32(&next_instr[1].cache); + uint32_t self_type_version = read_u32(&next_instr[3].cache); + PyObject *method = read_obj(&next_instr[5].cache); + #line 1588 "Python/bytecodes.c" + DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); + DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); + DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR); + PyTypeObject *self_type = Py_TYPE(self); + DEOPT_IF(self_type->tp_version_tag != self_type_version, LOAD_SUPER_ATTR); + res2 = method; + res = self; // transfer ownership + Py_INCREF(res2); + Py_DECREF(global_super); + Py_DECREF(class); + #line 2235 "Python/generated_cases.c.h" + STACK_SHRINK(1); + stack_pointer[-1] = res; + stack_pointer[-2] = res2; + next_instr += 9; DISPATCH(); } @@ -2219,7 +2245,7 @@ PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; - #line 1602 "Python/bytecodes.c" + #line 1615 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2253,9 +2279,9 @@ NULL | meth | arg1 | ... | argN */ - #line 2257 "Python/generated_cases.c.h" + #line 2283 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 1636 "Python/bytecodes.c" + #line 1649 "Python/bytecodes.c" if (meth == NULL) goto pop_1_error; res2 = NULL; res = meth; @@ -2264,12 +2290,12 @@ else { /* Classic, pushes one value. */ res = PyObject_GetAttr(owner, name); - #line 2268 "Python/generated_cases.c.h" + #line 2294 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 1645 "Python/bytecodes.c" + #line 1658 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; } - #line 2273 "Python/generated_cases.c.h" + #line 2299 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -2283,7 +2309,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1650 "Python/bytecodes.c" + #line 1663 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2296,7 +2322,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2300 "Python/generated_cases.c.h" + #line 2326 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2311,7 +2337,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1666 "Python/bytecodes.c" + #line 1679 "Python/bytecodes.c" DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; assert(dict != NULL); @@ -2324,7 +2350,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2328 "Python/generated_cases.c.h" + #line 2354 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 
1 : 0)); stack_pointer[-1] = res; @@ -2339,7 +2365,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1682 "Python/bytecodes.c" + #line 1695 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2366,7 +2392,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2370 "Python/generated_cases.c.h" + #line 2396 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2381,7 +2407,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1712 "Python/bytecodes.c" + #line 1725 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2391,7 +2417,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2395 "Python/generated_cases.c.h" + #line 2421 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2406,7 +2432,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 1725 "Python/bytecodes.c" + #line 1738 "Python/bytecodes.c" DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, @@ -2418,7 +2444,7 @@ res = descr; assert(res != NULL); Py_INCREF(res); - #line 2422 "Python/generated_cases.c.h" + #line 2448 "Python/generated_cases.c.h" Py_DECREF(cls); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2432,7 +2458,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *fget = read_obj(&next_instr[5].cache); - #line 1740 "Python/bytecodes.c" + #line 1753 "Python/bytecodes.c" DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); @@ -2456,7 +2482,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 2460 "Python/generated_cases.c.h" + #line 2486 "Python/generated_cases.c.h" } TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { @@ -2464,7 +2490,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *getattribute = read_obj(&next_instr[5].cache); - #line 1766 "Python/bytecodes.c" + #line 1779 "Python/bytecodes.c" DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); @@ -2490,7 +2516,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 2494 "Python/generated_cases.c.h" + #line 2520 "Python/generated_cases.c.h" } TARGET(STORE_ATTR_INSTANCE_VALUE) { @@ -2498,7 +2524,7 @@ PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1794 "Python/bytecodes.c" + #line 1807 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2516,7 +2542,7 @@ Py_DECREF(old_value); } Py_DECREF(owner); - #line 2520 "Python/generated_cases.c.h" + #line 2546 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2527,7 +2553,7 @@ PyObject *value = stack_pointer[-2]; 
uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t hint = read_u16(&next_instr[3].cache); - #line 1814 "Python/bytecodes.c" + #line 1827 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2566,7 +2592,7 @@ /* PEP 509 */ dict->ma_version_tag = new_version; Py_DECREF(owner); - #line 2570 "Python/generated_cases.c.h" + #line 2596 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2577,7 +2603,7 @@ PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1855 "Python/bytecodes.c" + #line 1868 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2587,7 +2613,7 @@ *(PyObject **)addr = value; Py_XDECREF(old_value); Py_DECREF(owner); - #line 2591 "Python/generated_cases.c.h" + #line 2617 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2599,7 +2625,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1874 "Python/bytecodes.c" + #line 1887 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2612,12 +2638,12 @@ #endif /* ENABLE_SPECIALIZATION */ assert((oparg >> 4) <= Py_GE); res = PyObject_RichCompare(left, right, oparg>>4); - #line 2616 "Python/generated_cases.c.h" + #line 2642 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1887 "Python/bytecodes.c" + #line 1900 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 2621 "Python/generated_cases.c.h" + #line 2647 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2628,7 +2654,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1891 "Python/bytecodes.c" + #line 1904 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -2640,7 +2666,7 @@ _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); res = (sign_ish & oparg) ? Py_True : Py_False; Py_INCREF(res); - #line 2644 "Python/generated_cases.c.h" + #line 2670 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2651,7 +2677,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1906 "Python/bytecodes.c" + #line 1919 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP); @@ -2667,7 +2693,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); res = (sign_ish & oparg) ? Py_True : Py_False; Py_INCREF(res); - #line 2671 "Python/generated_cases.c.h" + #line 2697 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2678,7 +2704,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1925 "Python/bytecodes.c" + #line 1938 "Python/bytecodes.c" DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -2691,7 +2717,7 @@ assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS); res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? 
Py_True : Py_False; Py_INCREF(res); - #line 2695 "Python/generated_cases.c.h" + #line 2721 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2702,14 +2728,14 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1940 "Python/bytecodes.c" + #line 1953 "Python/bytecodes.c" int res = Py_Is(left, right) ^ oparg; - #line 2708 "Python/generated_cases.c.h" + #line 2734 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1942 "Python/bytecodes.c" + #line 1955 "Python/bytecodes.c" b = Py_NewRef(res ? Py_True : Py_False); - #line 2713 "Python/generated_cases.c.h" + #line 2739 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = b; DISPATCH(); @@ -2719,15 +2745,15 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1946 "Python/bytecodes.c" + #line 1959 "Python/bytecodes.c" int res = PySequence_Contains(right, left); - #line 2725 "Python/generated_cases.c.h" + #line 2751 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1948 "Python/bytecodes.c" + #line 1961 "Python/bytecodes.c" if (res < 0) goto pop_2_error; b = Py_NewRef((res^oparg) ? Py_True : Py_False); - #line 2731 "Python/generated_cases.c.h" + #line 2757 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = b; DISPATCH(); @@ -2738,12 +2764,12 @@ PyObject *exc_value = stack_pointer[-2]; PyObject *rest; PyObject *match; - #line 1953 "Python/bytecodes.c" + #line 1966 "Python/bytecodes.c" if (check_except_star_type_valid(tstate, match_type) < 0) { - #line 2744 "Python/generated_cases.c.h" + #line 2770 "Python/generated_cases.c.h" Py_DECREF(exc_value); Py_DECREF(match_type); - #line 1955 "Python/bytecodes.c" + #line 1968 "Python/bytecodes.c" if (true) goto pop_2_error; } @@ -2751,10 +2777,10 @@ rest = NULL; int res = exception_group_match(exc_value, match_type, &match, &rest); - #line 2755 "Python/generated_cases.c.h" + #line 2781 "Python/generated_cases.c.h" Py_DECREF(exc_value); Py_DECREF(match_type); - #line 1963 "Python/bytecodes.c" + #line 1976 "Python/bytecodes.c" if (res < 0) goto pop_2_error; assert((match == NULL) == (rest == NULL)); @@ -2763,7 +2789,7 @@ if (!Py_IsNone(match)) { PyErr_SetHandledException(match); } - #line 2767 "Python/generated_cases.c.h" + #line 2793 "Python/generated_cases.c.h" stack_pointer[-1] = match; stack_pointer[-2] = rest; DISPATCH(); @@ -2773,21 +2799,21 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1974 "Python/bytecodes.c" + #line 1987 "Python/bytecodes.c" assert(PyExceptionInstance_Check(left)); if (check_except_type_valid(tstate, right) < 0) { - #line 2780 "Python/generated_cases.c.h" + #line 2806 "Python/generated_cases.c.h" Py_DECREF(right); - #line 1977 "Python/bytecodes.c" + #line 1990 "Python/bytecodes.c" if (true) goto pop_1_error; } int res = PyErr_GivenExceptionMatches(left, right); - #line 2787 "Python/generated_cases.c.h" + #line 2813 "Python/generated_cases.c.h" Py_DECREF(right); - #line 1982 "Python/bytecodes.c" + #line 1995 "Python/bytecodes.c" b = Py_NewRef(res ? 
Py_True : Py_False); - #line 2791 "Python/generated_cases.c.h" + #line 2817 "Python/generated_cases.c.h" stack_pointer[-1] = b; DISPATCH(); } @@ -2796,15 +2822,15 @@ PyObject *fromlist = stack_pointer[-1]; PyObject *level = stack_pointer[-2]; PyObject *res; - #line 1986 "Python/bytecodes.c" + #line 1999 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); res = import_name(tstate, frame, name, fromlist, level); - #line 2803 "Python/generated_cases.c.h" + #line 2829 "Python/generated_cases.c.h" Py_DECREF(level); Py_DECREF(fromlist); - #line 1989 "Python/bytecodes.c" + #line 2002 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 2808 "Python/generated_cases.c.h" + #line 2834 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; DISPATCH(); @@ -2813,29 +2839,29 @@ TARGET(IMPORT_FROM) { PyObject *from = stack_pointer[-1]; PyObject *res; - #line 1993 "Python/bytecodes.c" + #line 2006 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); res = import_from(tstate, from, name); if (res == NULL) goto error; - #line 2821 "Python/generated_cases.c.h" + #line 2847 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); } TARGET(JUMP_FORWARD) { - #line 1999 "Python/bytecodes.c" + #line 2012 "Python/bytecodes.c" JUMPBY(oparg); - #line 2830 "Python/generated_cases.c.h" + #line 2856 "Python/generated_cases.c.h" DISPATCH(); } TARGET(JUMP_BACKWARD) { PREDICTED(JUMP_BACKWARD); - #line 2003 "Python/bytecodes.c" + #line 2016 "Python/bytecodes.c" assert(oparg < INSTR_OFFSET()); JUMPBY(-oparg); - #line 2839 "Python/generated_cases.c.h" + #line 2865 "Python/generated_cases.c.h" CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -2843,7 +2869,7 @@ TARGET(POP_JUMP_IF_FALSE) { PREDICTED(POP_JUMP_IF_FALSE); PyObject *cond = stack_pointer[-1]; - #line 2009 "Python/bytecodes.c" + #line 2022 "Python/bytecodes.c" if (Py_IsTrue(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2853,9 +2879,9 @@ } else { int err = PyObject_IsTrue(cond); - #line 2857 "Python/generated_cases.c.h" + #line 2883 "Python/generated_cases.c.h" Py_DECREF(cond); - #line 2019 "Python/bytecodes.c" + #line 2032 "Python/bytecodes.c" if (err == 0) { JUMPBY(oparg); } @@ -2863,14 +2889,14 @@ if (err < 0) goto pop_1_error; } } - #line 2867 "Python/generated_cases.c.h" + #line 2893 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_TRUE) { PyObject *cond = stack_pointer[-1]; - #line 2029 "Python/bytecodes.c" + #line 2042 "Python/bytecodes.c" if (Py_IsFalse(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2880,9 +2906,9 @@ } else { int err = PyObject_IsTrue(cond); - #line 2884 "Python/generated_cases.c.h" + #line 2910 "Python/generated_cases.c.h" Py_DECREF(cond); - #line 2039 "Python/bytecodes.c" + #line 2052 "Python/bytecodes.c" if (err > 0) { JUMPBY(oparg); } @@ -2890,67 +2916,67 @@ if (err < 0) goto pop_1_error; } } - #line 2894 "Python/generated_cases.c.h" + #line 2920 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_NOT_NONE) { PyObject *value = stack_pointer[-1]; - #line 2049 "Python/bytecodes.c" + #line 2062 "Python/bytecodes.c" if (!Py_IsNone(value)) { - #line 2903 "Python/generated_cases.c.h" + #line 2929 "Python/generated_cases.c.h" Py_DECREF(value); - #line 2051 "Python/bytecodes.c" + #line 2064 "Python/bytecodes.c" JUMPBY(oparg); } else { _Py_DECREF_NO_DEALLOC(value); } - #line 2911 "Python/generated_cases.c.h" + #line 2937 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_NONE) 
{ PyObject *value = stack_pointer[-1]; - #line 2059 "Python/bytecodes.c" + #line 2072 "Python/bytecodes.c" if (Py_IsNone(value)) { _Py_DECREF_NO_DEALLOC(value); JUMPBY(oparg); } else { - #line 2924 "Python/generated_cases.c.h" + #line 2950 "Python/generated_cases.c.h" Py_DECREF(value); - #line 2065 "Python/bytecodes.c" + #line 2078 "Python/bytecodes.c" } - #line 2928 "Python/generated_cases.c.h" + #line 2954 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(JUMP_BACKWARD_NO_INTERRUPT) { - #line 2069 "Python/bytecodes.c" + #line 2082 "Python/bytecodes.c" /* This bytecode is used in the `yield from` or `await` loop. * If there is an interrupt, we want it handled in the innermost * generator or coroutine, so we deliberately do not check it here. * (see bpo-30039). */ JUMPBY(-oparg); - #line 2941 "Python/generated_cases.c.h" + #line 2967 "Python/generated_cases.c.h" DISPATCH(); } TARGET(GET_LEN) { PyObject *obj = stack_pointer[-1]; PyObject *len_o; - #line 2078 "Python/bytecodes.c" + #line 2091 "Python/bytecodes.c" // PUSH(len(TOS)) Py_ssize_t len_i = PyObject_Length(obj); if (len_i < 0) goto error; len_o = PyLong_FromSsize_t(len_i); if (len_o == NULL) goto error; - #line 2954 "Python/generated_cases.c.h" + #line 2980 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = len_o; DISPATCH(); @@ -2961,16 +2987,16 @@ PyObject *type = stack_pointer[-2]; PyObject *subject = stack_pointer[-3]; PyObject *attrs; - #line 2086 "Python/bytecodes.c" + #line 2099 "Python/bytecodes.c" // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or // None on failure. assert(PyTuple_CheckExact(names)); attrs = match_class(tstate, subject, type, oparg, names); - #line 2970 "Python/generated_cases.c.h" + #line 2996 "Python/generated_cases.c.h" Py_DECREF(subject); Py_DECREF(type); Py_DECREF(names); - #line 2091 "Python/bytecodes.c" + #line 2104 "Python/bytecodes.c" if (attrs) { assert(PyTuple_CheckExact(attrs)); // Success! } @@ -2978,7 +3004,7 @@ if (_PyErr_Occurred(tstate)) goto pop_3_error; attrs = Py_NewRef(Py_None); // Failure! } - #line 2982 "Python/generated_cases.c.h" + #line 3008 "Python/generated_cases.c.h" STACK_SHRINK(2); stack_pointer[-1] = attrs; DISPATCH(); @@ -2987,10 +3013,10 @@ TARGET(MATCH_MAPPING) { PyObject *subject = stack_pointer[-1]; PyObject *res; - #line 2101 "Python/bytecodes.c" + #line 2114 "Python/bytecodes.c" int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; res = Py_NewRef(match ? Py_True : Py_False); - #line 2994 "Python/generated_cases.c.h" + #line 3020 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); @@ -3000,10 +3026,10 @@ TARGET(MATCH_SEQUENCE) { PyObject *subject = stack_pointer[-1]; PyObject *res; - #line 2107 "Python/bytecodes.c" + #line 2120 "Python/bytecodes.c" int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; res = Py_NewRef(match ? Py_True : Py_False); - #line 3007 "Python/generated_cases.c.h" + #line 3033 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); @@ -3014,11 +3040,11 @@ PyObject *keys = stack_pointer[-1]; PyObject *subject = stack_pointer[-2]; PyObject *values_or_none; - #line 2113 "Python/bytecodes.c" + #line 2126 "Python/bytecodes.c" // On successful match, PUSH(values). Otherwise, PUSH(None). 
values_or_none = match_keys(tstate, subject, keys); if (values_or_none == NULL) goto error; - #line 3022 "Python/generated_cases.c.h" + #line 3048 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = values_or_none; DISPATCH(); @@ -3027,14 +3053,14 @@ TARGET(GET_ITER) { PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 2119 "Python/bytecodes.c" + #line 2132 "Python/bytecodes.c" /* before: [obj]; after [getiter(obj)] */ iter = PyObject_GetIter(iterable); - #line 3034 "Python/generated_cases.c.h" + #line 3060 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 2122 "Python/bytecodes.c" + #line 2135 "Python/bytecodes.c" if (iter == NULL) goto pop_1_error; - #line 3038 "Python/generated_cases.c.h" + #line 3064 "Python/generated_cases.c.h" stack_pointer[-1] = iter; DISPATCH(); } @@ -3042,7 +3068,7 @@ TARGET(GET_YIELD_FROM_ITER) { PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 2126 "Python/bytecodes.c" + #line 2139 "Python/bytecodes.c" /* before: [obj]; after [getiter(obj)] */ if (PyCoro_CheckExact(iterable)) { /* `iterable` is a coroutine */ @@ -3065,11 +3091,11 @@ if (iter == NULL) { goto error; } - #line 3069 "Python/generated_cases.c.h" + #line 3095 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 2149 "Python/bytecodes.c" + #line 2162 "Python/bytecodes.c" } - #line 3073 "Python/generated_cases.c.h" + #line 3099 "Python/generated_cases.c.h" stack_pointer[-1] = iter; PREDICT(LOAD_CONST); DISPATCH(); @@ -3080,7 +3106,7 @@ static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size"); PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2168 "Python/bytecodes.c" + #line 2181 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyForIterCache *cache = (_PyForIterCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -3111,7 +3137,7 @@ DISPATCH(); } // Common case: no jump, leave it to the code generator - #line 3115 "Python/generated_cases.c.h" + #line 3141 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3119,7 +3145,7 @@ } TARGET(INSTRUMENTED_FOR_ITER) { - #line 2201 "Python/bytecodes.c" + #line 2214 "Python/bytecodes.c" _Py_CODEUNIT *here = next_instr-1; _Py_CODEUNIT *target; PyObject *iter = TOP(); @@ -3145,14 +3171,14 @@ target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1; } INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH); - #line 3149 "Python/generated_cases.c.h" + #line 3175 "Python/generated_cases.c.h" DISPATCH(); } TARGET(FOR_ITER_LIST) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2229 "Python/bytecodes.c" + #line 2242 "Python/bytecodes.c" DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); _PyListIterObject *it = (_PyListIterObject *)iter; STAT_INC(FOR_ITER, hit); @@ -3172,7 +3198,7 @@ DISPATCH(); end_for_iter_list: // Common case: no jump, leave it to the code generator - #line 3176 "Python/generated_cases.c.h" + #line 3202 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3182,7 +3208,7 @@ TARGET(FOR_ITER_TUPLE) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2251 "Python/bytecodes.c" + #line 2264 "Python/bytecodes.c" _PyTupleIterObject *it = (_PyTupleIterObject *)iter; DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -3202,7 +3228,7 @@ DISPATCH(); end_for_iter_tuple: // Common case: no jump, leave it to the code generator - #line 3206 "Python/generated_cases.c.h" + #line 3232 "Python/generated_cases.c.h" STACK_GROW(1); 
stack_pointer[-1] = next; next_instr += 1; @@ -3212,7 +3238,7 @@ TARGET(FOR_ITER_RANGE) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2273 "Python/bytecodes.c" + #line 2286 "Python/bytecodes.c" _PyRangeIterObject *r = (_PyRangeIterObject *)iter; DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -3230,7 +3256,7 @@ if (next == NULL) { goto error; } - #line 3234 "Python/generated_cases.c.h" + #line 3260 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3239,7 +3265,7 @@ TARGET(FOR_ITER_GEN) { PyObject *iter = stack_pointer[-1]; - #line 2293 "Python/bytecodes.c" + #line 2306 "Python/bytecodes.c" PyGenObject *gen = (PyGenObject *)iter; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER); @@ -3254,14 +3280,14 @@ assert(next_instr[oparg].op.code == END_FOR || next_instr[oparg].op.code == INSTRUMENTED_END_FOR); DISPATCH_INLINED(gen_frame); - #line 3258 "Python/generated_cases.c.h" + #line 3284 "Python/generated_cases.c.h" } TARGET(BEFORE_ASYNC_WITH) { PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; - #line 2310 "Python/bytecodes.c" + #line 2323 "Python/bytecodes.c" PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); if (enter == NULL) { if (!_PyErr_Occurred(tstate)) { @@ -3284,16 +3310,16 @@ Py_DECREF(enter); goto error; } - #line 3288 "Python/generated_cases.c.h" + #line 3314 "Python/generated_cases.c.h" Py_DECREF(mgr); - #line 2333 "Python/bytecodes.c" + #line 2346 "Python/bytecodes.c" res = _PyObject_CallNoArgs(enter); Py_DECREF(enter); if (res == NULL) { Py_DECREF(exit); if (true) goto pop_1_error; } - #line 3297 "Python/generated_cases.c.h" + #line 3323 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; stack_pointer[-2] = exit; @@ -3305,7 +3331,7 @@ PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; - #line 2343 "Python/bytecodes.c" + #line 2356 "Python/bytecodes.c" /* pop the context manager, push its __exit__ and the * value returned from calling its __enter__ */ @@ -3331,16 +3357,16 @@ Py_DECREF(enter); goto error; } - #line 3335 "Python/generated_cases.c.h" + #line 3361 "Python/generated_cases.c.h" Py_DECREF(mgr); - #line 2369 "Python/bytecodes.c" + #line 2382 "Python/bytecodes.c" res = _PyObject_CallNoArgs(enter); Py_DECREF(enter); if (res == NULL) { Py_DECREF(exit); if (true) goto pop_1_error; } - #line 3344 "Python/generated_cases.c.h" + #line 3370 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; stack_pointer[-2] = exit; @@ -3352,7 +3378,7 @@ PyObject *lasti = stack_pointer[-3]; PyObject *exit_func = stack_pointer[-4]; PyObject *res; - #line 2378 "Python/bytecodes.c" + #line 2391 "Python/bytecodes.c" /* At the top of the stack are 4 values: - val: TOP = exc_info() - unused: SECOND = previous exception @@ -3373,7 +3399,7 @@ res = PyObject_Vectorcall(exit_func, stack + 1, 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); if (res == NULL) goto error; - #line 3377 "Python/generated_cases.c.h" + #line 3403 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); @@ -3382,7 +3408,7 @@ TARGET(PUSH_EXC_INFO) { PyObject *new_exc = stack_pointer[-1]; PyObject *prev_exc; - #line 2401 "Python/bytecodes.c" + #line 2414 "Python/bytecodes.c" _PyErr_StackItem *exc_info = tstate->exc_info; if (exc_info->exc_value != NULL) { prev_exc = exc_info->exc_value; @@ -3392,7 +3418,7 @@ } assert(PyExceptionInstance_Check(new_exc)); exc_info->exc_value = 
Py_NewRef(new_exc); - #line 3396 "Python/generated_cases.c.h" + #line 3422 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = new_exc; stack_pointer[-2] = prev_exc; @@ -3406,7 +3432,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t keys_version = read_u32(&next_instr[3].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2413 "Python/bytecodes.c" + #line 2426 "Python/bytecodes.c" /* Cached method object */ PyTypeObject *self_cls = Py_TYPE(self); assert(type_version != 0); @@ -3423,7 +3449,7 @@ assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR)); res = self; assert(oparg & 1); - #line 3427 "Python/generated_cases.c.h" + #line 3453 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -3437,7 +3463,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2432 "Python/bytecodes.c" + #line 2445 "Python/bytecodes.c" PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); assert(self_cls->tp_dictoffset == 0); @@ -3447,7 +3473,7 @@ res2 = Py_NewRef(descr); res = self; assert(oparg & 1); - #line 3451 "Python/generated_cases.c.h" + #line 3477 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -3461,7 +3487,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2444 "Python/bytecodes.c" + #line 2457 "Python/bytecodes.c" PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); Py_ssize_t dictoffset = self_cls->tp_dictoffset; @@ -3475,7 +3501,7 @@ res2 = Py_NewRef(descr); res = self; assert(oparg & 1); - #line 3479 "Python/generated_cases.c.h" + #line 3505 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = res2; } @@ -3484,16 +3510,16 @@ } TARGET(KW_NAMES) { - #line 2460 "Python/bytecodes.c" + #line 2473 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg < PyTuple_GET_SIZE(frame->f_code->co_consts)); kwnames = GETITEM(frame->f_code->co_consts, oparg); - #line 3492 "Python/generated_cases.c.h" + #line 3518 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_CALL) { - #line 2466 "Python/bytecodes.c" + #line 2479 "Python/bytecodes.c" int is_meth = PEEK(oparg+2) != NULL; int total_args = oparg + is_meth; PyObject *function = PEEK(total_args + 1); @@ -3506,7 +3532,7 @@ _PyCallCache *cache = (_PyCallCache *)next_instr; INCREMENT_ADAPTIVE_COUNTER(cache->counter); GO_TO_INSTRUCTION(CALL); - #line 3510 "Python/generated_cases.c.h" + #line 3536 "Python/generated_cases.c.h" } TARGET(CALL) { @@ -3516,7 +3542,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2511 "Python/bytecodes.c" + #line 2524 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -3598,7 +3624,7 @@ Py_DECREF(args[i]); } if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3602 "Python/generated_cases.c.h" + #line 3628 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3610,7 +3636,7 @@ TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; - #line 2599 "Python/bytecodes.c" + #line 2612 "Python/bytecodes.c" DEOPT_IF(method != NULL, CALL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); STAT_INC(CALL, hit); @@ -3620,7 +3646,7 @@ PEEK(oparg + 2) = Py_NewRef(meth); // method Py_DECREF(callable); GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); - #line 3624 "Python/generated_cases.c.h" + #line 3650 "Python/generated_cases.c.h" } TARGET(CALL_PY_EXACT_ARGS) { @@ -3629,7 +3655,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); - #line 2611 "Python/bytecodes.c" + #line 2624 "Python/bytecodes.c" assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -3655,7 +3681,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_CALL); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 3659 "Python/generated_cases.c.h" + #line 3685 "Python/generated_cases.c.h" } TARGET(CALL_PY_WITH_DEFAULTS) { @@ -3663,7 +3689,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); - #line 2639 "Python/bytecodes.c" + #line 2652 "Python/bytecodes.c" assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -3699,7 +3725,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_CALL); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 3703 "Python/generated_cases.c.h" + #line 3729 "Python/generated_cases.c.h" } TARGET(CALL_NO_KW_TYPE_1) { @@ -3707,7 +3733,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2677 "Python/bytecodes.c" + #line 2690 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3717,7 +3743,7 @@ res = Py_NewRef(Py_TYPE(obj)); Py_DECREF(obj); Py_DECREF(&PyType_Type); // I.e., callable - #line 3721 "Python/generated_cases.c.h" + #line 3747 "Python/generated_cases.c.h" STACK_SHRINK(oparg); 
STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3730,7 +3756,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2689 "Python/bytecodes.c" + #line 2702 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3741,7 +3767,7 @@ Py_DECREF(arg); Py_DECREF(&PyUnicode_Type); // I.e., callable if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3745 "Python/generated_cases.c.h" + #line 3771 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3755,7 +3781,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2703 "Python/bytecodes.c" + #line 2716 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3766,7 +3792,7 @@ Py_DECREF(arg); Py_DECREF(&PyTuple_Type); // I.e., tuple if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3770 "Python/generated_cases.c.h" + #line 3796 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3780,7 +3806,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2717 "Python/bytecodes.c" + #line 2730 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -3802,7 +3828,7 @@ } Py_DECREF(tp); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3806 "Python/generated_cases.c.h" + #line 3832 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3816,7 +3842,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2742 "Python/bytecodes.c" + #line 2755 "Python/bytecodes.c" /* Builtin METH_O functions */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -3844,7 +3870,7 @@ Py_DECREF(arg); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3848 "Python/generated_cases.c.h" + #line 3874 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3858,7 +3884,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2773 "Python/bytecodes.c" + #line 2786 "Python/bytecodes.c" /* Builtin METH_FASTCALL functions, without keywords */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -3890,7 +3916,7 @@ 'invalid'). In those cases an exception is set, so we must handle it. 
*/ - #line 3894 "Python/generated_cases.c.h" + #line 3920 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3904,7 +3930,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2808 "Python/bytecodes.c" + #line 2821 "Python/bytecodes.c" /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ int is_meth = method != NULL; int total_args = oparg; @@ -3936,7 +3962,7 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3940 "Python/generated_cases.c.h" + #line 3966 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3950,7 +3976,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2843 "Python/bytecodes.c" + #line 2856 "Python/bytecodes.c" assert(kwnames == NULL); /* len(o) */ int is_meth = method != NULL; @@ -3975,7 +4001,7 @@ Py_DECREF(callable); Py_DECREF(arg); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3979 "Python/generated_cases.c.h" + #line 4005 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3988,7 +4014,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2870 "Python/bytecodes.c" + #line 2883 "Python/bytecodes.c" assert(kwnames == NULL); /* isinstance(o, o2) */ int is_meth = method != NULL; @@ -4015,7 +4041,7 @@ Py_DECREF(cls); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4019 "Python/generated_cases.c.h" + #line 4045 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4027,7 +4053,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *self = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; - #line 2900 "Python/bytecodes.c" + #line 2913 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); assert(method != NULL); @@ -4045,14 +4071,14 @@ JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); assert(next_instr[-1].op.code == POP_TOP); DISPATCH(); - #line 4049 "Python/generated_cases.c.h" + #line 4075 "Python/generated_cases.c.h" } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2920 "Python/bytecodes.c" + #line 2933 "Python/bytecodes.c" assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -4083,7 +4109,7 @@ Py_DECREF(arg); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4087 "Python/generated_cases.c.h" + #line 4113 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4096,7 +4122,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2954 "Python/bytecodes.c" + #line 2967 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -4125,7 +4151,7 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4129 "Python/generated_cases.c.h" + #line 4155 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4138,7 +4164,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2986 "Python/bytecodes.c" + #line 2999 
"Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 0 || oparg == 1); int is_meth = method != NULL; @@ -4167,7 +4193,7 @@ Py_DECREF(self); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4171 "Python/generated_cases.c.h" + #line 4197 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4180,7 +4206,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 3018 "Python/bytecodes.c" + #line 3031 "Python/bytecodes.c" assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -4208,7 +4234,7 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4212 "Python/generated_cases.c.h" + #line 4238 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4218,9 +4244,9 @@ } TARGET(INSTRUMENTED_CALL_FUNCTION_EX) { - #line 3049 "Python/bytecodes.c" + #line 3062 "Python/bytecodes.c" GO_TO_INSTRUCTION(CALL_FUNCTION_EX); - #line 4224 "Python/generated_cases.c.h" + #line 4250 "Python/generated_cases.c.h" } TARGET(CALL_FUNCTION_EX) { @@ -4229,7 +4255,7 @@ PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))]; PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 1 : 0))]; PyObject *result; - #line 3053 "Python/bytecodes.c" + #line 3066 "Python/bytecodes.c" // DICT_MERGE is called before this opcode if there are kwargs. // It converts all dict subtypes in kwargs into regular dicts. assert(kwargs == NULL || PyDict_CheckExact(kwargs)); @@ -4270,16 +4296,35 @@ } } else { + if (Py_TYPE(func) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)func)->vectorcall == _PyFunction_Vectorcall) { + assert(PyTuple_CheckExact(callargs)); + Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); + int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); + + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate, + (PyFunctionObject *)func, locals, + nargs, callargs, kwargs); + // Need to manually shrink the stack since we exit with DISPATCH_INLINED. + STACK_SHRINK(oparg + 3); + if (new_frame == NULL) { + goto error; + } + frame->return_offset = 0; + DISPATCH_INLINED(new_frame); + } result = PyObject_Call(func, callargs, kwargs); } - #line 4276 "Python/generated_cases.c.h" + #line 4321 "Python/generated_cases.c.h" Py_DECREF(func); Py_DECREF(callargs); Py_XDECREF(kwargs); - #line 3096 "Python/bytecodes.c" + #line 3128 "Python/bytecodes.c" assert(PEEK(3 + (oparg & 1)) == NULL); if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; } - #line 4283 "Python/generated_cases.c.h" + #line 4328 "Python/generated_cases.c.h" STACK_SHRINK(((oparg & 1) ? 1 : 0)); STACK_SHRINK(2); stack_pointer[-1] = result; @@ -4294,7 +4339,7 @@ PyObject *kwdefaults = (oparg & 0x02) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0))] : NULL; PyObject *defaults = (oparg & 0x01) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 
1 : 0))] : NULL; PyObject *func; - #line 3106 "Python/bytecodes.c" + #line 3138 "Python/bytecodes.c" PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); @@ -4323,14 +4368,14 @@ func_obj->func_version = ((PyCodeObject *)codeobj)->co_version; func = (PyObject *)func_obj; - #line 4327 "Python/generated_cases.c.h" + #line 4372 "Python/generated_cases.c.h" STACK_SHRINK(((oparg & 0x01) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x08) ? 1 : 0)); stack_pointer[-1] = func; DISPATCH(); } TARGET(RETURN_GENERATOR) { - #line 3137 "Python/bytecodes.c" + #line 3169 "Python/bytecodes.c" assert(PyFunction_Check(frame->f_funcobj)); PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); @@ -4351,7 +4396,7 @@ frame = cframe.current_frame = prev; _PyFrame_StackPush(frame, (PyObject *)gen); goto resume_frame; - #line 4355 "Python/generated_cases.c.h" + #line 4400 "Python/generated_cases.c.h" } TARGET(BUILD_SLICE) { @@ -4359,15 +4404,15 @@ PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))]; PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))]; PyObject *slice; - #line 3160 "Python/bytecodes.c" + #line 3192 "Python/bytecodes.c" slice = PySlice_New(start, stop, step); - #line 4365 "Python/generated_cases.c.h" + #line 4410 "Python/generated_cases.c.h" Py_DECREF(start); Py_DECREF(stop); Py_XDECREF(step); - #line 3162 "Python/bytecodes.c" + #line 3194 "Python/bytecodes.c" if (slice == NULL) { STACK_SHRINK(((oparg == 3) ? 1 : 0)); goto pop_2_error; } - #line 4371 "Python/generated_cases.c.h" + #line 4416 "Python/generated_cases.c.h" STACK_SHRINK(((oparg == 3) ? 1 : 0)); STACK_SHRINK(1); stack_pointer[-1] = slice; @@ -4378,7 +4423,7 @@ PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? stack_pointer[-((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))] : NULL; PyObject *value = stack_pointer[-(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))]; PyObject *result; - #line 3166 "Python/bytecodes.c" + #line 3198 "Python/bytecodes.c" /* Handles f-string value formatting. */ PyObject *(*conv_fn)(PyObject *); int which_conversion = oparg & FVC_MASK; @@ -4413,7 +4458,7 @@ Py_DECREF(value); Py_XDECREF(fmt_spec); if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; } - #line 4417 "Python/generated_cases.c.h" + #line 4462 "Python/generated_cases.c.h" STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 
1 : 0)); stack_pointer[-1] = result; DISPATCH(); @@ -4422,10 +4467,10 @@ TARGET(COPY) { PyObject *bottom = stack_pointer[-(1 + (oparg-1))]; PyObject *top; - #line 3203 "Python/bytecodes.c" + #line 3235 "Python/bytecodes.c" assert(oparg > 0); top = Py_NewRef(bottom); - #line 4429 "Python/generated_cases.c.h" + #line 4474 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = top; DISPATCH(); @@ -4437,7 +4482,7 @@ PyObject *rhs = stack_pointer[-1]; PyObject *lhs = stack_pointer[-2]; PyObject *res; - #line 3208 "Python/bytecodes.c" + #line 3240 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -4452,12 +4497,12 @@ assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); assert(binary_ops[oparg]); res = binary_ops[oparg](lhs, rhs); - #line 4456 "Python/generated_cases.c.h" + #line 4501 "Python/generated_cases.c.h" Py_DECREF(lhs); Py_DECREF(rhs); - #line 3223 "Python/bytecodes.c" + #line 3255 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 4461 "Python/generated_cases.c.h" + #line 4506 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -4467,16 +4512,16 @@ TARGET(SWAP) { PyObject *top = stack_pointer[-1]; PyObject *bottom = stack_pointer[-(2 + (oparg-2))]; - #line 3228 "Python/bytecodes.c" + #line 3260 "Python/bytecodes.c" assert(oparg >= 2); - #line 4473 "Python/generated_cases.c.h" + #line 4518 "Python/generated_cases.c.h" stack_pointer[-1] = bottom; stack_pointer[-(2 + (oparg-2))] = top; DISPATCH(); } TARGET(INSTRUMENTED_LINE) { - #line 3232 "Python/bytecodes.c" + #line 3264 "Python/bytecodes.c" _Py_CODEUNIT *here = next_instr-1; _PyFrame_SetStackPointer(frame, stack_pointer); int original_opcode = _Py_call_instrumentation_line( @@ -4496,11 +4541,11 @@ } opcode = original_opcode; DISPATCH_GOTO(); - #line 4500 "Python/generated_cases.c.h" + #line 4545 "Python/generated_cases.c.h" } TARGET(INSTRUMENTED_INSTRUCTION) { - #line 3254 "Python/bytecodes.c" + #line 3286 "Python/bytecodes.c" int next_opcode = _Py_call_instrumentation_instruction( tstate, frame, next_instr-1); if (next_opcode < 0) goto error; @@ -4512,26 +4557,26 @@ assert(next_opcode > 0 && next_opcode < 256); opcode = next_opcode; DISPATCH_GOTO(); - #line 4516 "Python/generated_cases.c.h" + #line 4561 "Python/generated_cases.c.h" } TARGET(INSTRUMENTED_JUMP_FORWARD) { - #line 3268 "Python/bytecodes.c" + #line 3300 "Python/bytecodes.c" INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP); - #line 4522 "Python/generated_cases.c.h" + #line 4567 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_JUMP_BACKWARD) { - #line 3272 "Python/bytecodes.c" + #line 3304 "Python/bytecodes.c" INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP); - #line 4529 "Python/generated_cases.c.h" + #line 4574 "Python/generated_cases.c.h" CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_TRUE) { - #line 3277 "Python/bytecodes.c" + #line 3309 "Python/bytecodes.c" PyObject *cond = POP(); int err = PyObject_IsTrue(cond); Py_DECREF(cond); @@ -4540,12 +4585,12 @@ assert(err == 0 || err == 1); int offset = err*oparg; INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4544 "Python/generated_cases.c.h" + #line 4589 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_FALSE) { - #line 3288 "Python/bytecodes.c" + #line 3320 "Python/bytecodes.c" PyObject *cond = POP(); int err = 
PyObject_IsTrue(cond); Py_DECREF(cond); @@ -4554,12 +4599,12 @@ assert(err == 0 || err == 1); int offset = (1-err)*oparg; INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4558 "Python/generated_cases.c.h" + #line 4603 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_NONE) { - #line 3299 "Python/bytecodes.c" + #line 3331 "Python/bytecodes.c" PyObject *value = POP(); _Py_CODEUNIT *here = next_instr-1; int offset; @@ -4572,12 +4617,12 @@ offset = 0; } INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4576 "Python/generated_cases.c.h" + #line 4621 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_NOT_NONE) { - #line 3314 "Python/bytecodes.c" + #line 3346 "Python/bytecodes.c" PyObject *value = POP(); _Py_CODEUNIT *here = next_instr-1; int offset; @@ -4590,30 +4635,30 @@ offset = oparg; } INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4594 "Python/generated_cases.c.h" + #line 4639 "Python/generated_cases.c.h" DISPATCH(); } TARGET(EXTENDED_ARG) { - #line 3329 "Python/bytecodes.c" + #line 3361 "Python/bytecodes.c" assert(oparg); opcode = next_instr->op.code; oparg = oparg << 8 | next_instr->op.arg; PRE_DISPATCH_GOTO(); DISPATCH_GOTO(); - #line 4605 "Python/generated_cases.c.h" + #line 4650 "Python/generated_cases.c.h" } TARGET(CACHE) { - #line 3337 "Python/bytecodes.c" + #line 3369 "Python/bytecodes.c" assert(0 && "Executing a cache."); Py_UNREACHABLE(); - #line 4612 "Python/generated_cases.c.h" + #line 4657 "Python/generated_cases.c.h" } TARGET(RESERVED) { - #line 3342 "Python/bytecodes.c" + #line 3374 "Python/bytecodes.c" assert(0 && "Executing RESERVED instruction."); Py_UNREACHABLE(); - #line 4619 "Python/generated_cases.c.h" + #line 4664 "Python/generated_cases.c.h" } diff --git a/Python/import.c b/Python/import.c index df577800528d27..0bf107b28d3990 100644 --- a/Python/import.c +++ b/Python/import.c @@ -413,8 +413,11 @@ remove_module(PyThreadState *tstate, PyObject *name) Py_ssize_t _PyImport_GetNextModuleIndex(void) { + PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK); LAST_MODULE_INDEX++; - return LAST_MODULE_INDEX; + Py_ssize_t index = LAST_MODULE_INDEX; + PyThread_release_lock(EXTENSIONS.mutex); + return index; } static const char * @@ -591,11 +594,11 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) /* It may help to have a big picture view of what happens when an extension is loaded. This includes when it is imported - for the first time or via imp.load_dynamic(). + for the first time. - Here's a summary, using imp.load_dynamic() as the starting point: + Here's a summary, using importlib._bootstrap._load() as a starting point. - 1. imp.load_dynamic() -> importlib._bootstrap._load() + 1. importlib._bootstrap._load() 2. _load(): acquire import lock 3. _load() -> importlib._bootstrap._load_unlocked() 4. 
_load_unlocked() -> importlib._bootstrap.module_from_spec() @@ -703,6 +706,7 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) const char * _PyImport_ResolveNameWithPackageContext(const char *name) { + PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK); if (PKGCONTEXT != NULL) { const char *p = strrchr(PKGCONTEXT, '.'); if (p != NULL && strcmp(name, p+1) == 0) { @@ -710,14 +714,17 @@ _PyImport_ResolveNameWithPackageContext(const char *name) PKGCONTEXT = NULL; } } + PyThread_release_lock(EXTENSIONS.mutex); return name; } const char * _PyImport_SwapPackageContext(const char *newcontext) { + PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK); const char *oldcontext = PKGCONTEXT; PKGCONTEXT = newcontext; + PyThread_release_lock(EXTENSIONS.mutex); return oldcontext; } @@ -865,13 +872,13 @@ gets even messier. static inline void extensions_lock_acquire(void) { - // XXX For now the GIL is sufficient. + PyThread_acquire_lock(_PyRuntime.imports.extensions.mutex, WAIT_LOCK); } static inline void extensions_lock_release(void) { - // XXX For now the GIL is sufficient. + PyThread_release_lock(_PyRuntime.imports.extensions.mutex); } /* Magic for extension modules (built-in as well as dynamically @@ -3787,7 +3794,7 @@ _imp_source_hash_impl(PyObject *module, long key, Py_buffer *source) PyDoc_STRVAR(doc_imp, -"(Extremely) low-level import machinery bits as used by importlib and imp."); +"(Extremely) low-level import machinery bits as used by importlib."); static PyMethodDef imp_methods[] = { _IMP_EXTENSION_SUFFIXES_METHODDEF diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 8334f596eb3e19..c5bbbdacbb851e 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -113,13 +113,17 @@ static const uint8_t INSTRUMENTED_OPCODES[256] = { }; static inline bool -opcode_has_event(int opcode) { - return opcode < INSTRUMENTED_LINE && - INSTRUMENTED_OPCODES[opcode] > 0; +opcode_has_event(int opcode) +{ + return ( + opcode < INSTRUMENTED_LINE && + INSTRUMENTED_OPCODES[opcode] > 0 + ); } static inline bool -is_instrumented(int opcode) { +is_instrumented(int opcode) +{ assert(opcode != 0); assert(opcode != RESERVED); return opcode >= MIN_INSTRUMENTED_OPCODE; @@ -339,7 +343,8 @@ dump_monitors(const char *prefix, _Py_Monitors monitors, FILE*out) /* Like _Py_GetBaseOpcode but without asserts. 
* Does its best to give the right answer, but won't abort * if something is wrong */ -int get_base_opcode_best_attempt(PyCodeObject *code, int offset) +static int +get_base_opcode_best_attempt(PyCodeObject *code, int offset) { int opcode = _Py_OPCODE(_PyCode_CODE(code)[offset]); if (INSTRUMENTED_OPCODES[opcode] != opcode) { @@ -418,13 +423,15 @@ dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) assert(test); \ } while (0) -bool valid_opcode(int opcode) { +static bool +valid_opcode(int opcode) +{ if (opcode > 0 && opcode != RESERVED && opcode < 255 && _PyOpcode_OpName[opcode] && - _PyOpcode_OpName[opcode][0] != '<' - ) { + _PyOpcode_OpName[opcode][0] != '<') + { return true; } return false; @@ -550,11 +557,11 @@ de_instrument(PyCodeObject *code, int i, int event) opcode_ptr = &code->_co_monitoring->lines[i].original_opcode; opcode = *opcode_ptr; } - if (opcode == INSTRUMENTED_INSTRUCTION) { + if (opcode == INSTRUMENTED_INSTRUCTION) { opcode_ptr = &code->_co_monitoring->per_instruction_opcodes[i]; opcode = *opcode_ptr; } - int deinstrumented = DE_INSTRUMENT[opcode]; + int deinstrumented = DE_INSTRUMENT[opcode]; if (deinstrumented == 0) { return; } @@ -781,8 +788,7 @@ add_line_tools(PyCodeObject * code, int offset, int tools) { assert(tools_is_subset_for_event(code, PY_MONITORING_EVENT_LINE, tools)); assert(code->_co_monitoring); - if (code->_co_monitoring->line_tools - ) { + if (code->_co_monitoring->line_tools) { code->_co_monitoring->line_tools[offset] |= tools; } else { @@ -798,8 +804,7 @@ add_per_instruction_tools(PyCodeObject * code, int offset, int tools) { assert(tools_is_subset_for_event(code, PY_MONITORING_EVENT_INSTRUCTION, tools)); assert(code->_co_monitoring); - if (code->_co_monitoring->per_instruction_tools - ) { + if (code->_co_monitoring->per_instruction_tools) { code->_co_monitoring->per_instruction_tools[offset] |= tools; } else { @@ -814,11 +819,10 @@ static void remove_per_instruction_tools(PyCodeObject * code, int offset, int tools) { assert(code->_co_monitoring); - if (code->_co_monitoring->per_instruction_tools) - { + if (code->_co_monitoring->per_instruction_tools) { uint8_t *toolsptr = &code->_co_monitoring->per_instruction_tools[offset]; *toolsptr &= ~tools; - if (*toolsptr == 0 ) { + if (*toolsptr == 0) { de_instrument_per_instruction(code, offset); } } @@ -843,7 +847,7 @@ call_one_instrument( assert(tstate->tracing == 0); PyObject *instrument = interp->monitoring_callables[tool][event]; if (instrument == NULL) { - return 0; + return 0; } int old_what = tstate->what_event; tstate->what_event = event; @@ -865,16 +869,15 @@ static const int8_t MOST_SIGNIFICANT_BITS[16] = { 3, 3, 3, 3, }; -/* We could use _Py_bit_length here, but that is designed for larger (32/64) bit ints, - and can perform relatively poorly on platforms without the necessary intrinsics. */ +/* We could use _Py_bit_length here, but that is designed for larger (32/64) + * bit ints, and can perform relatively poorly on platforms without the + * necessary intrinsics. 
*/ static inline int most_significant_bit(uint8_t bits) { assert(bits != 0); if (bits > 15) { return MOST_SIGNIFICANT_BITS[bits>>4]+4; } - else { - return MOST_SIGNIFICANT_BITS[bits]; - } + return MOST_SIGNIFICANT_BITS[bits]; } static bool @@ -1002,8 +1005,8 @@ _Py_call_instrumentation_2args( int _Py_call_instrumentation_jump( PyThreadState *tstate, int event, - _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target -) { + _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target) +{ assert(event == PY_MONITORING_EVENT_JUMP || event == PY_MONITORING_EVENT_BRANCH); assert(frame->prev_instr == instr); @@ -1309,8 +1312,8 @@ initialize_line_tools(PyCodeObject *code, _Py_Monitors *all_events) } } -static -int allocate_instrumentation_data(PyCodeObject *code) +static int +allocate_instrumentation_data(PyCodeObject *code) { if (code->_co_monitoring == NULL) { @@ -1404,7 +1407,7 @@ static const uint8_t super_instructions[256] = { /* Should use instruction metadata for this */ static bool -is_super_instruction(int opcode) { +is_super_instruction(uint8_t opcode) { return super_instructions[opcode] != 0; } @@ -1516,7 +1519,7 @@ _Py_Instrument(PyCodeObject *code, PyInterpreterState *interp) #define C_RETURN_EVENTS \ ((1 << PY_MONITORING_EVENT_C_RETURN) | \ - (1 << PY_MONITORING_EVENT_C_RAISE)) + (1 << PY_MONITORING_EVENT_C_RAISE)) #define C_CALL_EVENTS \ (C_RETURN_EVENTS | (1 << PY_MONITORING_EVENT_CALL)) @@ -1561,8 +1564,8 @@ static int check_tool(PyInterpreterState *interp, int tool_id) { if (tool_id < PY_MONITORING_SYS_PROFILE_ID && - interp->monitoring_tool_names[tool_id] == NULL - ) { + interp->monitoring_tool_names[tool_id] == NULL) + { PyErr_Format(PyExc_ValueError, "tool %d is not in use", tool_id); return -1; } diff --git a/Python/makeopcodetargets.py b/Python/makeopcodetargets.py index 5aa31803397ce4..2b402ae0b6a031 100755 --- a/Python/makeopcodetargets.py +++ b/Python/makeopcodetargets.py @@ -7,24 +7,18 @@ import sys -try: - from importlib.machinery import SourceFileLoader -except ImportError: - import imp - - def find_module(modname): - """Finds and returns a module in the local dist/checkout. - """ - modpath = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "Lib") - return imp.load_module(modname, *imp.find_module(modname, [modpath])) -else: - def find_module(modname): - """Finds and returns a module in the local dist/checkout. - """ - modpath = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "Lib", modname + ".py") - return SourceFileLoader(modname, modpath).load_module() +# 2023-04-27(warsaw): Pre-Python 3.12, this would catch ImportErrors and try to +# import imp, and then use imp.load_module(). The imp module was removed in +# Python 3.12 (and long deprecated before that), and it's unclear under what +# conditions this import will now fail, so the fallback was simply removed. +from importlib.machinery import SourceFileLoader + +def find_module(modname): + """Finds and returns a module in the local dist/checkout. 
+ """ + modpath = os.path.join( + os.path.dirname(os.path.dirname(__file__)), "Lib", modname + ".py") + return SourceFileLoader(modname, modpath).load_module() def write_contents(f): diff --git a/Python/modsupport.c b/Python/modsupport.c index 75698455c88166..be229c987b8a78 100644 --- a/Python/modsupport.c +++ b/Python/modsupport.c @@ -3,6 +3,7 @@ #include "Python.h" #include "pycore_abstract.h" // _PyIndex_Check() +#include "pycore_object.h" // _PyType_IsReady() #define FLAG_SIZE_T 1 typedef double va_double; @@ -693,7 +694,7 @@ PyModule_AddStringConstant(PyObject *m, const char *name, const char *value) int PyModule_AddType(PyObject *module, PyTypeObject *type) { - if (PyType_Ready(type) < 0) { + if (!_PyType_IsReady(type) && PyType_Ready(type) < 0) { return -1; } diff --git a/Python/opcode_metadata.h b/Python/opcode_metadata.h index fb370ef923dd92..77f0ae0c1a4c30 100644 --- a/Python/opcode_metadata.h +++ b/Python/opcode_metadata.h @@ -207,6 +207,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 2; case LOAD_SUPER_ATTR: return 3; + case LOAD_SUPER_ATTR_METHOD: + return 3; case LOAD_ATTR: return 1; case LOAD_ATTR_INSTANCE_VALUE: @@ -593,6 +595,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case LOAD_SUPER_ATTR: return ((oparg & 1) ? 1 : 0) + 1; + case LOAD_SUPER_ATTR_METHOD: + return 2; case LOAD_ATTR: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_ATTR_INSTANCE_VALUE: @@ -775,7 +779,7 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { } #endif -enum InstructionFormat { INSTR_FMT_IB, INSTR_FMT_IBC, INSTR_FMT_IBC00, INSTR_FMT_IBC000, INSTR_FMT_IBC00000000, INSTR_FMT_IBIB, INSTR_FMT_IX, INSTR_FMT_IXC, INSTR_FMT_IXC000 }; +enum InstructionFormat { INSTR_FMT_IB, INSTR_FMT_IBC, INSTR_FMT_IBC00, INSTR_FMT_IBC000, INSTR_FMT_IBC00000000, INSTR_FMT_IBIB, INSTR_FMT_IX, INSTR_FMT_IXC, INSTR_FMT_IXC000, INSTR_FMT_IXC00000000 }; struct opcode_metadata { bool valid_entry; enum InstructionFormat instr_format; @@ -883,7 +887,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [DICT_UPDATE] = { true, INSTR_FMT_IB }, [DICT_MERGE] = { true, INSTR_FMT_IB }, [MAP_ADD] = { true, INSTR_FMT_IB }, - [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IB }, + [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC00000000 }, + [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IXC00000000 }, [LOAD_ATTR] = { true, INSTR_FMT_IBC00000000 }, [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000 }, [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000 }, diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index 01091829271bc5..042cee222f705c 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -65,29 +65,29 @@ static void *opcode_targets[256] = { &&TARGET_FOR_ITER_TUPLE, &&TARGET_FOR_ITER_RANGE, &&TARGET_FOR_ITER_GEN, + &&TARGET_LOAD_SUPER_ATTR_METHOD, &&TARGET_LOAD_ATTR_CLASS, - &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, &&TARGET_GET_ITER, &&TARGET_GET_YIELD_FROM_ITER, - &&TARGET_LOAD_ATTR_INSTANCE_VALUE, + &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, &&TARGET_LOAD_BUILD_CLASS, + &&TARGET_LOAD_ATTR_INSTANCE_VALUE, &&TARGET_LOAD_ATTR_MODULE, - &&TARGET_LOAD_ATTR_PROPERTY, &&TARGET_LOAD_ASSERTION_ERROR, &&TARGET_RETURN_GENERATOR, + &&TARGET_LOAD_ATTR_PROPERTY, &&TARGET_LOAD_ATTR_SLOT, &&TARGET_LOAD_ATTR_WITH_HINT, &&TARGET_LOAD_ATTR_METHOD_LAZY_DICT, &&TARGET_LOAD_ATTR_METHOD_NO_DICT, &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES, &&TARGET_LOAD_CONST__LOAD_FAST, - &&TARGET_LOAD_FAST__LOAD_CONST, &&TARGET_RETURN_VALUE, - &&TARGET_LOAD_FAST__LOAD_FAST, + 
&&TARGET_LOAD_FAST__LOAD_CONST, &&TARGET_SETUP_ANNOTATIONS, + &&TARGET_LOAD_FAST__LOAD_FAST, &&TARGET_LOAD_GLOBAL_BUILTIN, &&TARGET_LOAD_GLOBAL_MODULE, - &&TARGET_STORE_ATTR_INSTANCE_VALUE, &&TARGET_POP_EXCEPT, &&TARGET_STORE_NAME, &&TARGET_DELETE_NAME, @@ -110,9 +110,9 @@ static void *opcode_targets[256] = { &&TARGET_IMPORT_NAME, &&TARGET_IMPORT_FROM, &&TARGET_JUMP_FORWARD, + &&TARGET_STORE_ATTR_INSTANCE_VALUE, &&TARGET_STORE_ATTR_SLOT, &&TARGET_STORE_ATTR_WITH_HINT, - &&TARGET_STORE_FAST__LOAD_FAST, &&TARGET_POP_JUMP_IF_FALSE, &&TARGET_POP_JUMP_IF_TRUE, &&TARGET_LOAD_GLOBAL, @@ -142,7 +142,7 @@ static void *opcode_targets[256] = { &&TARGET_JUMP_BACKWARD, &&TARGET_LOAD_SUPER_ATTR, &&TARGET_CALL_FUNCTION_EX, - &&TARGET_STORE_FAST__STORE_FAST, + &&TARGET_STORE_FAST__LOAD_FAST, &&TARGET_EXTENDED_ARG, &&TARGET_LIST_APPEND, &&TARGET_SET_ADD, @@ -152,20 +152,20 @@ static void *opcode_targets[256] = { &&TARGET_YIELD_VALUE, &&TARGET_RESUME, &&TARGET_MATCH_CLASS, + &&TARGET_STORE_FAST__STORE_FAST, &&TARGET_STORE_SUBSCR_DICT, - &&TARGET_STORE_SUBSCR_LIST_INT, &&TARGET_FORMAT_VALUE, &&TARGET_BUILD_CONST_KEY_MAP, &&TARGET_BUILD_STRING, + &&TARGET_STORE_SUBSCR_LIST_INT, &&TARGET_UNPACK_SEQUENCE_LIST, &&TARGET_UNPACK_SEQUENCE_TUPLE, &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, - &&TARGET_SEND_GEN, &&TARGET_LIST_EXTEND, &&TARGET_SET_UPDATE, &&TARGET_DICT_MERGE, &&TARGET_DICT_UPDATE, - &&_unknown_opcode, + &&TARGET_SEND_GEN, &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index ebf1a0bff54eb0..ba248d208e425a 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -2,7 +2,6 @@ #include "Python.h" -#include "pycore_bytesobject.h" // _PyBytes_InitTypes() #include "pycore_ceval.h" // _PyEval_FiniGIL() #include "pycore_context.h" // _PyContext_Init() #include "pycore_exceptions.h" // _PyExc_InitTypes() @@ -26,7 +25,6 @@ #include "pycore_sliceobject.h" // _PySlice_Fini() #include "pycore_sysmodule.h" // _PySys_ClearAuditHooks() #include "pycore_traceback.h" // _Py_DumpTracebackThreads() -#include "pycore_tuple.h" // _PyTuple_InitTypes() #include "pycore_typeobject.h" // _PyTypes_InitTypes() #include "pycore_unicodeobject.h" // _PyUnicode_InitTypes() #include "opcode.h" @@ -684,11 +682,6 @@ pycore_init_types(PyInterpreterState *interp) return status; } - status = _PyBytes_InitTypes(interp); - if (_PyStatus_EXCEPTION(status)) { - return status; - } - status = _PyLong_InitTypes(interp); if (_PyStatus_EXCEPTION(status)) { return status; @@ -704,11 +697,6 @@ pycore_init_types(PyInterpreterState *interp) return status; } - status = _PyTuple_InitTypes(interp); - if (_PyStatus_EXCEPTION(status)) { - return status; - } - if (_PyExc_InitTypes(interp) < 0) { return _PyStatus_ERR("failed to initialize an exception type"); } @@ -2185,10 +2173,9 @@ add_main_module(PyInterpreterState *interp) Py_DECREF(bimod); } - /* Main is a little special - imp.is_builtin("__main__") will return - * False, but BuiltinImporter is still the most appropriate initial - * setting for its __loader__ attribute. A more suitable value will - * be set if __main__ gets further initialized later in the startup + /* Main is a little special - BuiltinImporter is the most appropriate + * initial setting for its __loader__ attribute. A more suitable value + * will be set if __main__ gets further initialized later in the startup * process. 
*/ loader = _PyDict_GetItemStringWithError(d, "__loader__"); diff --git a/Python/pystate.c b/Python/pystate.c index b2ef7e2dddeeba..f103a059f0f369 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -380,7 +380,7 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS static const _PyRuntimeState initial = _PyRuntimeState_INIT(_PyRuntime); _Py_COMP_DIAG_POP -#define NUMLOCKS 4 +#define NUMLOCKS 5 static int alloc_for_runtime(PyThread_type_lock locks[NUMLOCKS]) @@ -434,6 +434,7 @@ init_runtime(_PyRuntimeState *runtime, &runtime->xidregistry.mutex, &runtime->getargs.mutex, &runtime->unicode_state.ids.lock, + &runtime->imports.extensions.mutex, }; for (int i = 0; i < NUMLOCKS; i++) { assert(locks[i] != NULL); @@ -518,6 +519,7 @@ _PyRuntimeState_Fini(_PyRuntimeState *runtime) &runtime->xidregistry.mutex, &runtime->getargs.mutex, &runtime->unicode_state.ids.lock, + &runtime->imports.extensions.mutex, }; for (int i = 0; i < NUMLOCKS; i++) { FREE_LOCK(*lockptrs[i]); @@ -546,6 +548,7 @@ _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime) &runtime->xidregistry.mutex, &runtime->getargs.mutex, &runtime->unicode_state.ids.lock, + &runtime->imports.extensions.mutex, }; int reinit_err = 0; for (int i = 0; i < NUMLOCKS; i++) { @@ -683,11 +686,11 @@ init_interpreter(PyInterpreterState *interp, _PyGC_InitState(&interp->gc); PyConfig_InitPythonConfig(&interp->config); _PyType_InitCache(interp); - for(int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { interp->monitors.tools[i] = 0; } for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) { - for(int e = 0; e < PY_MONITORING_EVENTS; e++) { + for (int e = 0; e < PY_MONITORING_EVENTS; e++) { interp->monitoring_callables[t][e] = NULL; } @@ -831,11 +834,11 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) Py_CLEAR(interp->audit_hooks); - for(int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { interp->monitors.tools[i] = 0; } for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) { - for(int e = 0; e < PY_MONITORING_EVENTS; e++) { + for (int e = 0; e < PY_MONITORING_EVENTS; e++) { Py_CLEAR(interp->monitoring_callables[t][e]); } } diff --git a/Python/pythonrun.c b/Python/pythonrun.c index b16d3f53f89fb9..05e7b4370869af 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1107,7 +1107,7 @@ print_exception_notes(struct exception_print_context *ctx, PyObject *value) if (notes == NULL) { return -1; } - if (!PySequence_Check(notes)) { + if (!PySequence_Check(notes) || PyUnicode_Check(notes) || PyBytes_Check(notes)) { int res = 0; if (write_indented_margin(ctx, f) < 0) { res = -1; @@ -1122,6 +1122,9 @@ print_exception_notes(struct exception_print_context *ctx, PyObject *value) Py_DECREF(s); } Py_DECREF(notes); + if (PyFile_WriteString("\n", f) < 0) { + res = -1; + } return res; } Py_ssize_t num_notes = PySequence_Length(notes); diff --git a/Python/specialize.c b/Python/specialize.c index 3fa28f409892dc..b1cc66124cfa4a 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -96,6 +96,7 @@ _Py_GetSpecializationStats(void) { return NULL; } int err = 0; + err += add_stat_dict(stats, LOAD_SUPER_ATTR, "load_super_attr"); err += add_stat_dict(stats, LOAD_ATTR, "load_attr"); err += add_stat_dict(stats, LOAD_GLOBAL, "load_global"); err += add_stat_dict(stats, BINARY_SUBSCR, "binary_subscr"); @@ -147,7 +148,7 @@ print_spec_stats(FILE *out, OpcodeStats *stats) PRIu64 "\n", i, j, val); } } - for(int j = 0; j < 256; j++) { + for (int j = 0; j < 
256; j++) { if (stats[i].pair_count[j]) { fprintf(out, "opcode[%d].pair_count[%d] : %" PRIu64 "\n", i, j, stats[i].pair_count[j]); @@ -320,6 +321,14 @@ _PyCode_Quicken(PyCodeObject *code) #define SPEC_FAIL_LOAD_GLOBAL_NON_DICT 17 #define SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT 18 +/* Super */ + +#define SPEC_FAIL_SUPER_NOT_LOAD_METHOD 9 +#define SPEC_FAIL_SUPER_BAD_CLASS 10 +#define SPEC_FAIL_SUPER_SHADOWED 11 +#define SPEC_FAIL_SUPER_NOT_METHOD 12 +#define SPEC_FAIL_SUPER_ERROR_OR_NOT_FOUND 13 + /* Attributes */ #define SPEC_FAIL_ATTR_OVERRIDING_DESCRIPTOR 9 @@ -505,6 +514,54 @@ specialize_module_load_attr( /* Attribute specialization */ +void +_Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, PyObject *self, + _Py_CODEUNIT *instr, PyObject *name, int load_method) { + assert(ENABLE_SPECIALIZATION); + assert(_PyOpcode_Caches[LOAD_SUPER_ATTR] == INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR); + _PySuperAttrCache *cache = (_PySuperAttrCache *)(instr + 1); + if (!load_method) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_NOT_LOAD_METHOD); + goto fail; + } + if (global_super != (PyObject *)&PySuper_Type) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_SHADOWED); + goto fail; + } + if (!PyType_Check(cls)) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_BAD_CLASS); + goto fail; + } + PyTypeObject *tp = (PyTypeObject *)cls; + PyObject *res = _PySuper_LookupDescr(tp, self, name); + if (res == NULL) { + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_ERROR_OR_NOT_FOUND); + PyErr_Clear(); + goto fail; + } + if (_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) { + write_u32(cache->class_version, tp->tp_version_tag); + write_u32(cache->self_type_version, Py_TYPE(self)->tp_version_tag); + write_obj(cache->method, res); // borrowed + instr->op.code = LOAD_SUPER_ATTR_METHOD; + Py_DECREF(res); + goto success; + } + Py_DECREF(res); + SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_NOT_METHOD); + +fail: + STAT_INC(LOAD_SUPER_ATTR, failure); + assert(!PyErr_Occurred()); + instr->op.code = LOAD_SUPER_ATTR; + cache->counter = adaptive_counter_backoff(cache->counter); + return; +success: + STAT_INC(LOAD_SUPER_ATTR, success); + assert(!PyErr_Occurred()); + cache->counter = adaptive_counter_cooldown(); +} + typedef enum { OVERRIDING, /* Is an overriding descriptor, and will remain so. 
*/ METHOD, /* Attribute has Py_TPFLAGS_METHOD_DESCRIPTOR set */ diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index e9f0061a59d3ba..27f42e5202e571 100644 --- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -164,7 +164,6 @@ static const char* _Py_stdlib_module_names[] = { "idlelib", "imaplib", "imghdr", -"imp", "importlib", "inspect", "io", diff --git a/Python/sysmodule.c b/Python/sysmodule.c index 58ed48859b5f3a..d673e40af5e1de 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -3166,10 +3166,8 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) SET_SYS("float_info", PyFloat_GetInfo()); SET_SYS("int_info", PyLong_GetInfo()); /* initialize hash_info */ - if (Hash_InfoType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin(&Hash_InfoType, &hash_info_desc) < 0) { - goto type_init_failed; - } + if (_PyStructSequence_InitBuiltin(&Hash_InfoType, &hash_info_desc) < 0) { + goto type_init_failed; } SET_SYS("hash_info", get_hash_info(tstate)); SET_SYS("maxunicode", PyLong_FromLong(0x10FFFF)); @@ -3191,11 +3189,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) #define ENSURE_INFO_TYPE(TYPE, DESC) \ do { \ - if (TYPE.tp_name == NULL) { \ - if (_PyStructSequence_InitBuiltinWithFlags( \ - &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \ - goto type_init_failed; \ - } \ + if (_PyStructSequence_InitBuiltinWithFlags( \ + &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \ + goto type_init_failed; \ } \ } while (0) @@ -3230,11 +3226,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) SET_SYS("thread_info", PyThread_GetInfo()); /* initialize asyncgen_hooks */ - if (AsyncGenHooksType.tp_name == NULL) { - if (_PyStructSequence_InitBuiltin( - &AsyncGenHooksType, &asyncgen_hooks_desc) < 0) { - goto type_init_failed; - } + if (_PyStructSequence_InitBuiltin( + &AsyncGenHooksType, &asyncgen_hooks_desc) < 0) { + goto type_init_failed; } #ifdef __EMSCRIPTEN__ diff --git a/Python/thread.c b/Python/thread.c index 4581f1af043a37..7fdedb0b9b7e26 100644 --- a/Python/thread.c +++ b/Python/thread.c @@ -137,10 +137,8 @@ PyThread_GetInfo(void) int len; #endif - if (ThreadInfoType.tp_name == 0) { - if (_PyStructSequence_InitBuiltin(&ThreadInfoType, - &threadinfo_desc) < 0) - return NULL; + if (_PyStructSequence_InitBuiltin(&ThreadInfoType, &threadinfo_desc) < 0) { + return NULL; } threadinfo = PyStructSequence_New(&ThreadInfoType); diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py index 5cfef5c572c4ae..b084d3e457f782 100644 --- a/Tools/build/deepfreeze.py +++ b/Tools/build/deepfreeze.py @@ -175,6 +175,12 @@ def generate_unicode(self, name: str, s: str) -> str: return f"&_Py_STR({strings[s]})" if s in identifiers: return f"&_Py_ID({s})" + if len(s) == 1: + c = ord(s) + if c < 128: + return f"(PyObject *)&_Py_SINGLETON(strings).ascii[{c}]" + elif c < 256: + return f"(PyObject *)&_Py_SINGLETON(strings).latin1[{c - 128}]" if re.match(r'\A[A-Za-z0-9_]+\Z', s): name = f"const_str_{s}" kind, ascii = analyze_character_width(s) diff --git a/Tools/build/generate_stdlib_module_names.py b/Tools/build/generate_stdlib_module_names.py index d15e5e2d5450d7..7e0e9602a10765 100644 --- a/Tools/build/generate_stdlib_module_names.py +++ b/Tools/build/generate_stdlib_module_names.py @@ -1,5 +1,5 @@ # This script lists the names of standard library modules -# to update Python/stdlib_mod_names.h +# to update Python/stdlib_module_names.h import _imp import os.path import re diff --git a/Tools/c-analyzer/TODO 
b/Tools/c-analyzer/TODO index 43760369b1980e..27a535814ea52b 100644 --- a/Tools/c-analyzer/TODO +++ b/Tools/c-analyzer/TODO @@ -495,7 +495,6 @@ Python/import.c:PyImport_ImportModuleLevelObject():PyId___path__ _Py_IDENTIFIER( Python/import.c:PyImport_ImportModuleLevelObject():PyId___spec__ _Py_IDENTIFIER(__spec__) Python/import.c:PyImport_ImportModuleLevelObject():PyId__handle_fromlist _Py_IDENTIFIER(_handle_fromlist) Python/import.c:PyImport_ImportModuleLevelObject():PyId__lock_unlock_module _Py_IDENTIFIER(_lock_unlock_module) -Python/import.c:PyImport_ReloadModule():PyId_imp _Py_IDENTIFIER(imp) Python/import.c:PyImport_ReloadModule():PyId_reload _Py_IDENTIFIER(reload) Python/import.c:_PyImportZip_Init():PyId_zipimporter _Py_IDENTIFIER(zipimporter) Python/import.c:import_find_and_load():PyId__find_and_load _Py_IDENTIFIER(_find_and_load) diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index 849fd5d9a1e8d5..4dfbbe72df56a0 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -341,7 +341,6 @@ Modules/_testcapi/vectorcall.c - MethodDescriptor2_Type - ##----------------------- ## static types -Modules/_ctypes/_ctypes.c - DictRemover_Type - Modules/_ctypes/_ctypes.c - PyCArrayType_Type - Modules/_ctypes/_ctypes.c - PyCArray_Type - Modules/_ctypes/_ctypes.c - PyCData_Type - @@ -352,18 +351,14 @@ Modules/_ctypes/_ctypes.c - PyCPointer_Type - Modules/_ctypes/_ctypes.c - PyCSimpleType_Type - Modules/_ctypes/_ctypes.c - PyCStructType_Type - Modules/_ctypes/_ctypes.c - Simple_Type - -Modules/_ctypes/_ctypes.c - StructParam_Type - Modules/_ctypes/_ctypes.c - Struct_Type - Modules/_ctypes/_ctypes.c - UnionType_Type - Modules/_ctypes/_ctypes.c - Union_Type - -Modules/_ctypes/callbacks.c - PyCThunk_Type - Modules/_ctypes/callproc.c - PyCArg_Type - -Modules/_ctypes/cfield.c - PyCField_Type - Modules/_ctypes/ctypes.h - PyCArg_Type - Modules/_ctypes/ctypes.h - PyCArrayType_Type - Modules/_ctypes/ctypes.h - PyCArray_Type - Modules/_ctypes/ctypes.h - PyCData_Type - -Modules/_ctypes/ctypes.h - PyCField_Type - Modules/_ctypes/ctypes.h - PyCFuncPtrType_Type - Modules/_ctypes/ctypes.h - PyCFuncPtr_Type - Modules/_ctypes/ctypes.h - PyCPointerType_Type - @@ -371,7 +366,6 @@ Modules/_ctypes/ctypes.h - PyCPointer_Type - Modules/_ctypes/ctypes.h - PyCSimpleType_Type - Modules/_ctypes/ctypes.h - PyCStgDict_Type - Modules/_ctypes/ctypes.h - PyCStructType_Type - -Modules/_ctypes/ctypes.h - PyCThunk_Type - Modules/_ctypes/ctypes.h - PyExc_ArgError - Modules/_ctypes/ctypes.h - _ctypes_conversion_encoding - Modules/_ctypes/ctypes.h - _ctypes_conversion_errors - @@ -454,6 +448,8 @@ Modules/_decimal/_decimal.c - SignalTuple - Modules/_asynciomodule.c - fi_freelist - Modules/_asynciomodule.c - fi_freelist_len - Modules/_ctypes/_ctypes.c - _ctypes_ptrtype_cache - +Modules/_ctypes/_ctypes.c - global_state - +Modules/_ctypes/ctypes.h - global_state - Modules/_tkinter.c - tcl_lock - Modules/_tkinter.c - excInCmd - Modules/_tkinter.c - valInCmd - @@ -485,27 +481,6 @@ Modules/_decimal/_decimal.c - _py_float_abs - Modules/_decimal/_decimal.c - _py_long_bit_length - Modules/_decimal/_decimal.c - _py_float_as_integer_ratio - Modules/_elementtree.c - expat_capi - -Modules/cjkcodecs/_codecs_hk.c - big5_encmap - -Modules/cjkcodecs/_codecs_hk.c - big5_decmap - -Modules/cjkcodecs/_codecs_hk.c big5hkscs_codec_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c - cp949_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - ksx1001_decmap 
- -Modules/cjkcodecs/_codecs_iso2022.c - jisxcommon_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0208_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0212_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_bmp_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_1_bmp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_2_bmp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_emp_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_1_emp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_2_emp_decmap - -Modules/cjkcodecs/_codecs_iso2022.c - gbcommon_encmap - -Modules/cjkcodecs/_codecs_iso2022.c - gb2312_decmap - -Modules/cjkcodecs/_codecs_iso2022.c ksx1001_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c jisx0208_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c jisx0212_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c jisx0213_init initialized - -Modules/cjkcodecs/_codecs_iso2022.c gb2312_init initialized - Modules/readline.c - libedit_append_replace_history_offset - Modules/readline.c - using_libedit_emulation - Modules/readline.c - libedit_history_start - diff --git a/Tools/importbench/importbench.py b/Tools/importbench/importbench.py index 6c4a537ad86e6c..619263b553c081 100644 --- a/Tools/importbench/importbench.py +++ b/Tools/importbench/importbench.py @@ -6,7 +6,7 @@ """ from test.test_importlib import util import decimal -import imp +from importlib.util import cache_from_source import importlib import importlib.machinery import json @@ -65,7 +65,7 @@ def source_wo_bytecode(seconds, repeat): name = '__importlib_test_benchmark__' # Clears out sys.modules and puts an entry at the front of sys.path. with util.create_modules(name) as mapping: - assert not os.path.exists(imp.cache_from_source(mapping[name])) + assert not os.path.exists(cache_from_source(mapping[name])) sys.meta_path.append(importlib.machinery.PathFinder) loader = (importlib.machinery.SourceFileLoader, importlib.machinery.SOURCE_SUFFIXES) @@ -80,7 +80,7 @@ def _wo_bytecode(module): name = module.__name__ def benchmark_wo_bytecode(seconds, repeat): """Source w/o bytecode: {}""" - bytecode_path = imp.cache_from_source(module.__file__) + bytecode_path = cache_from_source(module.__file__) if os.path.exists(bytecode_path): os.unlink(bytecode_path) sys.dont_write_bytecode = True @@ -108,9 +108,9 @@ def source_writing_bytecode(seconds, repeat): sys.path_hooks.append(importlib.machinery.FileFinder.path_hook(loader)) def cleanup(): sys.modules.pop(name) - os.unlink(imp.cache_from_source(mapping[name])) + os.unlink(cache_from_source(mapping[name])) for result in bench(name, cleanup, repeat=repeat, seconds=seconds): - assert not os.path.exists(imp.cache_from_source(mapping[name])) + assert not os.path.exists(cache_from_source(mapping[name])) yield result @@ -121,7 +121,7 @@ def writing_bytecode_benchmark(seconds, repeat): assert not sys.dont_write_bytecode def cleanup(): sys.modules.pop(name) - os.unlink(imp.cache_from_source(module.__file__)) + os.unlink(cache_from_source(module.__file__)) yield from bench(name, cleanup, repeat=repeat, seconds=seconds) writing_bytecode_benchmark.__doc__ = ( @@ -141,7 +141,7 @@ def source_using_bytecode(seconds, repeat): importlib.machinery.SOURCE_SUFFIXES) sys.path_hooks.append(importlib.machinery.FileFinder.path_hook(loader)) py_compile.compile(mapping[name]) - assert os.path.exists(imp.cache_from_source(mapping[name])) + assert os.path.exists(cache_from_source(mapping[name])) yield from bench(name, lambda: sys.modules.pop(name), 
repeat=repeat, seconds=seconds) diff --git a/netlify.toml b/netlify.toml deleted file mode 100644 index f5790fc5fec74f..00000000000000 --- a/netlify.toml +++ /dev/null @@ -1,11 +0,0 @@ -[build] - base = "Doc/" - command = "make html" - publish = "build/html" - # Do not trigger netlify builds if docs were not changed. - # Changed files should be in sync with `.github/workflows/doc.yml` - ignore = "git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../netlify.toml" - -[build.environment] - PYTHON_VERSION = "3.8" - IS_DEPLOYMENT_PREVIEW = "true"